#![allow(clippy::print_stdout)] #![allow(clippy::print_stderr)]
use std::path::{MAIN_SEPARATOR, PathBuf};
use std::{collections::BTreeMap, env, str::FromStr, time::Instant};
use chrono::{DateTime, SecondsFormat, Utc};
use nu_ansi_term::Color;
pub use test::Test;
use crate::compiler::{
CompilationResult, CompileConfig, Function, Program, SecretTarget, TargetValueRef, TimeZone,
VrlRuntime, compile_with_external,
runtime::{Runtime, Terminate},
state::{ExternalEnv, RuntimeState},
value::VrlValueConvert,
};
use crate::diagnostic::{DiagnosticList, Formatter};
use crate::value::Secrets;
use crate::value::Value;
#[allow(clippy::module_inception)]
mod test;
/// Runs `f` once, returning its result together with the wall-clock time the
/// call took.
fn measure_time<F, R>(f: F) -> (R, std::time::Duration)
where
    F: FnOnce() -> R,
{
    let started_at = Instant::now();
    let output = f();
    (output, started_at.elapsed())
}
/// Options controlling how the test suite is executed and reported.
pub struct TestConfig {
    /// Exit the process (code 1) as soon as one test fails.
    pub fail_early: bool,
    /// Also print resolved values / colored diagnostics for passing tests.
    pub verbose: bool,
    /// Suppress the want/got line diff printed on failures.
    pub no_diff: bool,
    /// Print compile and run durations next to each test result.
    pub timings: bool,
    /// Which VRL runtime implementation executes the compiled programs.
    pub runtime: VrlRuntime,
    /// Timezone handed to the runtime when resolving programs.
    pub timezone: TimeZone,
    /// Run tests that are marked `skip` instead of compile-checking them only.
    pub run_skipped: bool,
}
/// Identifying details of a failed test, collected during the run and listed
/// in the end-of-run summary.
#[derive(Clone)]
struct FailedTest {
    /// Test name as printed in the per-test output.
    name: String,
    /// Category heading the test was printed under.
    category: String,
    /// File the test definition came from (printed as `file:line`).
    source_file: String,
    /// Line within `source_file` where the test is defined.
    source_line: u32,
}
/// Returns the crate root directory, as reported by Cargo.
///
/// # Panics
///
/// Panics if the `CARGO_MANIFEST_DIR` environment variable is not set, i.e.
/// when the binary is not invoked through Cargo.
pub fn test_dir() -> PathBuf {
    PathBuf::from(
        env::var_os("CARGO_MANIFEST_DIR")
            .expect("CARGO_MANIFEST_DIR must be set; run this via `cargo`"),
    )
}
/// Path of the crate's `tests` directory rendered as a string, including a
/// trailing platform path separator.
pub fn test_prefix() -> String {
    format!(
        "{}{MAIN_SEPARATOR}",
        test_dir().join("tests").to_string_lossy()
    )
}
/// Path to the bundled `tests/example.vrl` program.
pub fn example_vrl_path() -> PathBuf {
    let mut path = test_dir();
    path.push("tests");
    path.push("example.vrl");
    path
}
/// Builds a [`Test`] for every example a function exposes: one per closure
/// input example (suffixed "(closure)") and one per regular example.
pub fn get_tests_from_functions(functions: Vec<Box<dyn Function>>) -> Vec<Test> {
    let mut tests = Vec::new();
    for function in functions {
        if let Some(closure) = function.closure() {
            for input in closure.inputs.iter() {
                tests.push(Test::from_example(
                    format!("{} (closure)", function.identifier()),
                    &input.example,
                ));
            }
        }
        for example in function.examples().iter() {
            tests.push(Test::from_example(function.identifier(), example));
        }
    }
    tests
}
/// Compiles and runs every test in `tests`, printing a per-test verdict as it
/// goes and a final summary (which terminates the process) at the end.
///
/// For each test, a fresh `CompileConfig` plus caller-defined metadata `T` is
/// obtained from `compile_config_provider`; `finalize_config` receives that
/// metadata just before the program is executed, letting the caller complete
/// any deferred setup.
pub fn run_tests<T>(
    tests: Vec<Test>,
    cfg: &TestConfig,
    functions: &[Box<dyn Function>],
    compile_config_provider: impl Fn() -> (CompileConfig, T),
    finalize_config: impl Fn(T),
) {
    let total_count = tests.len();
    let mut failed_count = 0;
    let mut warnings_count = 0;
    // Category of the previous test; the category header is only printed when
    // it changes between consecutive tests.
    let mut category = "".to_owned();
    let mut failed_tests: Vec<FailedTest> = Vec::new();
    for mut test in tests {
        if category != test.category {
            category.clone_from(&test.category);
            println!("{}", Color::Fixed(3).bold().paint(category.to_string()));
        }
        // A test whose definition itself failed to load is reported as
        // INVALID and counted as a failure without compiling anything.
        if let Some(err) = test.error {
            println!("{}", Color::Purple.bold().paint("INVALID"));
            println!("{}", Color::Red.paint(err));
            failed_count += 1;
            continue;
        }
        // Print the name padded with dots to a fixed-width column; the name is
        // truncated to 58 chars so at least two dots always follow it.
        let mut name = test.name.clone();
        name.truncate(58);
        let dots = if name.len() >= 60 { 0 } else { 60 - name.len() };
        print!(" {}{}", name, Color::Fixed(240).paint(".".repeat(dots)));
        let (mut config, config_metadata) = (compile_config_provider)();
        for (path, recursive) in &test.read_only_paths {
            config.set_read_only_path(path.clone(), *recursive);
        }
        let (result, compile_duration) = measure_time(|| {
            compile_with_external(&test.source, functions, &ExternalEnv::default(), config)
        });
        let compile_timing_fmt = if cfg.timings {
            format!("comp: {compile_duration:>9.3?}")
        } else {
            String::new()
        };
        let failed = match result {
            Ok(CompilationResult {
                program,
                warnings,
                config: _,
            }) => {
                warnings_count += warnings.len();
                if test.skip && !cfg.run_skipped {
                    // Skipped tests still go through compilation above;
                    // reaching this branch means compilation succeeded.
                    println!("{}", Color::Yellow.bold().paint("OK (compile only)"));
                    false
                } else if test.check_diagnostics {
                    process_compilation_diagnostics(&test, cfg, warnings, compile_timing_fmt)
                } else if warnings.is_empty() {
                    let run_start = Instant::now();
                    finalize_config(config_metadata);
                    let result = run_vrl(program, &mut test.object, cfg.timezone, cfg.runtime);
                    let run_end = run_start.elapsed();
                    let timings = {
                        // Runs slower than 10ms are highlighted (color 1);
                        // otherwise the timing is dimmed (color 245).
                        let timings_color = if run_end.as_millis() > 10 { 1 } else { 245 };
                        let timings_fmt = if cfg.timings {
                            format!(" ({compile_timing_fmt}, run: {run_end:>9.3?})")
                        } else {
                            String::new()
                        };
                        Color::Fixed(timings_color).paint(timings_fmt).to_string()
                    };
                    process_result(result, &mut test, cfg, timings)
                } else {
                    // Compilation warnings on a test that does not check
                    // diagnostics count as a failure.
                    println!("{} (diagnostics)", Color::Red.bold().paint("FAILED"));
                    let formatter = Formatter::new(&test.source, warnings);
                    println!("{formatter}");
                    true
                }
            }
            Err(diagnostics) => {
                warnings_count += diagnostics.warnings().len();
                process_compilation_diagnostics(&test, cfg, diagnostics, compile_timing_fmt)
            }
        };
        if failed {
            failed_count += 1;
            failed_tests.push(FailedTest {
                name: test.name.clone(),
                category: test.category.clone(),
                source_file: test.source_file.clone(),
                source_line: test.source_line,
            });
        }
    }
    print_result(total_count, failed_count, warnings_count, failed_tests);
}
/// Compares the runtime outcome of a test program against the expected result
/// and prints the verdict.
///
/// `Ok` outcomes are converted to JSON and matched against the parsed
/// expectation; `Err` outcomes are matched against the expectation as an
/// error message, except aborts, which compare the (mutated) event object
/// instead. Returns `true` when the test failed; exits the process when
/// `config.fail_early` is set and the test failed.
fn process_result(
    result: Result<Value, Terminate>,
    test: &mut Test,
    config: &TestConfig,
    timings: String,
) -> bool {
    match result {
        Ok(got) => {
            let got_value = vrl_value_to_json_value(got);
            let mut failed = false;
            let match_mode = if test.check_type_only {
                MatchMode::TypeOnly
            } else {
                MatchMode::Exact
            };
            let want = test.result.clone();
            // The expectation supports a few literal prefixes:
            //   r'…'  regex literal (re-rendered via its compiled form)
            //   t'…'  timestamp literal (normalized to RFC 3339)
            //   s'…'  raw string literal
            // Anything else is parsed as JSON; when a prefix fails to parse or
            // the JSON is invalid, the raw string is used as the expectation.
            let want_value = if want.starts_with("r'") && want.ends_with('\'') {
                match regex::Regex::new(&want[2..want.len() - 1].replace("\\'", "'")) {
                    Ok(regex) => regex.to_string().into(),
                    Err(_) => want.into(),
                }
            } else if want.starts_with("t'") && want.ends_with('\'') {
                match DateTime::<Utc>::from_str(&want[2..want.len() - 1]) {
                    Ok(dt) => dt.to_rfc3339_opts(SecondsFormat::AutoSi, true).into(),
                    Err(_) => want.into(),
                }
            } else if want.starts_with("s'") && want.ends_with('\'') {
                want[2..want.len() - 1].into()
            } else {
                serde_json::from_str::<'_, serde_json::Value>(want.trim()).unwrap_or_else(|err| {
                    eprintln!("{err}");
                    want.into()
                })
            };
            if match_mode.matches(&got_value, &want_value) {
                print!(
                    "{timings}{}",
                    Color::Green.bold().paint(match_mode.ok_label())
                );
            } else {
                print!("{}", Color::Red.bold().paint(match_mode.fail_label()));
                if !config.no_diff {
                    let want = serde_json::to_string_pretty(&want_value).unwrap();
                    let got = serde_json::to_string_pretty(&got_value).unwrap();
                    let diff = prettydiff::diff_lines(&want, &got);
                    println!(" {diff}");
                }
                failed = true;
            }
            // Terminates the line started with `print!` above.
            println!();
            if config.verbose {
                println!("{got_value:#}");
            }
            if failed && config.fail_early {
                std::process::exit(1)
            }
            failed
        }
        Err(err) => {
            let mut failed = false;
            let got = err.to_string().trim().to_owned();
            let want = test.result.clone().trim().to_owned();
            // First try matching the error message itself (approximately when
            // `result_approx` is set).
            if (test.result_approx && compare_partial_diagnostic(&got, &want)) || got == want {
                println!("{}{}", Color::Green.bold().paint("OK"), timings);
            } else if matches!(err, Terminate::Abort { .. }) {
                // On abort, the expectation describes the event object state,
                // so compare the object rather than the error message.
                let want =
                    serde_json::from_str::<'_, serde_json::Value>(&want).unwrap_or_else(|err| {
                        eprintln!("{err}");
                        want.into()
                    });
                let got = vrl_value_to_json_value(test.object.clone());
                if got == want {
                    println!("{}{}", Color::Green.bold().paint("OK"), timings);
                } else {
                    println!("{} (abort)", Color::Red.bold().paint("FAILED"));
                    if !config.no_diff {
                        let want = serde_json::to_string_pretty(&want).unwrap();
                        let got = serde_json::to_string_pretty(&got).unwrap();
                        let diff = prettydiff::diff_lines(&want, &got);
                        println!("{diff}");
                    }
                    failed = true;
                }
            } else {
                println!("{} (runtime)", Color::Red.bold().paint("FAILED"));
                if !config.no_diff {
                    let diff = prettydiff::diff_lines(&want, &got);
                    println!("{diff}");
                }
                failed = true;
            }
            if config.verbose {
                println!("{err:#}");
            }
            if failed && config.fail_early {
                std::process::exit(1)
            }
            failed
        }
    }
}
/// Compares a test's compilation diagnostics against the expected result and
/// prints the verdict.
///
/// When `test.result_approx` is set, the comparison is partial (see
/// [`compare_partial_diagnostic`]); otherwise the trimmed diagnostic text must
/// match exactly. Returns `true` when the test failed; exits the process when
/// `cfg.fail_early` is set and the test failed.
fn process_compilation_diagnostics(
    test: &Test,
    cfg: &TestConfig,
    diagnostics: DiagnosticList,
    compile_timing_fmt: String,
) -> bool {
    let mut failed = false;
    let mut formatter = Formatter::new(&test.source, diagnostics);
    let got = formatter.to_string();
    let got = got.trim();
    // Borrow the expected result directly; no need to clone the whole string
    // just to trim it.
    let want = test.result.trim();
    if (test.result_approx && compare_partial_diagnostic(got, want)) || got == want {
        let timings = {
            let timings_fmt = if cfg.timings {
                format!(" ({compile_timing_fmt})")
            } else {
                String::new()
            };
            // Color 245: dim grey, matching the run-timing style elsewhere.
            Color::Fixed(245).paint(timings_fmt).to_string()
        };
        println!("{}{timings}", Color::Green.bold().paint("OK"));
    } else {
        println!("{} (compilation)", Color::Red.bold().paint("FAILED"));
        if !cfg.no_diff {
            let diff = prettydiff::diff_lines(want, got);
            println!("{diff}");
        }
        // Re-print the diagnostics in color to make the failure readable.
        formatter.enable_colors(true);
        println!("{formatter:#}");
        failed = true;
    }
    if cfg.verbose && !failed {
        formatter.enable_colors(true);
        println!("{formatter:#}");
    }
    if failed && cfg.fail_early {
        std::process::exit(1)
    }
    failed
}
/// Prints the end-of-run summary — pass/fail counts, warning count, and the
/// list of failed tests — then terminates the process, exiting non-zero when
/// any test failed.
fn print_result(
    total_count: usize,
    failed_count: usize,
    warnings_count: usize,
    failed_tests: Vec<FailedTest>,
) {
    let passed_count = total_count - failed_count;
    println!("\n");
    if failed_count == 0 {
        println!(
            "Overall result: {}\n Number passed: {}",
            Color::Green.bold().paint("SUCCESS"),
            Color::Green.bold().paint(passed_count.to_string())
        );
    } else {
        println!(
            "Overall result: {}\n\n Number failed: {}\n Number passed: {}",
            Color::Red.bold().paint("FAILED"),
            Color::Red.bold().paint(failed_count.to_string()),
            Color::Green.bold().paint(passed_count.to_string())
        );
    }
    println!(
        " Number warnings: {}",
        Color::Yellow.bold().paint(warnings_count.to_string())
    );
    if !failed_tests.is_empty() {
        println!("\n{}", Color::Red.bold().paint("Failed tests:"));
        for failure in failed_tests {
            println!(
                " {} - {}:{}",
                Color::Yellow.paint(format!("{}/{}", failure.category, failure.name)),
                failure.source_file,
                failure.source_line
            );
        }
    }
    let exit_code = if failed_count > 0 { 1 } else { 0 };
    std::process::exit(exit_code)
}
/// Loosely compares diagnostic output: each `error[E…]` line of `got` must
/// contain the corresponding line of `want`; surplus lines on either side are
/// ignored (so `got` without any error lines always matches).
fn compare_partial_diagnostic(got: &str, want: &str) -> bool {
    let error_lines = got.lines().filter(|line| line.trim().starts_with("error[E"));
    error_lines
        .zip(want.trim().lines())
        .all(|(got_line, want_line)| got_line.contains(want_line))
}
/// Recursively converts a VRL `Value` into an equivalent `serde_json::Value`
/// so test output can be compared and pretty-printed as JSON.
fn vrl_value_to_json_value(value: Value) -> serde_json::Value {
    match value {
        Value::Null => serde_json::Value::Null,
        Value::Boolean(b) => b.into(),
        Value::Integer(n) => n.into(),
        Value::Float(f) => f.into_inner().into(),
        // Bytes are rendered as a (lossy) UTF-8 string.
        bytes @ Value::Bytes(_) => {
            serde_json::Value::String(bytes.try_bytes_utf8_lossy().unwrap().into_owned())
        }
        Value::Timestamp(ts) => ts.to_rfc3339_opts(SecondsFormat::AutoSi, true).into(),
        Value::Regex(regex) => regex.to_string().into(),
        Value::Array(items) => items
            .into_iter()
            .map(vrl_value_to_json_value)
            .collect::<serde_json::Value>(),
        Value::Object(map) => map
            .into_iter()
            .map(|(key, val)| (key, vrl_value_to_json_value(val)))
            .collect::<serde_json::Value>(),
    }
}
/// How a produced test value is compared against the expected value.
enum MatchMode {
    /// The produced JSON value must equal the expectation exactly.
    Exact,
    /// Only the JSON value kind (string, number, …) must agree.
    TypeOnly,
}

impl MatchMode {
    /// Returns `true` when `got` satisfies `want` under this mode.
    fn matches(&self, got: &serde_json::Value, want: &serde_json::Value) -> bool {
        match self {
            Self::Exact => got == want,
            Self::TypeOnly => std::mem::discriminant(got) == std::mem::discriminant(want),
        }
    }

    /// Label printed when the comparison succeeds.
    fn ok_label(&self) -> &'static str {
        if matches!(self, Self::Exact) {
            "OK"
        } else {
            "OK (type match)"
        }
    }

    /// Label printed when the comparison fails.
    fn fail_label(&self) -> &'static str {
        if matches!(self, Self::Exact) {
            "FAILED (expectation)"
        } else {
            "FAILED (type mismatch)"
        }
    }
}
/// Executes a compiled VRL `program` against `test_object`, returning the
/// resolved value or the reason the program terminated.
///
/// The target is built with empty metadata and seeded with two dummy secrets
/// ("my_secret" and "datadog_api_key") so secret-reading functions can be
/// exercised by the tests. `test_object` is mutated in place by the program.
fn run_vrl(
    program: Program,
    test_object: &mut Value,
    timezone: TimeZone,
    vrl_runtime: VrlRuntime,
) -> Result<Value, Terminate> {
    let mut metadata = Value::from(BTreeMap::new());
    let mut target = TargetValueRef {
        value: test_object,
        metadata: &mut metadata,
        secrets: &mut Secrets::new(),
    };
    target.insert_secret("my_secret", "secret value");
    target.insert_secret("datadog_api_key", "secret value");
    match vrl_runtime {
        VrlRuntime::Ast => {
            let mut runtime = Runtime::new(RuntimeState::default());
            runtime.resolve(&mut target, &program, &timezone)
        }
    }
}