use crate::parse::ResultParser;
use crate::parse_fluster::FlusterResultParser;
use crate::runner_results::*;
use crate::timeout::{TimeoutChildStdout, Timer};
use crate::{
    parse_regex_set, CaselistResult, FailCounter, SubRunConfig, TestCase, TestCommand,
    TestConfiguration, TestStatus,
};
use anyhow::{Context, Result};
use regex::{Regex, RegexSet};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs;
use std::io::{BufRead, BufReader};
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
/// Driver that runs video-decode conformance tests through fluster.py and
/// collects the results for the runner.
pub struct FlusterCommand {
    /// Fluster-specific settings (binary path, decoders, test-suite dir).
    pub run_config: FlusterRunConfig,
    /// Generic per-run test configuration shared with other test commands.
    pub config: TestConfiguration,
    /// Extra arguments forwarded verbatim to every fluster.py invocation.
    pub args: Vec<String>,
    /// Base directory under which per-caselist fluster result dirs are created.
    pub results_dir: PathBuf,
}
/// Fluster-specific settings deserialized from the run configuration.
#[derive(Deserialize, Clone)]
pub struct FlusterRunConfig {
    /// Path to the fluster.py executable.
    pub bin: PathBuf,
    /// Decoder names to exercise (as reported by `fluster.py list`).
    pub decoders: Vec<String>,
    /// Optional directory containing fluster's test-suite YAML files; when
    /// absent, `test_suites/` next to `bin` is used (see `list_tests`).
    pub test_suites_dir: Option<PathBuf>,
}
/// One fluster run section from the TOML config file: generic sub-run options
/// and fluster-specific options flattened into a single table.
#[derive(Deserialize)]
pub struct FlusterTomlConfig {
    /// Generic sub-run options (flattened from the same TOML table).
    #[serde(flatten)]
    pub sub_config: SubRunConfig,
    /// Fluster-specific options (flattened from the same TOML table).
    #[serde(flatten)]
    pub fluster_config: FlusterRunConfig,
    /// Optional prefix for this run's results; defaults to "".
    #[serde(default)]
    pub prefix: String,
}
impl FlusterTomlConfig {
    /// Enumerates all fluster tests for `fluster` and buckets them into
    /// per-decoder groups, honoring the caller-supplied include `filters`.
    ///
    /// Returns one `(command, test cases)` pair per group; errors if listing
    /// the tests or compiling the filters fails.
    pub fn test_groups<'d>(
        &self,
        fluster: &'d FlusterCommand,
        filters: &[String],
    ) -> Result<Vec<(&'d dyn TestCommand, Vec<TestCase>)>> {
        let mut all_tests = fluster.list_tests().context("Listing fluster tests")?;
        let group_size = all_tests.len();

        // No filters means "include everything"; otherwise compile the
        // user-supplied patterns into a single RegexSet.
        let include_filters = if filters.is_empty() {
            Vec::new()
        } else {
            vec![parse_regex_set(filters).context("compiling include filters")?]
        };

        fluster.group_tests_per_decoder(
            &mut all_tests,
            group_size,
            0,
            &self.sub_config,
            &include_filters,
        )
    }
}
/// Top-level shape of a fluster test-suite YAML file: a list of test vectors.
#[derive(Serialize, Deserialize, Debug)]
pub struct FlusterVectors {
    /// The test vectors declared by the suite file.
    pub test_vectors: Vec<FlusterVector>,
}
/// A single test vector entry from a fluster test-suite YAML file; only the
/// name is needed here (other YAML fields are ignored by serde).
#[derive(Serialize, Deserialize, Debug)]
pub struct FlusterVector {
    /// The vector's name, combined with a decoder as "<decoder>@<name>".
    pub name: String,
}
// NOTE(review): empty impl block — adds no items to TestStatus and appears to
// be leftover scaffolding; it can likely be removed.
impl TestStatus {}
impl FlusterCommand {
fn results_dir(&self, caselist_state: &CaselistState) -> PathBuf {
self.results_dir.join(format!(
"fluster-results-{}-{}",
caselist_state.run_id, caselist_state.caselist_id
))
}
pub fn group_tests_per_decoder(
&self,
test_cases: &mut [TestCase],
tests_per_group: usize,
min_tests_per_group: usize,
sub_config: &SubRunConfig,
include_filters: &[RegexSet],
) -> Result<Vec<(&dyn TestCommand, Vec<TestCase>)>> {
let mut test_groups: Vec<(&dyn TestCommand, Vec<TestCase>)> = Vec::new();
for decoder in &self.run_config.decoders {
let mut filters = include_filters.to_vec();
filters.push(RegexSet::new([format!(r"^{decoder}@")])?);
test_groups.append(&mut self.split_tests_to_groups(
test_cases.to_vec(),
tests_per_group,
min_tests_per_group,
sub_config,
&filters,
)?);
}
Ok(test_groups)
}
pub fn codec_from_decoder(
decoder: &str,
codecs: &HashMap<String, Vec<String>>,
) -> Option<String> {
codecs.iter().find_map(|(c, d)| {
if d.contains(&decoder.to_string()) {
Some(c.to_string())
} else {
None
}
})
}
pub fn map_codecs_decoders(
bin: &PathBuf,
extra_args: &[String],
) -> Result<HashMap<String, Vec<String>>> {
let decoder_re = Regex::new(r" {4}(?P<decoder>.*): .*$")?;
let codec_re = Regex::new(r"^(?P<codec>[^ ]*)$")?;
let mut decoders = HashMap::new();
let mut command = Command::new(bin);
let child = command
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.stdin(Stdio::null())
.args(extra_args)
.arg("list")
.spawn()
.context("Running fluster.py list")?;
let result_lines: Vec<String> =
BufReader::new(child.stdout.context("Cannot get command stdout")?)
.lines()
.flatten()
.collect();
let mut found = false;
let mut current_codec = None;
for line in &result_lines {
if line == "List of available decoders:" {
found = true;
continue;
}
if !found {
continue;
}
if let Some(cap) = codec_re.captures(line) {
current_codec = Some(cap["codec"].to_string());
}
if let Some(cap) = decoder_re.captures(line) {
decoders
.entry(current_codec.as_ref().context("Parsing decoders")?.clone())
.or_insert(vec![])
.push(cap["decoder"].to_string());
}
}
Ok(decoders)
}
pub fn list_tests(&self) -> Result<Vec<TestCase>> {
let mut tests = vec![];
let codecs = Self::map_codecs_decoders(&self.run_config.bin, &self.args)?;
for decoder in &self.run_config.decoders {
let codec = Self::codec_from_decoder(decoder, &codecs)
.context(format!("Cannot get codec from decoder {}", decoder))?
.to_lowercase();
let test_suites = if let Some(path) = &self.run_config.test_suites_dir {
path.clone().join("test_suites").join(&codec)
} else {
self.run_config
.bin
.parent()
.context("Cannot get fluster.py parent dir")?
.join("test_suites")
.join(&codec)
};
for vectors in test_suites.read_dir()? {
let path = vectors?.path();
let vectors_file =
fs::File::open(&path).context(format!("opening {}", path.to_string_lossy()))?;
let vectors: FlusterVectors = serde_yaml::from_reader(vectors_file)
.context(format!("reading test vector: {}", path.to_string_lossy()))?;
for v in &vectors.test_vectors {
tests.push(TestCase::Named(format!("{}@{}", decoder, v.name)));
}
}
}
Ok(tests)
}
fn split_test_name(test: &str) -> Result<(&str, &str)> {
test.split_once('@')
.context(format!("Extracting decoder form test name {test}"))
}
}
impl TestCommand for FlusterCommand {
    fn name(&self) -> &str {
        "fluster"
    }

    fn config(&self) -> &TestConfiguration {
        &self.config
    }

    /// Points the user at the runner's per-caselist log file.
    fn see_more(&self, _test_name: &str, caselist_state: &CaselistState) -> String {
        let log_path = self.config.output_dir.join(format!(
            "c{}.r{}.log",
            caselist_state.caselist_id, caselist_state.run_id
        ));
        format!("See {:?}", log_path)
    }

    /// Builds the fluster.py invocation for one group of tests. All tests in a
    /// group belong to a single decoder (see `group_tests_per_decoder`), so
    /// the decoder is taken from the first test's name.
    fn prepare(&self, caselist_state: &CaselistState, tests: &[&TestCase]) -> Result<Command> {
        let mut command = Command::new(&self.run_config.bin);
        command
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .stdin(Stdio::null())
            .args(&self.args);
        if let Some(path) = &self.run_config.test_suites_dir {
            command.arg("--test-suites-dir").arg(path);
        }
        // Error out instead of panicking (tests[0]) on an empty group.
        let first = tests
            .first()
            .context("Preparing an empty fluster test group")?;
        let (decoder, _) = Self::split_test_name(first.name())?;
        command
            .arg("--output")
            .arg(self.results_dir(caselist_state))
            .arg("run")
            .arg("-j")
            .arg("1")
            .arg("-d")
            .arg(decoder);
        // Propagate malformed test names instead of silently passing "" as a
        // vector name the way the previous unwrap_or(("", "")) did.
        let vectors = tests
            .iter()
            .map(|t| Self::split_test_name(t.name()).map(|(_, vector)| vector))
            .collect::<Result<Vec<_>>>()?;
        command.arg("--testvectors").args(vectors);
        Ok(command)
    }

    /// Parses fluster's stdout into per-test results, honoring the runner's
    /// timeout timer and failure counter.
    fn parse_results(
        &self,
        _caselist_state: &CaselistState,
        _tests: &[&TestCase],
        stdout: TimeoutChildStdout,
        timer: Option<Timer>,
        fail_counter: Option<FailCounter>,
    ) -> Result<CaselistResult> {
        let parser = FlusterResultParser::new();
        parser.parse_with_timer(stdout, timer, fail_counter)
    }

    /// Removes fluster's per-caselist results directory when no result in the
    /// caselist needs its logs kept (all passed / xfail per `save_xfail_logs`).
    fn clean(
        &self,
        caselist_state: &CaselistState,
        _tests: &[&TestCase],
        results: &[RunnerResult],
    ) -> Result<()> {
        if !results.is_empty()
            && results
                .iter()
                .all(|x| !x.status.should_save_logs(self.config.save_xfail_logs))
        {
            let log_path = self.results_dir(caselist_state);
            if log_path.exists() {
                return fs::remove_dir_all(&log_path)
                    .with_context(|| format!("Removing logs in {}", log_path.to_string_lossy()));
            }
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{CaselistResult, TestResult};
    use std::time::Duration;

    // Feeds captured fluster stdout bytes through the fluster result parser.
    fn parse_fluster_results(output: &mut &[u8]) -> Result<CaselistResult> {
        let parser = FlusterResultParser::new();
        parser.parse(output, None)
    }

    // Shorthand for an expected TestResult with zero duration and no subtests.
    fn result(name: &str, status: TestStatus) -> TestResult {
        TestResult {
            name: name.to_string(),
            status,
            duration: Duration::new(0, 0),
            subtests: vec![],
        }
    }

    // Checks that each fluster verdict maps to the runner's status
    // (Not Run -> Skip, Success -> Pass, Error -> Crash, etc.) and that test
    // names are rewritten to the "<decoder>@<vector>" form.
    #[test]
    fn parse_results() -> Result<()> {
        use TestStatus::*;
        // Sample of real fluster output; the continuation lines are left
        // unindented so the parser sees them exactly as fluster emits them.
        let output = "****************************************************************************************************
Running test suite VP8-TEST-VECTORS with decoder GStreamer-VP8-VAAPI-Gst1.0
Using 1 parallel job(s)
****************************************************************************************************
[TEST SUITE ] (DECODER ) TEST VECTOR ... RESULT
----------------------------------------------------------------------
[VP8-TEST-VECTORS] (GStreamer-VP8-VAAPI-Gst1.0) vp80-00-comprehensive-001 ... Not Run
[VP8-TEST-VECTORS] (GStreamer-VP8-VAAPI-Gst1.0) vp80-00-comprehensive-002 ... Timeout
[VP8-TEST-VECTORS] (GStreamer-VP8-VAAPI-Gst1.0) vp80-00-comprehensive-003 ... Fail
[VP8-TEST-VECTORS] (GStreamer-VP8-VAAPI-Gst1.0) vp80-00-comprehensive-004 ... Success
[VP8-TEST-VECTORS] (GStreamer-VP8-VAAPI-Gst1.0) vp80-00-comprehensive-005 ... Error
Ran 3/3 tests successfully in 0.166 secs";
        let results = parse_fluster_results(&mut output.as_bytes())?.results;
        assert_eq!(
            results,
            vec!(
                result("GStreamer-VP8-VAAPI-Gst1.0@vp80-00-comprehensive-001", Skip),
                result(
                    "GStreamer-VP8-VAAPI-Gst1.0@vp80-00-comprehensive-002",
                    Timeout,
                ),
                result("GStreamer-VP8-VAAPI-Gst1.0@vp80-00-comprehensive-003", Fail),
                result("GStreamer-VP8-VAAPI-Gst1.0@vp80-00-comprehensive-004", Pass),
                result(
                    "GStreamer-VP8-VAAPI-Gst1.0@vp80-00-comprehensive-005",
                    Crash,
                ),
            )
        );
        Ok(())
    }
}