pub mod aliases;
pub mod validate;
use std::collections::HashMap;
use crate::encoder::EncoderConfig;
use crate::generator::{CsvColumnSpec, GeneratorConfig, LogGeneratorConfig};
use crate::sink::SinkConfig;
use crate::{ConfigError, SondaError};
/// Periodically scheduled output gap: every `every`, go silent for `r#for`.
///
/// Both fields are duration strings (e.g. "2m", "20s"); parsing/validation
/// lives elsewhere (see `config::validate::parse_duration`).
// NOTE(review): "gap = no samples emitted" is inferred from the field names —
// confirm against the scheduler.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "config", derive(serde::Serialize, serde::Deserialize))]
pub struct GapConfig {
    /// Interval between gap starts (duration string).
    pub every: String,
    /// Gap length (duration string); raw identifier because `for` is a Rust
    /// keyword — serde still maps it to plain `for` in YAML.
    pub r#for: String,
}
/// Strategy for producing label values during a cardinality spike.
///
/// YAML values are snake_case ("counter" / "random").
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
#[cfg_attr(feature = "config", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "config", serde(rename_all = "snake_case"))]
pub enum SpikeStrategy {
    /// Counter-based values (the default) — presumably sequential suffixes;
    /// confirm in the generator.
    #[default]
    Counter,
    /// Randomized values; pair with `CardinalitySpikeConfig::seed` for
    /// reproducibility.
    Random,
}
/// Periodic high-cardinality injection on a single label.
///
/// Every `every`, for a window of `r#for`, the label `label` takes on up to
/// `cardinality` distinct values generated according to `strategy`.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "config", derive(serde::Serialize, serde::Deserialize))]
pub struct CardinalitySpikeConfig {
    /// Label key whose value set spikes (e.g. "pod_name").
    pub label: String,
    /// Interval between spike starts (duration string, e.g. "2m").
    pub every: String,
    /// Spike window length (duration string); raw identifier because `for`
    /// is a Rust keyword — serde maps it to plain `for` in YAML.
    pub r#for: String,
    /// Number of distinct label values during the spike.
    pub cardinality: u64,
    /// Value-generation strategy; defaults to `SpikeStrategy::Counter`.
    #[cfg_attr(feature = "config", serde(default))]
    pub strategy: SpikeStrategy,
    /// Optional prefix for generated values (e.g. "pod-").
    #[cfg_attr(feature = "config", serde(default))]
    pub prefix: Option<String>,
    /// Optional RNG seed — presumably used by the `Random` strategy for
    /// reproducible values; confirm in the generator.
    #[cfg_attr(feature = "config", serde(default))]
    pub seed: Option<u64>,
}
/// How values for a dynamic label are produced.
///
/// `serde(untagged)`: the variant is chosen by field shape, not by a tag —
/// a `values` list deserializes as `ValuesList`, a `cardinality` (plus
/// optional `prefix`) as `Counter`. Variant order matters for untagged
/// enums: `ValuesList` is tried first.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "config", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "config", serde(untagged))]
pub enum DynamicLabelStrategy {
    /// Explicit list of label values.
    ValuesList {
        values: Vec<String>,
    },
    /// Generated values, `cardinality` many, with an optional prefix —
    /// presumably `<prefix><n>`; confirm in the generator.
    Counter {
        #[cfg_attr(feature = "config", serde(default))]
        prefix: Option<String>,
        cardinality: u64,
    },
}
/// One dynamically-valued label: a key plus a value-generation strategy.
///
/// The strategy is `serde(flatten)`ed, so in YAML the strategy's fields sit
/// next to `key` (e.g. `key: region` / `values: [...]`).
#[derive(Debug, Clone)]
#[cfg_attr(feature = "config", derive(serde::Serialize, serde::Deserialize))]
pub struct DynamicLabelConfig {
    /// Label key (e.g. "hostname", "region").
    pub key: String,
    /// How values for this key are produced; flattened into this mapping.
    #[cfg_attr(feature = "config", serde(flatten))]
    pub strategy: DynamicLabelStrategy,
}
/// Periodic rate burst: every `every`, for `r#for`, scale the base rate by
/// `multiplier` — presumably applied by the scheduler; confirm there.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "config", derive(serde::Serialize, serde::Deserialize))]
pub struct BurstConfig {
    /// Interval between burst starts (duration string, e.g. "10s").
    pub every: String,
    /// Burst length (duration string); raw identifier because `for` is a
    /// Rust keyword — serde maps it to plain `for` in YAML.
    pub r#for: String,
    /// Rate multiplier applied during the burst window.
    pub multiplier: f64,
}
// Providers for `serde(default = "...")` attributes below; only needed (and
// only compiled) when the `config` feature enables serde.

/// Default metric encoder: Prometheus text exposition, default precision.
#[cfg(feature = "config")]
fn default_encoder() -> EncoderConfig {
    EncoderConfig::PrometheusText { precision: None }
}

/// Default log encoder: JSON Lines, default precision.
#[cfg(feature = "config")]
fn default_log_encoder() -> EncoderConfig {
    EncoderConfig::JsonLines { precision: None }
}

/// Default sink: stdout.
#[cfg(feature = "config")]
fn default_sink() -> SinkConfig {
    SinkConfig::Stdout
}
/// Policy for handling a sink write failure.
///
/// YAML values are lowercase ("warn" / "fail").
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
#[cfg_attr(feature = "config", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "config", serde(rename_all = "lowercase"))]
pub enum OnSinkError {
    /// Log and keep going (the default) — presumably; confirm in the runner.
    #[default]
    Warn,
    /// Treat the sink error as fatal — presumably aborts the scenario;
    /// confirm in the runner.
    Fail,
}
/// Schedule, labeling, and output settings shared by every scenario kind.
///
/// Embedded with `serde(flatten)` into `ScenarioConfig`,
/// `LogScenarioConfig`, `HistogramScenarioConfig`, and
/// `SummaryScenarioConfig`, so these fields appear at the top level of each
/// scenario's YAML mapping.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "config", derive(serde::Serialize, serde::Deserialize))]
pub struct BaseScheduleConfig {
    /// Scenario name; for CSV-expanded scenarios this is replaced by the
    /// column's metric name (see `expand_scenario`).
    pub name: String,
    /// Emission rate — events per second, presumably; confirm in scheduler.
    pub rate: f64,
    /// Optional total run duration (duration string, e.g. "30s").
    #[cfg_attr(feature = "config", serde(default))]
    pub duration: Option<String>,
    /// Optional periodic output gaps.
    #[cfg_attr(feature = "config", serde(default))]
    pub gaps: Option<GapConfig>,
    /// Optional periodic rate bursts.
    #[cfg_attr(feature = "config", serde(default))]
    pub bursts: Option<BurstConfig>,
    /// Optional periodic cardinality spikes.
    #[cfg_attr(feature = "config", serde(default))]
    pub cardinality_spikes: Option<Vec<CardinalitySpikeConfig>>,
    /// Optional dynamically-generated labels.
    #[cfg_attr(feature = "config", serde(default))]
    pub dynamic_labels: Option<Vec<DynamicLabelConfig>>,
    /// Static labels attached to output. `None` when absent from YAML;
    /// `Some(empty map)` for an explicit `labels: {}`.
    #[cfg_attr(feature = "config", serde(default))]
    pub labels: Option<HashMap<String, String>>,
    /// Output sink; defaults to stdout when absent.
    #[cfg_attr(feature = "config", serde(default = "default_sink"))]
    pub sink: SinkConfig,
    /// Optional start-time offset (duration string, e.g. "5s", "500ms").
    #[cfg_attr(feature = "config", serde(default))]
    pub phase_offset: Option<String>,
    /// Optional name of a shared clock group — presumably aligns timing of
    /// scenarios in the same group; confirm in the scheduler.
    #[cfg_attr(feature = "config", serde(default))]
    pub clock_group: Option<String>,
    /// Set programmatically when the clock group was auto-assigned; never
    /// read from or written to config files (`serde(skip)`).
    #[cfg_attr(feature = "config", serde(skip))]
    pub clock_group_is_auto: Option<bool>,
    /// Optional timing jitter amount — units not evident from here; confirm.
    #[cfg_attr(feature = "config", serde(default))]
    pub jitter: Option<f64>,
    /// Optional seed making jitter reproducible.
    #[cfg_attr(feature = "config", serde(default))]
    pub jitter_seed: Option<u64>,
    /// Sink-failure policy; defaults to `OnSinkError::Warn`.
    #[cfg_attr(feature = "config", serde(default))]
    pub on_sink_error: OnSinkError,
}
/// A metrics scenario: shared schedule fields plus a value generator and a
/// sample encoder.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "config", derive(serde::Serialize, serde::Deserialize))]
pub struct ScenarioConfig {
    /// Shared schedule fields, flattened into the top-level YAML mapping.
    #[cfg_attr(feature = "config", serde(flatten))]
    pub base: BaseScheduleConfig,
    /// Metric value generator (e.g. constant, sine, csv_replay).
    pub generator: GeneratorConfig,
    /// Sample encoder; defaults to Prometheus text exposition when absent.
    #[cfg_attr(feature = "config", serde(default = "default_encoder"))]
    pub encoder: EncoderConfig,
}
// Deref to the shared schedule fields so callers can write `config.rate`
// instead of `config.base.rate`. Deliberate Deref-for-composition (normally
// reserved for smart pointers); the same pattern is used for every scenario
// type in this module.
impl std::ops::Deref for ScenarioConfig {
    type Target = BaseScheduleConfig;
    fn deref(&self) -> &BaseScheduleConfig {
        &self.base
    }
}
impl std::ops::DerefMut for ScenarioConfig {
    fn deref_mut(&mut self) -> &mut BaseScheduleConfig {
        &mut self.base
    }
}
/// Statistical distribution from which histogram/summary observations are
/// drawn.
///
/// Internally tagged: YAML selects the variant with
/// `type: exponential | normal | uniform`. `#[non_exhaustive]` so new
/// distributions can be added without a breaking change.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "config", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "config", serde(tag = "type"))]
#[non_exhaustive]
pub enum DistributionConfig {
    /// Exponential distribution with the given rate parameter.
    #[cfg_attr(feature = "config", serde(rename = "exponential"))]
    Exponential {
        rate: f64,
    },
    /// Normal (Gaussian) distribution.
    #[cfg_attr(feature = "config", serde(rename = "normal"))]
    Normal {
        mean: f64,
        stddev: f64,
    },
    /// Uniform distribution between `min` and `max` — bound inclusivity not
    /// evident from here; confirm in the sampler.
    #[cfg_attr(feature = "config", serde(rename = "uniform"))]
    Uniform {
        min: f64,
        max: f64,
    },
}
/// A histogram scenario: draws observations from `distribution` and encodes
/// them as a histogram.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "config", derive(serde::Serialize, serde::Deserialize))]
pub struct HistogramScenarioConfig {
    /// Shared schedule fields, flattened into the top-level YAML mapping.
    #[cfg_attr(feature = "config", serde(flatten))]
    pub base: BaseScheduleConfig,
    /// Explicit bucket upper bounds; `None` presumably selects a default
    /// bucket layout — confirm in the generator.
    #[cfg_attr(feature = "config", serde(default))]
    pub buckets: Option<Vec<f64>>,
    /// Distribution observations are drawn from.
    pub distribution: DistributionConfig,
    /// Observations recorded per tick (`None` = generator default).
    #[cfg_attr(feature = "config", serde(default))]
    pub observations_per_tick: Option<u64>,
    /// Per-second drift applied to the distribution mean — presumably;
    /// confirm in the generator.
    #[cfg_attr(feature = "config", serde(default))]
    pub mean_shift_per_sec: Option<f64>,
    /// Optional RNG seed for reproducible draws.
    #[cfg_attr(feature = "config", serde(default))]
    pub seed: Option<u64>,
    /// Sample encoder; defaults to Prometheus text exposition when absent.
    #[cfg_attr(feature = "config", serde(default = "default_encoder"))]
    pub encoder: EncoderConfig,
}
// Deref to the shared schedule fields (`config.rate` instead of
// `config.base.rate`); same deliberate pattern as the other scenario types.
impl std::ops::Deref for HistogramScenarioConfig {
    type Target = BaseScheduleConfig;
    fn deref(&self) -> &BaseScheduleConfig {
        &self.base
    }
}
impl std::ops::DerefMut for HistogramScenarioConfig {
    fn deref_mut(&mut self) -> &mut BaseScheduleConfig {
        &mut self.base
    }
}
/// A summary scenario: draws observations from `distribution` and encodes
/// them as a summary with the requested quantiles.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "config", derive(serde::Serialize, serde::Deserialize))]
pub struct SummaryScenarioConfig {
    /// Shared schedule fields, flattened into the top-level YAML mapping.
    #[cfg_attr(feature = "config", serde(flatten))]
    pub base: BaseScheduleConfig,
    /// Quantiles to report; `None` presumably selects a default set —
    /// confirm in the generator.
    #[cfg_attr(feature = "config", serde(default))]
    pub quantiles: Option<Vec<f64>>,
    /// Distribution observations are drawn from.
    pub distribution: DistributionConfig,
    /// Observations recorded per tick (`None` = generator default).
    #[cfg_attr(feature = "config", serde(default))]
    pub observations_per_tick: Option<u64>,
    /// Per-second drift applied to the distribution mean — presumably;
    /// confirm in the generator.
    #[cfg_attr(feature = "config", serde(default))]
    pub mean_shift_per_sec: Option<f64>,
    /// Optional RNG seed for reproducible draws.
    #[cfg_attr(feature = "config", serde(default))]
    pub seed: Option<u64>,
    /// Sample encoder; defaults to Prometheus text exposition when absent.
    #[cfg_attr(feature = "config", serde(default = "default_encoder"))]
    pub encoder: EncoderConfig,
}
// Deref to the shared schedule fields (`config.rate` instead of
// `config.base.rate`); same deliberate pattern as the other scenario types.
impl std::ops::Deref for SummaryScenarioConfig {
    type Target = BaseScheduleConfig;
    fn deref(&self) -> &BaseScheduleConfig {
        &self.base
    }
}
impl std::ops::DerefMut for SummaryScenarioConfig {
    fn deref_mut(&mut self) -> &mut BaseScheduleConfig {
        &mut self.base
    }
}
/// One entry of the scenario list, discriminated in YAML by a `signal_type`
/// tag ("metrics" | "logs" | "histogram" | "summary").
///
/// `#[non_exhaustive]`: matches outside this crate must keep a wildcard arm
/// so new signal types are not a breaking change.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "config", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "config", serde(tag = "signal_type"))]
#[non_exhaustive]
pub enum ScenarioEntry {
    /// `signal_type: metrics`
    #[cfg_attr(feature = "config", serde(rename = "metrics"))]
    Metrics(ScenarioConfig),
    /// `signal_type: logs`
    #[cfg_attr(feature = "config", serde(rename = "logs"))]
    Logs(LogScenarioConfig),
    /// `signal_type: histogram`
    #[cfg_attr(feature = "config", serde(rename = "histogram"))]
    Histogram(HistogramScenarioConfig),
    /// `signal_type: summary`
    #[cfg_attr(feature = "config", serde(rename = "summary"))]
    Summary(SummaryScenarioConfig),
}
impl ScenarioEntry {
pub fn base(&self) -> &BaseScheduleConfig {
match self {
ScenarioEntry::Metrics(c) => &c.base,
ScenarioEntry::Logs(c) => &c.base,
ScenarioEntry::Histogram(c) => &c.base,
ScenarioEntry::Summary(c) => &c.base,
}
}
pub fn phase_offset(&self) -> Option<&str> {
self.base().phase_offset.as_deref()
}
pub fn clock_group(&self) -> Option<&str> {
self.base().clock_group.as_deref()
}
pub fn clock_group_is_auto(&self) -> Option<bool> {
self.base().clock_group_is_auto
}
#[allow(unreachable_patterns)]
pub fn signal_type_name(&self) -> &'static str {
match self {
ScenarioEntry::Metrics(_) => "metrics",
ScenarioEntry::Logs(_) => "logs",
ScenarioEntry::Histogram(_) => "histogram",
ScenarioEntry::Summary(_) => "summary",
_ => "unknown",
}
}
}
/// Validate an optional explicit CSV column list: when present it must be
/// non-empty, and both the column indices and the column names must be
/// unique. `None` (auto-detect from the header) is always valid.
fn validate_csv_columns(columns: &Option<Vec<CsvColumnSpec>>) -> Result<(), SondaError> {
    let cols = match columns {
        Some(cols) => cols,
        None => return Ok(()),
    };
    if cols.is_empty() {
        return Err(SondaError::Config(ConfigError::invalid(
            "csv_replay: 'columns' must not be empty; provide at least one column spec or omit the field",
        )));
    }
    // Indices are checked before names, so a duplicate index is reported
    // even when a duplicate name appears earlier in the list.
    let mut seen_indices = std::collections::HashSet::with_capacity(cols.len());
    if let Some(dup) = cols.iter().find(|spec| !seen_indices.insert(spec.index)) {
        return Err(SondaError::Config(ConfigError::invalid(format!(
            "csv_replay: duplicate column index {}; each column index must be unique",
            dup.index
        ))));
    }
    let mut seen_names = std::collections::HashSet::with_capacity(cols.len());
    if let Some(dup) = cols.iter().find(|spec| !seen_names.insert(&spec.name)) {
        return Err(SondaError::Config(ConfigError::invalid(format!(
            "csv_replay: duplicate column name '{}'; each column name must be unique",
            dup.name
        ))));
    }
    Ok(())
}
/// Return the first non-empty, non-comment (`#`-prefixed) line of `path`,
/// untrimmed, for CSV header sniffing.
///
/// I/O failures (open or read) are wrapped as `GeneratorError::FileRead`;
/// a file containing only blank/comment lines is a config error.
fn read_csv_header(path: &str) -> Result<String, SondaError> {
    use std::io::BufRead;
    // One wrapper shared by the open error and the per-line read errors.
    // Captures only `path` by reference, so the closure is `Copy` and can be
    // handed to `map_err` more than once.
    let file_read_err = |e: std::io::Error| {
        SondaError::Generator(crate::GeneratorError::FileRead {
            path: path.to_string(),
            source: e,
        })
    };
    let file = std::fs::File::open(path).map_err(file_read_err)?;
    for line_result in std::io::BufReader::new(file).lines() {
        let line = line_result.map_err(file_read_err)?;
        let trimmed = line.trim();
        if !trimmed.is_empty() && !trimmed.starts_with('#') {
            // Return the raw line (not the trimmed view) so downstream
            // parsing sees the original bytes.
            return Ok(line);
        }
    }
    Err(SondaError::Config(ConfigError::invalid(format!(
        "csv_replay: file {:?} has no non-comment, non-empty lines",
        path
    ))))
}
/// Thin local wrapper over the shared header-detection heuristic in
/// `generator::csv_header`, kept for readability at the call site.
fn is_csv_header_line(line: &str) -> bool {
    crate::generator::csv_header::is_header_line(line)
}
pub fn expand_scenario(config: ScenarioConfig) -> Result<Vec<ScenarioConfig>, SondaError> {
let specs = match &config.generator {
GeneratorConfig::CsvReplay { columns, file, .. } => {
validate_csv_columns(columns)?;
if let Some(ref cols) = columns {
cols.clone()
} else {
let header_line = read_csv_header(file)?;
if !is_csv_header_line(&header_line) {
return Err(SondaError::Config(ConfigError::invalid(
"csv_replay: CSV file has no header row (first data line is all numeric); \
provide explicit 'columns' in the config",
)));
}
let parsed = crate::generator::csv_header::parse_header_row(&header_line)?;
let mut auto_specs = Vec::with_capacity(parsed.len().saturating_sub(1));
for (i, ph) in parsed.into_iter().enumerate().skip(1) {
let name = ph.metric_name.ok_or_else(|| {
SondaError::Config(ConfigError::invalid(format!(
"csv_replay: column {} has no metric name \
(header has labels only with no __name__)",
i
)))
})?;
let labels = if ph.labels.is_empty() {
None
} else {
Some(ph.labels)
};
auto_specs.push(CsvColumnSpec {
index: i,
name,
labels,
});
}
if auto_specs.is_empty() {
return Err(SondaError::Config(ConfigError::invalid(
"csv_replay: no data columns found after skipping column 0",
)));
}
auto_specs
}
}
_ => return Ok(vec![config]),
};
let expanded = specs
.into_iter()
.map(|spec| {
let mut child = config.clone();
child.base.name = spec.name;
if let Some(ref col_labels) = spec.labels {
let merged = child.base.labels.get_or_insert_with(HashMap::new);
for (k, v) in col_labels {
merged.insert(k.clone(), v.clone());
}
}
if let GeneratorConfig::CsvReplay {
ref mut column,
ref mut columns,
..
} = child.generator
{
*column = Some(spec.index);
*columns = None;
}
child
})
.collect();
Ok(expanded)
}
/// Expand a scenario-list entry into one or more entries.
///
/// Only metrics scenarios can fan out (CSV multi-column replay, via
/// `expand_scenario`); every other signal type passes through unchanged.
pub fn expand_entry(entry: ScenarioEntry) -> Result<Vec<ScenarioEntry>, SondaError> {
    match entry {
        ScenarioEntry::Metrics(config) => expand_scenario(config)
            .map(|configs| configs.into_iter().map(ScenarioEntry::Metrics).collect()),
        other => Ok(vec![other]),
    }
}
/// A logs scenario: shared schedule fields plus a log generator and a log
/// encoder.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "config", derive(serde::Serialize, serde::Deserialize))]
pub struct LogScenarioConfig {
    /// Shared schedule fields, flattened into the top-level YAML mapping.
    #[cfg_attr(feature = "config", serde(flatten))]
    pub base: BaseScheduleConfig,
    /// Log line generator (e.g. template-based).
    pub generator: LogGeneratorConfig,
    /// Log encoder; defaults to JSON Lines when absent.
    #[cfg_attr(feature = "config", serde(default = "default_log_encoder"))]
    pub encoder: EncoderConfig,
}
// Deref to the shared schedule fields (`config.rate` instead of
// `config.base.rate`); same deliberate pattern as the other scenario types.
impl std::ops::Deref for LogScenarioConfig {
    type Target = BaseScheduleConfig;
    fn deref(&self) -> &BaseScheduleConfig {
        &self.base
    }
}
impl std::ops::DerefMut for LogScenarioConfig {
    fn deref_mut(&mut self) -> &mut BaseScheduleConfig {
        &mut self.base
    }
}
#[cfg(all(test, feature = "config"))]
mod tests {
use std::collections::BTreeMap;
use super::*;
#[test]
fn scenario_config_phase_offset_deserializes_from_yaml() {
let yaml = r#"
name: test_metric
rate: 10
generator:
type: constant
value: 1.0
phase_offset: "5s"
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert_eq!(config.phase_offset.as_deref(), Some("5s"));
}
#[test]
fn scenario_config_phase_offset_defaults_to_none() {
let yaml = r#"
name: test_metric
rate: 10
generator:
type: constant
value: 1.0
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert!(config.phase_offset.is_none());
}
#[test]
fn scenario_config_phase_offset_milliseconds() {
let yaml = r#"
name: ms_test
rate: 10
generator:
type: constant
value: 1.0
phase_offset: "500ms"
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert_eq!(config.phase_offset.as_deref(), Some("500ms"));
}
#[test]
fn scenario_config_phase_offset_minutes() {
let yaml = r#"
name: min_test
rate: 10
generator:
type: constant
value: 1.0
phase_offset: "2m"
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert_eq!(config.phase_offset.as_deref(), Some("2m"));
}
#[test]
fn log_scenario_config_phase_offset_deserializes_from_yaml() {
let yaml = r#"
name: log_test
rate: 10
generator:
type: template
templates:
- message: "test"
field_pools: {}
phase_offset: "3s"
"#;
let config: LogScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert_eq!(config.phase_offset.as_deref(), Some("3s"));
}
#[test]
fn log_scenario_config_phase_offset_defaults_to_none() {
let yaml = r#"
name: log_test
rate: 10
generator:
type: template
templates:
- message: "test"
field_pools: {}
"#;
let config: LogScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert!(config.phase_offset.is_none());
}
#[test]
fn scenario_config_clock_group_deserializes_from_yaml() {
let yaml = r#"
name: group_test
rate: 10
generator:
type: constant
value: 1.0
clock_group: alert-test
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert_eq!(config.clock_group.as_deref(), Some("alert-test"));
}
#[test]
fn scenario_config_clock_group_defaults_to_none() {
let yaml = r#"
name: no_group
rate: 10
generator:
type: constant
value: 1.0
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert!(config.clock_group.is_none());
}
#[test]
fn log_scenario_config_clock_group_deserializes_from_yaml() {
let yaml = r#"
name: log_group
rate: 10
generator:
type: template
templates:
- message: "test"
field_pools: {}
clock_group: log-sync
"#;
let config: LogScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert_eq!(config.clock_group.as_deref(), Some("log-sync"));
}
#[test]
fn log_scenario_config_clock_group_defaults_to_none() {
let yaml = r#"
name: log_no_group
rate: 10
generator:
type: template
templates:
- message: "test"
field_pools: {}
"#;
let config: LogScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert!(config.clock_group.is_none());
}
#[test]
fn scenario_config_jitter_deserializes_from_yaml() {
let yaml = r#"
name: jitter_test
rate: 10
generator:
type: constant
value: 1.0
jitter: 3.5
jitter_seed: 42
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert_eq!(config.jitter, Some(3.5));
assert_eq!(config.jitter_seed, Some(42));
}
#[test]
fn scenario_config_jitter_defaults_to_none() {
let yaml = r#"
name: no_jitter
rate: 10
generator:
type: constant
value: 1.0
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert!(config.jitter.is_none());
assert!(config.jitter_seed.is_none());
}
#[test]
fn scenario_config_jitter_without_seed() {
let yaml = r#"
name: jitter_no_seed
rate: 10
generator:
type: sine
amplitude: 20
period_secs: 60
offset: 50
jitter: 5.0
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert_eq!(config.jitter, Some(5.0));
assert!(config.jitter_seed.is_none());
}
#[test]
fn log_scenario_config_jitter_deserializes_from_yaml() {
let yaml = r#"
name: log_jitter
rate: 10
generator:
type: template
templates:
- message: "test"
field_pools: {}
jitter: 2.0
jitter_seed: 99
"#;
let config: LogScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert_eq!(config.jitter, Some(2.0));
assert_eq!(config.jitter_seed, Some(99));
}
#[test]
fn log_scenario_config_labels_deserialize_from_yaml() {
let yaml = r#"
name: labeled_logs
rate: 10
generator:
type: template
templates:
- message: "test"
field_pools: {}
labels:
device: wlan0
hostname: router-01
"#;
let config: LogScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
let labels = config.labels.as_ref().expect("labels must be Some");
assert_eq!(labels.get("device").map(String::as_str), Some("wlan0"));
assert_eq!(
labels.get("hostname").map(String::as_str),
Some("router-01")
);
assert_eq!(labels.len(), 2);
}
#[test]
fn log_scenario_config_labels_default_to_none() {
let yaml = r#"
name: no_labels_logs
rate: 10
generator:
type: template
templates:
- message: "test"
field_pools: {}
"#;
let config: LogScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert!(
config.labels.is_none(),
"labels must default to None when not in YAML"
);
}
#[test]
fn log_scenario_config_empty_labels_deserializes_as_some_empty_map() {
let yaml = r#"
name: empty_labels
rate: 10
generator:
type: template
templates:
- message: "test"
field_pools: {}
labels: {}
"#;
let config: LogScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
let labels = config
.labels
.as_ref()
.expect("labels must be Some for explicit empty map");
assert!(labels.is_empty(), "labels must be an empty map");
}
#[test]
fn scenario_config_labels_deserialize_from_yaml() {
let yaml = r#"
name: metric_with_labels
rate: 10
generator:
type: constant
value: 1.0
labels:
zone: eu1
env: production
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
let labels = config.labels.as_ref().expect("labels must be Some");
assert_eq!(labels.get("zone").map(String::as_str), Some("eu1"));
assert_eq!(labels.get("env").map(String::as_str), Some("production"));
}
#[test]
fn scenario_config_both_phase_offset_and_clock_group() {
let yaml = r#"
name: both_fields
rate: 10
generator:
type: constant
value: 1.0
phase_offset: "30s"
clock_group: compound-alert
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert_eq!(config.phase_offset.as_deref(), Some("30s"));
assert_eq!(config.clock_group.as_deref(), Some("compound-alert"));
}
#[test]
fn scenario_entry_phase_offset_returns_value_for_metrics() {
let entry = ScenarioEntry::Metrics(ScenarioConfig {
base: BaseScheduleConfig {
name: "accessor_test".to_string(),
rate: 10.0,
duration: None,
gaps: None,
bursts: None,
cardinality_spikes: None,
dynamic_labels: None,
labels: None,
sink: SinkConfig::Stdout,
phase_offset: Some("5s".to_string()),
clock_group: None,
clock_group_is_auto: None,
jitter: None,
jitter_seed: None,
on_sink_error: crate::OnSinkError::Warn,
},
generator: GeneratorConfig::Constant { value: 1.0 },
encoder: EncoderConfig::PrometheusText { precision: None },
});
assert_eq!(entry.phase_offset(), Some("5s"));
}
#[test]
fn scenario_entry_phase_offset_returns_none_for_metrics_without_offset() {
let entry = ScenarioEntry::Metrics(ScenarioConfig {
base: BaseScheduleConfig {
name: "no_offset".to_string(),
rate: 10.0,
duration: None,
gaps: None,
bursts: None,
cardinality_spikes: None,
dynamic_labels: None,
labels: None,
sink: SinkConfig::Stdout,
phase_offset: None,
clock_group: None,
clock_group_is_auto: None,
jitter: None,
jitter_seed: None,
on_sink_error: crate::OnSinkError::Warn,
},
generator: GeneratorConfig::Constant { value: 1.0 },
encoder: EncoderConfig::PrometheusText { precision: None },
});
assert_eq!(entry.phase_offset(), None);
}
#[test]
fn scenario_entry_phase_offset_returns_value_for_logs() {
let entry = ScenarioEntry::Logs(LogScenarioConfig {
base: BaseScheduleConfig {
name: "log_accessor".to_string(),
rate: 10.0,
duration: None,
gaps: None,
bursts: None,
cardinality_spikes: None,
dynamic_labels: None,
labels: None,
sink: SinkConfig::Stdout,
phase_offset: Some("10s".to_string()),
clock_group: None,
clock_group_is_auto: None,
jitter: None,
jitter_seed: None,
on_sink_error: crate::OnSinkError::Warn,
},
generator: LogGeneratorConfig::Template {
templates: vec![crate::generator::TemplateConfig {
message: "test".to_string(),
field_pools: BTreeMap::new(),
}],
severity_weights: None,
seed: Some(0),
},
encoder: EncoderConfig::JsonLines { precision: None },
});
assert_eq!(entry.phase_offset(), Some("10s"));
}
#[test]
fn scenario_entry_clock_group_returns_value_for_metrics() {
let entry = ScenarioEntry::Metrics(ScenarioConfig {
base: BaseScheduleConfig {
name: "group_accessor".to_string(),
rate: 10.0,
duration: None,
gaps: None,
bursts: None,
cardinality_spikes: None,
dynamic_labels: None,
labels: None,
sink: SinkConfig::Stdout,
phase_offset: None,
clock_group: Some("my-group".to_string()),
clock_group_is_auto: None,
jitter: None,
jitter_seed: None,
on_sink_error: crate::OnSinkError::Warn,
},
generator: GeneratorConfig::Constant { value: 1.0 },
encoder: EncoderConfig::PrometheusText { precision: None },
});
assert_eq!(entry.clock_group(), Some("my-group"));
}
#[test]
fn scenario_entry_clock_group_returns_none_when_absent() {
let entry = ScenarioEntry::Metrics(ScenarioConfig {
base: BaseScheduleConfig {
name: "no_group_acc".to_string(),
rate: 10.0,
duration: None,
gaps: None,
bursts: None,
cardinality_spikes: None,
dynamic_labels: None,
labels: None,
sink: SinkConfig::Stdout,
phase_offset: None,
clock_group: None,
clock_group_is_auto: None,
jitter: None,
jitter_seed: None,
on_sink_error: crate::OnSinkError::Warn,
},
generator: GeneratorConfig::Constant { value: 1.0 },
encoder: EncoderConfig::PrometheusText { precision: None },
});
assert_eq!(entry.clock_group(), None);
}
#[test]
fn scenario_entry_base_returns_shared_config_for_metrics() {
let entry = ScenarioEntry::Metrics(ScenarioConfig {
base: BaseScheduleConfig {
name: "base_test".to_string(),
rate: 42.0,
duration: Some("5s".to_string()),
gaps: None,
bursts: None,
cardinality_spikes: None,
dynamic_labels: None,
labels: None,
sink: SinkConfig::Stdout,
phase_offset: None,
clock_group: None,
clock_group_is_auto: None,
jitter: None,
jitter_seed: None,
on_sink_error: crate::OnSinkError::Warn,
},
generator: GeneratorConfig::Constant { value: 1.0 },
encoder: EncoderConfig::PrometheusText { precision: None },
});
assert_eq!(entry.base().name, "base_test");
assert_eq!(entry.base().rate, 42.0);
}
#[test]
fn scenario_entry_base_returns_shared_config_for_logs() {
let entry = ScenarioEntry::Logs(LogScenarioConfig {
base: BaseScheduleConfig {
name: "log_base".to_string(),
rate: 99.0,
duration: None,
gaps: None,
bursts: None,
cardinality_spikes: None,
dynamic_labels: None,
labels: None,
sink: SinkConfig::Stdout,
phase_offset: None,
clock_group: None,
clock_group_is_auto: None,
jitter: None,
jitter_seed: None,
on_sink_error: crate::OnSinkError::Warn,
},
generator: LogGeneratorConfig::Template {
templates: vec![crate::generator::TemplateConfig {
message: "test".to_string(),
field_pools: BTreeMap::new(),
}],
severity_weights: None,
seed: Some(0),
},
encoder: EncoderConfig::JsonLines { precision: None },
});
assert_eq!(entry.base().name, "log_base");
assert_eq!(entry.base().rate, 99.0);
}
#[test]
fn phase_offset_values_are_parseable_as_durations() {
use crate::config::validate::parse_duration;
let yaml = r#"
name: parse_test
rate: 10
generator:
type: constant
value: 1.0
phase_offset: "3s"
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
let dur = parse_duration(config.phase_offset.as_deref().unwrap()).unwrap();
assert_eq!(dur, std::time::Duration::from_secs(3));
}
#[test]
fn scenario_config_cardinality_spikes_deserializes_from_yaml() {
let yaml = r#"
name: spike_test
rate: 10
generator:
type: constant
value: 1.0
cardinality_spikes:
- label: pod_name
every: 2m
for: 30s
cardinality: 500
strategy: counter
prefix: "pod-"
- label: error_msg
every: 5m
for: 1m
cardinality: 1000
strategy: random
seed: 42
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
let spikes = config
.cardinality_spikes
.as_ref()
.expect("cardinality_spikes must be Some");
assert_eq!(spikes.len(), 2);
assert_eq!(spikes[0].label, "pod_name");
assert_eq!(spikes[0].cardinality, 500);
assert_eq!(spikes[0].strategy, SpikeStrategy::Counter);
assert_eq!(spikes[0].prefix.as_deref(), Some("pod-"));
assert_eq!(spikes[1].label, "error_msg");
assert_eq!(spikes[1].strategy, SpikeStrategy::Random);
assert_eq!(spikes[1].seed, Some(42));
}
#[test]
fn scenario_config_cardinality_spikes_defaults_to_none() {
let yaml = r#"
name: no_spike
rate: 10
generator:
type: constant
value: 1.0
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert!(
config.cardinality_spikes.is_none(),
"cardinality_spikes must be None when absent from YAML"
);
}
#[test]
fn spike_strategy_defaults_to_counter() {
let yaml = r#"
name: default_strategy
rate: 10
generator:
type: constant
value: 1.0
cardinality_spikes:
- label: pod_name
every: 1m
for: 10s
cardinality: 10
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
let spikes = config.base.cardinality_spikes.unwrap();
assert_eq!(spikes[0].strategy, SpikeStrategy::Counter);
}
#[test]
fn log_scenario_config_cardinality_spikes_deserializes() {
let yaml = r#"
name: log_spike
rate: 10
generator:
type: template
templates:
- message: "test"
field_pools: {}
cardinality_spikes:
- label: pod_name
every: 1m
for: 10s
cardinality: 100
"#;
let config: LogScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
let spikes = config.base.cardinality_spikes.unwrap();
assert_eq!(spikes.len(), 1);
assert_eq!(spikes[0].label, "pod_name");
}
#[test]
fn backward_compatible_yaml_without_spikes() {
let yaml = r#"
name: compat_test
rate: 100
generator:
type: sine
amplitude: 5.0
period_secs: 30
offset: 10.0
labels:
hostname: t0-a1
gaps:
every: 2m
for: 20s
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert!(config.cardinality_spikes.is_none());
assert!(config.gaps.is_some());
assert_eq!(config.name, "compat_test");
}
#[test]
fn base_schedule_config_is_clone_and_debug() {
let base = BaseScheduleConfig {
name: "test".to_string(),
rate: 42.0,
duration: Some("10s".to_string()),
gaps: None,
bursts: None,
cardinality_spikes: None,
dynamic_labels: None,
labels: None,
sink: SinkConfig::Stdout,
phase_offset: None,
clock_group: None,
clock_group_is_auto: None,
jitter: None,
jitter_seed: None,
on_sink_error: crate::OnSinkError::Warn,
};
let cloned = base.clone();
assert_eq!(cloned.name, "test");
assert_eq!(cloned.rate, 42.0);
let dbg = format!("{base:?}");
assert!(
dbg.contains("BaseScheduleConfig"),
"Debug output must contain type name"
);
}
#[test]
fn scenario_config_deref_accesses_base_fields() {
let config = ScenarioConfig {
base: BaseScheduleConfig {
name: "deref_test".to_string(),
rate: 99.0,
duration: Some("5s".to_string()),
gaps: None,
bursts: None,
cardinality_spikes: None,
dynamic_labels: None,
labels: None,
sink: SinkConfig::Stdout,
phase_offset: Some("1s".to_string()),
clock_group: Some("group-a".to_string()),
clock_group_is_auto: None,
jitter: None,
jitter_seed: None,
on_sink_error: crate::OnSinkError::Warn,
},
generator: GeneratorConfig::Constant { value: 1.0 },
encoder: EncoderConfig::PrometheusText { precision: None },
};
assert_eq!(config.name, "deref_test");
assert_eq!(config.rate, 99.0);
assert_eq!(config.duration.as_deref(), Some("5s"));
assert!(config.gaps.is_none());
assert_eq!(config.phase_offset.as_deref(), Some("1s"));
assert_eq!(config.clock_group.as_deref(), Some("group-a"));
}
#[test]
fn log_scenario_config_deref_accesses_base_fields() {
let config = LogScenarioConfig {
base: BaseScheduleConfig {
name: "log_deref".to_string(),
rate: 50.0,
duration: None,
gaps: None,
bursts: None,
cardinality_spikes: None,
dynamic_labels: None,
labels: None,
sink: SinkConfig::Stdout,
phase_offset: None,
clock_group: None,
clock_group_is_auto: None,
jitter: None,
jitter_seed: None,
on_sink_error: crate::OnSinkError::Warn,
},
generator: LogGeneratorConfig::Template {
templates: vec![crate::generator::TemplateConfig {
message: "test".to_string(),
field_pools: BTreeMap::new(),
}],
severity_weights: None,
seed: Some(0),
},
encoder: EncoderConfig::JsonLines { precision: None },
};
assert_eq!(config.name, "log_deref");
assert_eq!(config.rate, 50.0);
assert!(config.duration.is_none());
}
#[test]
fn scenario_config_deref_mut_allows_base_field_mutation() {
let mut config = ScenarioConfig {
base: BaseScheduleConfig {
name: "original".to_string(),
rate: 10.0,
duration: None,
gaps: None,
bursts: None,
cardinality_spikes: None,
dynamic_labels: None,
labels: None,
sink: SinkConfig::Stdout,
phase_offset: None,
clock_group: None,
clock_group_is_auto: None,
jitter: None,
jitter_seed: None,
on_sink_error: crate::OnSinkError::Warn,
},
generator: GeneratorConfig::Constant { value: 1.0 },
encoder: EncoderConfig::PrometheusText { precision: None },
};
config.name = "mutated".to_string();
config.rate = 999.0;
config.duration = Some("30s".to_string());
assert_eq!(config.name, "mutated");
assert_eq!(config.rate, 999.0);
assert_eq!(config.duration.as_deref(), Some("30s"));
}
#[test]
fn scenario_config_flatten_deserializes_all_fields() {
let yaml = r#"
name: flatten_test
rate: 100
duration: 30s
generator:
type: sine
amplitude: 5.0
period_secs: 30
offset: 10.0
gaps:
every: 2m
for: 20s
bursts:
every: 10s
for: 2s
multiplier: 5.0
labels:
hostname: t0-a1
zone: eu1
encoder:
type: prometheus_text
sink:
type: stdout
phase_offset: "5s"
clock_group: correlation
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert_eq!(config.name, "flatten_test");
assert_eq!(config.rate, 100.0);
assert_eq!(config.duration.as_deref(), Some("30s"));
assert!(config.gaps.is_some());
assert!(config.bursts.is_some());
let labels = config.labels.as_ref().unwrap();
assert_eq!(labels.get("hostname").map(String::as_str), Some("t0-a1"));
assert!(matches!(
config.encoder,
EncoderConfig::PrometheusText { .. }
));
assert!(matches!(config.base.sink, SinkConfig::Stdout));
assert_eq!(config.phase_offset.as_deref(), Some("5s"));
assert_eq!(config.clock_group.as_deref(), Some("correlation"));
}
#[test]
fn log_scenario_config_flatten_deserializes_all_fields() {
let yaml = r#"
name: log_flatten
rate: 20
duration: 60s
generator:
type: template
templates:
- message: "hello"
field_pools: {}
labels:
env: prod
encoder:
type: syslog
hostname: myhost
app_name: myapp
sink:
type: stdout
phase_offset: "2s"
clock_group: log-group
"#;
let config: LogScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert_eq!(config.name, "log_flatten");
assert_eq!(config.rate, 20.0);
assert_eq!(config.duration.as_deref(), Some("60s"));
let labels = config.labels.as_ref().unwrap();
assert_eq!(labels.get("env").map(String::as_str), Some("prod"));
assert_eq!(config.phase_offset.as_deref(), Some("2s"));
assert_eq!(config.clock_group.as_deref(), Some("log-group"));
}
#[test]
fn scenario_config_encoder_defaults_to_prometheus_text() {
let yaml = r#"
name: enc_default
rate: 10
generator:
type: constant
value: 1.0
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert!(
matches!(config.encoder, EncoderConfig::PrometheusText { .. }),
"ScenarioConfig encoder default must be prometheus_text, got {:?}",
config.encoder
);
}
#[test]
fn log_scenario_config_encoder_defaults_to_json_lines() {
    // Minimal log scenario with no `encoder` key: the serde default
    // must produce the JsonLines encoder.
    let yaml = r#"
name: log_enc_default
rate: 10
generator:
  type: template
  templates:
    - message: "test"
      field_pools: {}
"#;
    let parsed: LogScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    let is_json_lines = matches!(parsed.encoder, EncoderConfig::JsonLines { .. });
    assert!(
        is_json_lines,
        "LogScenarioConfig encoder default must be json_lines, got {:?}",
        parsed.encoder
    );
}
#[test]
fn dynamic_labels_counter_deserializes_from_yaml() {
    // `prefix` + `cardinality` (no `values`) must select the untagged
    // Counter variant of DynamicLabelStrategy.
    let yaml = r#"
name: test
rate: 10
generator:
  type: constant
  value: 1.0
dynamic_labels:
  - key: hostname
    prefix: "host-"
    cardinality: 10
"#;
    let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    let dls = config
        .dynamic_labels
        .as_ref()
        .expect("dynamic_labels must be present");
    assert_eq!(dls.len(), 1);
    assert_eq!(dls[0].key, "hostname");
    match &dls[0].strategy {
        DynamicLabelStrategy::Counter {
            prefix,
            cardinality,
        } => {
            assert_eq!(prefix.as_deref(), Some("host-"));
            assert_eq!(*cardinality, 10);
        }
        other => panic!("expected Counter strategy, got {other:?}"),
    }
}
#[test]
fn dynamic_labels_values_list_deserializes_from_yaml() {
    // A `values` list must select the untagged ValuesList variant and
    // preserve element order.
    let yaml = r#"
name: test
rate: 10
generator:
  type: constant
  value: 1.0
dynamic_labels:
  - key: region
    values: [us-east-1, us-west-2, eu-west-1]
"#;
    let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    let dls = config
        .dynamic_labels
        .as_ref()
        .expect("dynamic_labels must be present");
    assert_eq!(dls.len(), 1);
    assert_eq!(dls[0].key, "region");
    match &dls[0].strategy {
        DynamicLabelStrategy::ValuesList { values } => {
            assert_eq!(values, &["us-east-1", "us-west-2", "eu-west-1"]);
        }
        other => panic!("expected ValuesList strategy, got {other:?}"),
    }
}
#[test]
fn dynamic_labels_defaults_to_none() {
    // Omitting `dynamic_labels` entirely must leave the field as None.
    let yaml = r#"
name: test
rate: 10
generator:
  type: constant
  value: 1.0
"#;
    let parsed: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    assert!(parsed.dynamic_labels.is_none());
}
#[test]
fn dynamic_labels_multiple_entries_deserialize() {
    // A mixed list (counter strategy + values strategy) must deserialize
    // with both entries, in document order.
    let yaml = r#"
name: test
rate: 10
generator:
  type: constant
  value: 1.0
dynamic_labels:
  - key: hostname
    prefix: "host-"
    cardinality: 10
  - key: region
    values: [us-east, eu-west]
"#;
    let parsed: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    let dls = parsed
        .dynamic_labels
        .as_ref()
        .expect("dynamic_labels must be present");
    let keys: Vec<&str> = dls.iter().map(|d| d.key.as_str()).collect();
    assert_eq!(keys, ["hostname", "region"]);
}
#[test]
fn dynamic_labels_on_log_config_deserializes() {
    // dynamic_labels is a base-schedule field, so it must also be
    // available on the log-scenario config.
    let yaml = r#"
name: test_logs
rate: 10
generator:
  type: template
  templates:
    - message: "test event"
      field_pools: {}
dynamic_labels:
  - key: pod_name
    prefix: "pod-"
    cardinality: 5
"#;
    let config: LogScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    let dls = config
        .dynamic_labels
        .as_ref()
        .expect("dynamic_labels must be present");
    assert_eq!(dls.len(), 1);
    assert_eq!(dls[0].key, "pod_name");
}
#[test]
fn dynamic_labels_counter_no_prefix_deserializes() {
    // `cardinality` alone (prefix omitted) still selects the Counter
    // variant, with prefix defaulting to None via serde(default).
    let yaml = r#"
name: test
rate: 10
generator:
  type: constant
  value: 1.0
dynamic_labels:
  - key: zone
    cardinality: 3
"#;
    let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    let dls = config
        .dynamic_labels
        .as_ref()
        .expect("dynamic_labels must be present");
    match &dls[0].strategy {
        DynamicLabelStrategy::Counter {
            prefix,
            cardinality,
        } => {
            assert!(prefix.is_none(), "prefix should be None when not specified");
            assert_eq!(*cardinality, 3);
        }
        other => panic!("expected Counter strategy, got {other:?}"),
    }
}
#[test]
fn dynamic_labels_and_static_labels_coexist() {
    // Static `labels` and `dynamic_labels` are independent fields and
    // must both survive deserialization of the same document.
    let yaml = r#"
name: test
rate: 10
generator:
  type: constant
  value: 1.0
labels:
  env: prod
dynamic_labels:
  - key: hostname
    prefix: "host-"
    cardinality: 5
"#;
    let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    assert!(config.labels.is_some(), "static labels must be present");
    assert!(
        config.dynamic_labels.is_some(),
        "dynamic labels must be present"
    );
    let static_labels = config.labels.as_ref().unwrap();
    assert_eq!(static_labels.get("env"), Some(&"prod".to_string()));
}
#[test]
fn csv_replay_columns_deserializes_from_yaml() {
    // Multi-column csv_replay: the `columns` list deserializes, while the
    // internal `column` field (serde(skip)) stays None until expansion.
    let yaml = r#"
name: multi_col
rate: 1
generator:
  type: csv_replay
  file: data.csv
  columns:
    - index: 1
      name: cpu_percent
    - index: 2
      name: mem_percent
"#;
    let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    match &config.generator {
        GeneratorConfig::CsvReplay {
            columns, column, ..
        } => {
            assert!(column.is_none(), "column is serde(skip), should be None");
            let cols = columns.as_ref().expect("columns should be Some");
            assert_eq!(cols.len(), 2);
            assert_eq!(cols[0].index, 1);
            assert_eq!(cols[0].name, "cpu_percent");
            assert_eq!(cols[1].index, 2);
            assert_eq!(cols[1].name, "mem_percent");
        }
        other => panic!("expected CsvReplay variant, got {other:?}"),
    }
}
#[test]
fn csv_replay_without_columns_field_has_none() {
    // Without a `columns` key both `columns` and the skipped `column`
    // field must come back as None.
    let yaml = r#"
name: single_col
rate: 1
generator:
  type: csv_replay
  file: data.csv
"#;
    let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    match &config.generator {
        GeneratorConfig::CsvReplay {
            columns, column, ..
        } => {
            assert_eq!(*column, None, "column is serde(skip)");
            assert!(
                columns.is_none(),
                "columns should be None when not specified"
            );
        }
        other => panic!("expected CsvReplay variant, got {other:?}"),
    }
}
#[test]
fn scenario_entry_signal_type_name_covers_all_variants() {
    // One YAML document per ScenarioEntry variant; signal_type_name()
    // must report the matching tag string for each.
    let metrics_yaml = r#"
signal_type: metrics
name: cpu
rate: 1
generator:
  type: constant
  value: 1.0
"#;
    let metrics: ScenarioEntry = serde_yaml_ng::from_str(metrics_yaml).unwrap();
    assert_eq!(metrics.signal_type_name(), "metrics");
    let logs_yaml = r#"
signal_type: logs
name: app_logs
rate: 1
generator:
  type: replay
  file: /tmp/does-not-need-to-exist.log
"#;
    // Deserialization alone must not touch the filesystem, so the replay
    // file path does not need to exist.
    let logs: ScenarioEntry = serde_yaml_ng::from_str(logs_yaml).unwrap();
    assert_eq!(logs.signal_type_name(), "logs");
    let histogram_yaml = r#"
signal_type: histogram
name: req_latency
rate: 1
observations_per_tick: 100
buckets: [0.1, 0.5, 1.0]
distribution:
  type: uniform
  min: 0.0
  max: 1.0
"#;
    let histogram: ScenarioEntry = serde_yaml_ng::from_str(histogram_yaml).unwrap();
    assert_eq!(histogram.signal_type_name(), "histogram");
    let summary_yaml = r#"
signal_type: summary
name: req_latency_summary
rate: 1
observations_per_tick: 100
quantiles: [0.5, 0.9, 0.99]
distribution:
  type: uniform
  min: 0.0
  max: 1.0
"#;
    let summary: ScenarioEntry = serde_yaml_ng::from_str(summary_yaml).unwrap();
    assert_eq!(summary.signal_type_name(), "summary");
}
}
#[cfg(test)]
mod expand_tests {
use super::*;
use crate::encoder::EncoderConfig;
use crate::generator::{CsvColumnSpec, GeneratorConfig};
use crate::sink::SinkConfig;
/// Builds a metrics `ScenarioConfig` around a `csv_replay` generator with a
/// fixed baseline (rate 10, 30s duration, one static label, jitter), so that
/// expansion tests only vary the `columns` spec under test.
fn csv_replay_config(name: &str, columns: Option<Vec<CsvColumnSpec>>) -> ScenarioConfig {
    ScenarioConfig {
        base: BaseScheduleConfig {
            name: name.to_string(),
            rate: 10.0,
            duration: Some("30s".to_string()),
            gaps: None,
            bursts: None,
            cardinality_spikes: None,
            labels: Some([("host".to_string(), "srv1".to_string())].into()),
            sink: SinkConfig::Stdout,
            phase_offset: None,
            clock_group: None,
            clock_group_is_auto: None,
            jitter: Some(0.5),
            jitter_seed: Some(42),
            dynamic_labels: None,
            on_sink_error: crate::OnSinkError::Warn,
        },
        generator: GeneratorConfig::CsvReplay {
            // Placeholder path; tests that actually read a file overwrite it.
            file: "data.csv".to_string(),
            column: None,
            repeat: Some(true),
            columns,
        },
        encoder: EncoderConfig::PrometheusText { precision: None },
    }
}
#[test]
fn auto_discover_from_header_when_no_columns() {
    // With `columns` absent, expansion reads the CSV header and derives
    // one child scenario per non-time data column.
    use std::io::Write;
    let mut tmp = tempfile::NamedTempFile::new().expect("create temp file");
    tmp.write_all(b"Time,cpu_usage\n1000,42.5\n").expect("write csv");
    tmp.flush().expect("flush");
    let mut config = csv_replay_config("single_metric", None);
    if let GeneratorConfig::CsvReplay { ref mut file, .. } = config.generator {
        *file = tmp.path().to_string_lossy().into_owned();
    }
    let expanded = expand_scenario(config).expect("must succeed");
    assert_eq!(expanded.len(), 1, "should auto-discover 1 data column");
    assert_eq!(expanded[0].name, "cpu_usage");
}
#[test]
fn no_columns_no_header_returns_error() {
    // A purely numeric CSV offers no header to auto-discover metric names
    // from, so expansion must fail with a "no header row" message.
    use std::io::Write;
    let mut tmp = tempfile::NamedTempFile::new().expect("create temp file");
    tmp.write_all(b"1000,42.5\n2000,55.3\n").expect("write csv");
    tmp.flush().expect("flush");
    let mut config = csv_replay_config("all_numeric", None);
    if let GeneratorConfig::CsvReplay { ref mut file, .. } = config.generator {
        *file = tmp.path().to_string_lossy().into_owned();
    }
    let rendered = expand_scenario(config).expect_err("must fail").to_string();
    assert!(
        rendered.contains("no header row"),
        "error must mention no header row, got: {rendered}"
    );
}
#[test]
fn non_csv_replay_passes_through() {
    // Expansion only applies to csv_replay generators; any other generator
    // must pass through unchanged as a single config.
    let config = ScenarioConfig {
        base: BaseScheduleConfig {
            name: "const_metric".to_string(),
            rate: 1.0,
            duration: None,
            gaps: None,
            bursts: None,
            cardinality_spikes: None,
            labels: None,
            sink: SinkConfig::Stdout,
            phase_offset: None,
            clock_group: None,
            clock_group_is_auto: None,
            jitter: None,
            jitter_seed: None,
            dynamic_labels: None,
            on_sink_error: crate::OnSinkError::Warn,
        },
        generator: GeneratorConfig::Constant { value: 42.0 },
        encoder: EncoderConfig::PrometheusText { precision: None },
    };
    let result = expand_scenario(config).expect("must succeed");
    assert_eq!(result.len(), 1);
    assert_eq!(result[0].name, "const_metric");
}
#[test]
fn two_column_expansion() {
    // Two column specs expand into two child configs, each carrying its
    // own column index, with file/repeat inherited and `columns` cleared.
    let cols = vec![
        CsvColumnSpec {
            index: 1,
            name: "cpu_percent".to_string(),
            labels: None,
        },
        CsvColumnSpec {
            index: 2,
            name: "mem_percent".to_string(),
            labels: None,
        },
    ];
    let config = csv_replay_config("parent", Some(cols));
    let result = expand_scenario(config).expect("must succeed");
    assert_eq!(result.len(), 2, "should produce two expanded configs");
    assert_eq!(result[0].name, "cpu_percent");
    match &result[0].generator {
        GeneratorConfig::CsvReplay {
            column,
            columns,
            file,
            repeat,
        } => {
            assert_eq!(*column, Some(1));
            assert!(columns.is_none(), "columns must be None after expansion");
            assert_eq!(file, "data.csv", "file must be inherited");
            assert_eq!(*repeat, Some(true), "repeat must be inherited");
        }
        other => panic!("expected CsvReplay, got {other:?}"),
    }
    assert_eq!(result[1].name, "mem_percent");
    match &result[1].generator {
        GeneratorConfig::CsvReplay {
            column, columns, ..
        } => {
            assert_eq!(*column, Some(2));
            assert!(columns.is_none());
        }
        other => panic!("expected CsvReplay, got {other:?}"),
    }
}
#[test]
fn three_column_expansion() {
    // Three specs expand to three children whose names and column indices
    // follow the spec order.
    let cols = vec![
        CsvColumnSpec {
            index: 1,
            name: "cpu".to_string(),
            labels: None,
        },
        CsvColumnSpec {
            index: 2,
            name: "mem".to_string(),
            labels: None,
        },
        CsvColumnSpec {
            index: 3,
            name: "disk_io".to_string(),
            labels: None,
        },
    ];
    let config = csv_replay_config("parent", Some(cols));
    let result = expand_scenario(config).expect("must succeed");
    assert_eq!(result.len(), 3);
    assert_eq!(result[0].name, "cpu");
    assert_eq!(result[1].name, "mem");
    assert_eq!(result[2].name, "disk_io");
    // (result position, expected CSV column index) pairs.
    for (i, expected_col) in [(0, 1), (1, 2), (2, 3)] {
        match &result[i].generator {
            GeneratorConfig::CsvReplay { column, .. } => {
                assert_eq!(*column, Some(expected_col), "config[{i}] column");
            }
            other => panic!("expected CsvReplay, got {other:?}"),
        }
    }
}
#[test]
fn expanded_configs_inherit_parent_fields() {
    // Each expanded child must carry over the parent's schedule fields:
    // rate, duration, labels, jitter settings, encoder, and sink.
    let cols = vec![CsvColumnSpec {
        index: 1,
        name: "metric_a".to_string(),
        labels: None,
    }];
    let config = csv_replay_config("parent", Some(cols));
    let result = expand_scenario(config).expect("must succeed");
    assert_eq!(result.len(), 1);
    let child = &result[0];
    assert_eq!(child.rate, 10.0, "rate must be inherited");
    assert_eq!(
        child.duration.as_deref(),
        Some("30s"),
        "duration must be inherited"
    );
    let labels = child.labels.as_ref().expect("labels must be inherited");
    assert_eq!(labels.get("host").map(|s| s.as_str()), Some("srv1"));
    assert_eq!(child.jitter, Some(0.5));
    assert_eq!(child.jitter_seed, Some(42));
    assert!(matches!(
        child.encoder,
        EncoderConfig::PrometheusText { .. }
    ));
    assert!(matches!(child.sink, SinkConfig::Stdout));
}
#[test]
fn expanded_configs_inherit_non_none_gaps_and_bursts() {
    // Gap and burst schedules set on the parent must be cloned into the
    // expanded child with all their fields intact.
    let cols = vec![CsvColumnSpec {
        index: 1,
        name: "metric_a".to_string(),
        labels: None,
    }];
    let mut config = csv_replay_config("parent", Some(cols));
    config.base.gaps = Some(GapConfig {
        every: "2m".to_string(),
        r#for: "20s".to_string(),
    });
    config.base.bursts = Some(BurstConfig {
        every: "10s".to_string(),
        r#for: "2s".to_string(),
        multiplier: 3.0,
    });
    let result = expand_scenario(config).expect("must succeed");
    assert_eq!(result.len(), 1);
    let child = &result[0];
    let gaps = child.gaps.as_ref().expect("gaps must be inherited");
    assert_eq!(gaps.every, "2m");
    assert_eq!(gaps.r#for, "20s");
    let bursts = child.bursts.as_ref().expect("bursts must be inherited");
    assert_eq!(bursts.every, "10s");
    assert_eq!(bursts.r#for, "2s");
    assert_eq!(bursts.multiplier, 3.0);
}
#[test]
fn empty_columns_list_returns_error() {
    // An explicit but empty `columns` list is a configuration mistake and
    // must be rejected rather than silently expanding to nothing.
    let err = expand_scenario(csv_replay_config("empty", Some(Vec::new())))
        .expect_err("must fail");
    let rendered = err.to_string();
    assert!(
        rendered.contains("must not be empty"),
        "error must mention empty list, got: {rendered}"
    );
}
#[test]
fn duplicate_column_index_returns_error() {
    // Two specs pointing at the same CSV column index must be rejected,
    // naming the offending index.
    let cols = vec![
        CsvColumnSpec { index: 2, name: "cpu".to_string(), labels: None },
        CsvColumnSpec { index: 2, name: "mem".to_string(), labels: None },
    ];
    let err = expand_scenario(csv_replay_config("dupe_idx", Some(cols)))
        .expect_err("must fail");
    let rendered = err.to_string();
    assert!(
        rendered.contains("duplicate column index 2"),
        "error must mention duplicate index, got: {rendered}"
    );
}
#[test]
fn duplicate_column_index_not_first_returns_error() {
    // Duplicate detection must also trigger when the clash occurs later
    // in the list, not just between the first two entries.
    let cols = vec![
        CsvColumnSpec {
            index: 1,
            name: "cpu".to_string(),
            labels: None,
        },
        CsvColumnSpec {
            index: 3,
            name: "mem".to_string(),
            labels: None,
        },
        CsvColumnSpec {
            index: 3,
            name: "disk".to_string(),
            labels: None,
        },
    ];
    let config = csv_replay_config("dupe_idx_late", Some(cols));
    let err = expand_scenario(config).expect_err("must fail");
    let msg = err.to_string();
    assert!(
        msg.contains("duplicate column index 3"),
        "error must mention duplicate index, got: {msg}"
    );
}
#[test]
fn duplicate_column_name_returns_error() {
    // Two specs producing the same metric name must be rejected, quoting
    // the offending name.
    let cols = vec![
        CsvColumnSpec { index: 1, name: "cpu".to_string(), labels: None },
        CsvColumnSpec { index: 2, name: "cpu".to_string(), labels: None },
    ];
    let err = expand_scenario(csv_replay_config("dupe_name", Some(cols)))
        .expect_err("must fail");
    let rendered = err.to_string();
    assert!(
        rendered.contains("duplicate column name 'cpu'"),
        "error must mention duplicate name, got: {rendered}"
    );
}
#[test]
fn duplicate_column_name_not_first_returns_error() {
    // Name-duplicate detection must also trigger for clashes later in the
    // list, not just between the first two entries.
    let cols = vec![
        CsvColumnSpec {
            index: 1,
            name: "cpu".to_string(),
            labels: None,
        },
        CsvColumnSpec {
            index: 2,
            name: "mem".to_string(),
            labels: None,
        },
        CsvColumnSpec {
            index: 3,
            name: "mem".to_string(),
            labels: None,
        },
    ];
    let config = csv_replay_config("dupe_name_late", Some(cols));
    let err = expand_scenario(config).expect_err("must fail");
    let msg = err.to_string();
    assert!(
        msg.contains("duplicate column name 'mem'"),
        "error must mention duplicate name, got: {msg}"
    );
}
#[test]
fn expand_entry_metrics_two_columns() {
    // expand_entry on a Metrics entry must delegate to scenario expansion
    // and wrap each child back into a Metrics entry.
    let cols = vec![
        CsvColumnSpec {
            index: 1,
            name: "cpu".to_string(),
            labels: None,
        },
        CsvColumnSpec {
            index: 2,
            name: "mem".to_string(),
            labels: None,
        },
    ];
    let config = csv_replay_config("parent", Some(cols));
    let entry = ScenarioEntry::Metrics(config);
    let result = expand_entry(entry).expect("must succeed");
    assert_eq!(result.len(), 2);
    assert!(matches!(result[0], ScenarioEntry::Metrics(_)));
    assert!(matches!(result[1], ScenarioEntry::Metrics(_)));
}
#[test]
fn expand_entry_logs_passes_through() {
    // Log entries have no column expansion; expand_entry must return the
    // single Logs entry unchanged.
    use crate::generator::{LogGeneratorConfig, TemplateConfig};
    use std::collections::BTreeMap;
    let entry = ScenarioEntry::Logs(LogScenarioConfig {
        base: BaseScheduleConfig {
            name: "app_logs".to_string(),
            rate: 10.0,
            duration: None,
            gaps: None,
            bursts: None,
            cardinality_spikes: None,
            labels: None,
            sink: SinkConfig::Stdout,
            phase_offset: None,
            clock_group: None,
            clock_group_is_auto: None,
            jitter: None,
            jitter_seed: None,
            dynamic_labels: None,
            on_sink_error: crate::OnSinkError::Warn,
        },
        generator: LogGeneratorConfig::Template {
            templates: vec![TemplateConfig {
                message: "test".to_string(),
                field_pools: BTreeMap::new(),
            }],
            severity_weights: None,
            seed: Some(0),
        },
        encoder: EncoderConfig::JsonLines { precision: None },
    });
    let result = expand_entry(entry).expect("must succeed");
    assert_eq!(result.len(), 1);
    assert!(matches!(result[0], ScenarioEntry::Logs(_)));
}
#[test]
fn per_column_labels_merge_into_child() {
    // Per-column labels must be merged with the scenario-level labels on
    // each child (column label added, parent "host" label preserved).
    let cols = vec![
        CsvColumnSpec {
            index: 1,
            name: "cpu".to_string(),
            labels: Some(
                [("instance".to_string(), "host1".to_string())]
                    .into_iter()
                    .collect(),
            ),
        },
        CsvColumnSpec {
            index: 2,
            name: "mem".to_string(),
            labels: Some(
                [("instance".to_string(), "host2".to_string())]
                    .into_iter()
                    .collect(),
            ),
        },
    ];
    let config = csv_replay_config("parent", Some(cols));
    let result = expand_scenario(config).expect("must succeed");
    assert_eq!(result.len(), 2);
    let labels0 = result[0].labels.as_ref().expect("labels must exist");
    assert_eq!(labels0.get("instance").map(|s| s.as_str()), Some("host1"));
    assert_eq!(labels0.get("host").map(|s| s.as_str()), Some("srv1"));
    let labels1 = result[1].labels.as_ref().expect("labels must exist");
    assert_eq!(labels1.get("instance").map(|s| s.as_str()), Some("host2"));
    assert_eq!(labels1.get("host").map(|s| s.as_str()), Some("srv1"));
}
#[test]
fn per_column_labels_override_scenario_level_on_conflict() {
    // On a key conflict the column-level label wins over the
    // scenario-level label (here the helper's "host" -> "srv1").
    let cols = vec![CsvColumnSpec {
        index: 1,
        name: "cpu".to_string(),
        labels: Some(
            [("host".to_string(), "override-host".to_string())]
                .into_iter()
                .collect(),
        ),
    }];
    let config = csv_replay_config("parent", Some(cols));
    let result = expand_scenario(config).expect("must succeed");
    assert_eq!(result.len(), 1);
    let labels = result[0].labels.as_ref().expect("labels must exist");
    assert_eq!(
        labels.get("host").map(|s| s.as_str()),
        Some("override-host"),
        "column labels must override scenario-level labels"
    );
}
#[test]
fn columns_without_labels_preserve_scenario_labels() {
    // A column spec without its own labels leaves the scenario-level
    // labels on the expanded child untouched.
    let spec = CsvColumnSpec {
        index: 1,
        name: "cpu".to_string(),
        labels: None,
    };
    let expanded =
        expand_scenario(csv_replay_config("parent", Some(vec![spec]))).expect("must succeed");
    assert_eq!(expanded.len(), 1);
    let labels = expanded[0].labels.as_ref().expect("labels must exist");
    assert_eq!(
        labels.get("host").map(|s| s.as_str()),
        Some("srv1"),
        "scenario-level labels must be preserved"
    );
}
#[test]
fn auto_discovery_expands_from_csv_header() {
    // A header with a time column plus two data columns must expand into
    // two children (Time skipped), each inheriting the scenario labels
    // and pointing at its own column index.
    use std::io::Write;
    let mut tmp = tempfile::NamedTempFile::new().expect("create temp file");
    write!(tmp, "Time,cpu_usage,mem_usage\n1000,42.5,60.0\n").expect("write csv");
    tmp.flush().expect("flush");
    let path = tmp.path().to_string_lossy().into_owned();
    let config = ScenarioConfig {
        base: BaseScheduleConfig {
            name: "auto_test".to_string(),
            rate: 1.0,
            duration: Some("60s".to_string()),
            gaps: None,
            bursts: None,
            cardinality_spikes: None,
            labels: Some(
                [("env".to_string(), "test".to_string())]
                    .into_iter()
                    .collect(),
            ),
            sink: SinkConfig::Stdout,
            phase_offset: None,
            clock_group: None,
            clock_group_is_auto: None,
            jitter: None,
            jitter_seed: None,
            dynamic_labels: None,
            on_sink_error: crate::OnSinkError::Warn,
        },
        generator: GeneratorConfig::CsvReplay {
            file: path,
            column: None,
            repeat: Some(true),
            columns: None,
        },
        encoder: EncoderConfig::PrometheusText { precision: None },
    };
    let result = expand_scenario(config).expect("must succeed");
    assert_eq!(result.len(), 2, "should expand to 2 columns (skip Time)");
    assert_eq!(result[0].name, "cpu_usage");
    assert_eq!(result[1].name, "mem_usage");
    for child in &result {
        let labels = child.labels.as_ref().expect("labels must be inherited");
        assert_eq!(labels.get("env").map(|s| s.as_str()), Some("test"));
    }
    match &result[0].generator {
        GeneratorConfig::CsvReplay {
            column, columns, ..
        } => {
            assert_eq!(*column, Some(1));
            assert!(columns.is_none());
        }
        other => panic!("expected CsvReplay, got {other:?}"),
    }
    match &result[1].generator {
        GeneratorConfig::CsvReplay { column, .. } => {
            assert_eq!(*column, Some(2));
        }
        other => panic!("expected CsvReplay, got {other:?}"),
    }
    // Keep the temp file alive until all assertions have run.
    drop(tmp);
}
#[test]
fn auto_discovery_grafana_style_extracts_labels() {
use std::io::Write;
let mut tmp = tempfile::NamedTempFile::new().expect("create temp file");
let header = r#""Time","{__name__=""up"", instance=""host1"", job=""prom""}","{__name__=""up"", instance=""host2"", job=""node""}""#;
write!(tmp, "{header}\n1704067200000,1,1\n").expect("write csv");
tmp.flush().expect("flush");
let path = tmp.path().to_string_lossy().into_owned();
let config = ScenarioConfig {
base: BaseScheduleConfig {
name: "grafana_auto".to_string(),
rate: 1.0,
duration: None,
gaps: None,
bursts: None,
cardinality_spikes: None,
labels: Some(
[("env".to_string(), "production".to_string())]
.into_iter()
.collect(),
),
sink: SinkConfig::Stdout,
phase_offset: None,
clock_group: None,
clock_group_is_auto: None,
jitter: None,
jitter_seed: None,
dynamic_labels: None,
on_sink_error: crate::OnSinkError::Warn,
},
generator: GeneratorConfig::CsvReplay {
file: path,
column: None,
repeat: Some(true),
columns: None,
},
encoder: EncoderConfig::PrometheusText { precision: None },
};
let result = expand_scenario(config).expect("must succeed");
assert_eq!(result.len(), 2);
assert_eq!(result[0].name, "up");
assert_eq!(result[1].name, "up");
let labels0 = result[0].labels.as_ref().expect("labels must exist");
assert_eq!(labels0.get("instance").map(|s| s.as_str()), Some("host1"));
assert_eq!(labels0.get("job").map(|s| s.as_str()), Some("prom"));
assert_eq!(labels0.get("env").map(|s| s.as_str()), Some("production"));
let labels1 = result[1].labels.as_ref().expect("labels must exist");
assert_eq!(labels1.get("instance").map(|s| s.as_str()), Some("host2"));
assert_eq!(labels1.get("job").map(|s| s.as_str()), Some("node"));
assert_eq!(labels1.get("env").map(|s| s.as_str()), Some("production"));
drop(tmp);
}
#[test]
fn auto_discovery_single_column_file_returns_error() {
    // A CSV with only a time column yields no data columns to expand.
    use std::io::Write;
    let mut tmp = tempfile::NamedTempFile::new().expect("create temp file");
    write!(tmp, "Time\n1000\n").expect("write csv");
    tmp.flush().expect("flush");
    let path = tmp.path().to_string_lossy().into_owned();
    let config = ScenarioConfig {
        base: BaseScheduleConfig {
            name: "no_data_cols".to_string(),
            rate: 1.0,
            duration: None,
            gaps: None,
            bursts: None,
            cardinality_spikes: None,
            labels: None,
            sink: SinkConfig::Stdout,
            phase_offset: None,
            clock_group: None,
            clock_group_is_auto: None,
            jitter: None,
            jitter_seed: None,
            dynamic_labels: None,
            on_sink_error: crate::OnSinkError::Warn,
        },
        generator: GeneratorConfig::CsvReplay {
            file: path,
            column: None,
            repeat: Some(true),
            columns: None,
        },
        encoder: EncoderConfig::PrometheusText { precision: None },
    };
    let err = expand_scenario(config).expect_err("must fail");
    let msg = err.to_string();
    assert!(
        msg.contains("no data columns"),
        "error must mention no data columns, got: {msg}"
    );
    // Keep the temp file alive until all assertions have run.
    drop(tmp);
}
#[test]
fn auto_discovery_single_data_column_no_time_yields_no_data_columns() {
    // With a single-column file the first column is treated as the time
    // axis even when it looks like a metric, so no data columns remain.
    use std::io::Write;
    let mut tmp = tempfile::NamedTempFile::new().expect("create temp file");
    write!(tmp, "metric_name\n42.5\n").expect("write csv");
    tmp.flush().expect("flush");
    let path = tmp.path().to_string_lossy().into_owned();
    let config = ScenarioConfig {
        base: BaseScheduleConfig {
            name: "single_data_col".to_string(),
            rate: 1.0,
            duration: None,
            gaps: None,
            bursts: None,
            cardinality_spikes: None,
            labels: None,
            sink: SinkConfig::Stdout,
            phase_offset: None,
            clock_group: None,
            clock_group_is_auto: None,
            jitter: None,
            jitter_seed: None,
            dynamic_labels: None,
            on_sink_error: crate::OnSinkError::Warn,
        },
        generator: GeneratorConfig::CsvReplay {
            file: path,
            column: None,
            repeat: Some(true),
            columns: None,
        },
        encoder: EncoderConfig::PrometheusText { precision: None },
    };
    let err = expand_scenario(config).expect_err("must fail");
    let msg = err.to_string();
    assert!(
        msg.contains("no data columns"),
        "error must mention no data columns, got: {msg}"
    );
    // Keep the temp file alive until all assertions have run.
    drop(tmp);
}
#[test]
fn auto_discovery_missing_file_returns_generator_error() {
    // A nonexistent CSV path surfaces as SondaError::Generator, not as a
    // config/expansion error.
    let config = ScenarioConfig {
        base: BaseScheduleConfig {
            name: "missing_file".to_string(),
            rate: 1.0,
            duration: None,
            gaps: None,
            bursts: None,
            cardinality_spikes: None,
            labels: None,
            sink: SinkConfig::Stdout,
            phase_offset: None,
            clock_group: None,
            clock_group_is_auto: None,
            jitter: None,
            jitter_seed: None,
            dynamic_labels: None,
            on_sink_error: crate::OnSinkError::Warn,
        },
        generator: GeneratorConfig::CsvReplay {
            file: "/nonexistent/path.csv".to_string(),
            column: None,
            repeat: Some(true),
            columns: None,
        },
        encoder: EncoderConfig::PrometheusText { precision: None },
    };
    let err = expand_scenario(config).expect_err("must fail");
    assert!(
        matches!(err, SondaError::Generator(_)),
        "missing file should be a Generator error, got: {err:?}"
    );
}
#[test]
fn auto_discovery_all_numeric_returns_error() {
    // Multi-column variant of the headerless case: an all-numeric first
    // row means there is no header row to derive names from.
    use std::io::Write;
    let mut tmp = tempfile::NamedTempFile::new().expect("create temp file");
    write!(tmp, "1000,42.5,60.0\n2000,55.3,70.1\n").expect("write csv");
    tmp.flush().expect("flush");
    let path = tmp.path().to_string_lossy().into_owned();
    let config = ScenarioConfig {
        base: BaseScheduleConfig {
            name: "no_header".to_string(),
            rate: 1.0,
            duration: None,
            gaps: None,
            bursts: None,
            cardinality_spikes: None,
            labels: None,
            sink: SinkConfig::Stdout,
            phase_offset: None,
            clock_group: None,
            clock_group_is_auto: None,
            jitter: None,
            jitter_seed: None,
            dynamic_labels: None,
            on_sink_error: crate::OnSinkError::Warn,
        },
        generator: GeneratorConfig::CsvReplay {
            file: path,
            column: None,
            repeat: Some(true),
            columns: None,
        },
        encoder: EncoderConfig::PrometheusText { precision: None },
    };
    let err = expand_scenario(config).expect_err("must fail");
    let msg = err.to_string();
    assert!(
        msg.contains("no header row"),
        "error must mention no header row, got: {msg}"
    );
    // Keep the temp file alive until all assertions have run.
    drop(tmp);
}
#[cfg(feature = "config")]
#[test]
fn deserialize_per_column_labels_from_yaml() {
    // Per-column `labels` maps are optional: present on column 0,
    // absent (None) on column 1.
    let yaml = r#"
name: labeled_cols
rate: 1
generator:
  type: csv_replay
  file: data.csv
  columns:
    - index: 1
      name: cpu_percent
      labels:
        instance: host1
        job: node
    - index: 2
      name: mem_percent
"#;
    let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    match &config.generator {
        GeneratorConfig::CsvReplay { columns, .. } => {
            let cols = columns.as_ref().expect("columns should be Some");
            assert_eq!(cols.len(), 2);
            let labels0 = cols[0].labels.as_ref().expect("col 0 labels must be Some");
            assert_eq!(labels0.get("instance").map(|s| s.as_str()), Some("host1"));
            assert_eq!(labels0.get("job").map(|s| s.as_str()), Some("node"));
            assert!(cols[1].labels.is_none());
        }
        other => panic!("expected CsvReplay variant, got {other:?}"),
    }
}
#[test]
#[cfg(feature = "config")]
fn histogram_config_deserializes_from_yaml() {
    // Fully-populated histogram scenario: optional knobs (buckets,
    // observations_per_tick, mean_shift_per_sec, seed) must all round-trip.
    let yaml = r#"
name: http_request_duration_seconds
rate: 1
duration: 5m
buckets: [0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1.0, 2.5, 5.0, 10.0]
distribution:
  type: exponential
  rate: 10.0
observations_per_tick: 100
mean_shift_per_sec: 0.001
seed: 42
labels:
  method: GET
"#;
    let config: HistogramScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    assert_eq!(config.name, "http_request_duration_seconds");
    assert_eq!(config.rate, 1.0);
    assert_eq!(config.buckets.as_ref().unwrap().len(), 11);
    assert_eq!(config.observations_per_tick, Some(100));
    assert_eq!(config.mean_shift_per_sec, Some(0.001));
    assert_eq!(config.seed, Some(42));
}
#[test]
#[cfg(feature = "config")]
fn histogram_config_defaults_when_omitted() {
    // Every optional histogram knob must come back as None when the YAML
    // omits it.
    let yaml = r#"
name: latency
rate: 1
distribution:
  type: exponential
  rate: 5.0
"#;
    let parsed: HistogramScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    assert!(
        parsed.buckets.is_none()
            && parsed.observations_per_tick.is_none()
            && parsed.mean_shift_per_sec.is_none()
            && parsed.seed.is_none(),
        "all optional histogram fields must default to None"
    );
}
#[test]
#[cfg(feature = "config")]
fn histogram_config_normal_distribution() {
    // The tagged `distribution` field must select the Normal variant
    // with its mean/stddev parameters.
    let yaml = r#"
name: latency
rate: 1
distribution:
  type: normal
  mean: 0.1
  stddev: 0.02
"#;
    let config: HistogramScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    match config.distribution {
        DistributionConfig::Normal { mean, stddev } => {
            assert_eq!(mean, 0.1);
            assert_eq!(stddev, 0.02);
        }
        _ => panic!("expected Normal distribution"),
    }
}
#[test]
#[cfg(feature = "config")]
fn histogram_config_uniform_distribution() {
    // The tagged `distribution` field must select the Uniform variant
    // with its min/max parameters.
    let yaml = r#"
name: latency
rate: 1
distribution:
  type: uniform
  min: 0.0
  max: 1.0
"#;
    let config: HistogramScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    match config.distribution {
        DistributionConfig::Uniform { min, max } => {
            assert_eq!(min, 0.0);
            assert_eq!(max, 1.0);
        }
        _ => panic!("expected Uniform distribution"),
    }
}
#[test]
#[cfg(feature = "config")]
fn summary_config_deserializes_from_yaml() {
    // Fully-populated summary scenario: quantiles and optional knobs
    // must all round-trip through YAML.
    let yaml = r#"
name: rpc_duration_seconds
rate: 1
duration: 5m
quantiles: [0.5, 0.9, 0.95, 0.99]
distribution:
  type: normal
  mean: 0.1
  stddev: 0.02
observations_per_tick: 100
seed: 42
"#;
    let config: SummaryScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    assert_eq!(config.name, "rpc_duration_seconds");
    assert_eq!(config.rate, 1.0);
    assert_eq!(config.quantiles.as_ref().unwrap().len(), 4);
    assert_eq!(config.observations_per_tick, Some(100));
    assert_eq!(config.seed, Some(42));
}
#[test]
#[cfg(feature = "config")]
fn summary_config_defaults_when_omitted() {
    // Every optional summary knob must come back as None when the YAML
    // omits it.
    let yaml = r#"
name: rpc_latency
rate: 1
distribution:
  type: exponential
  rate: 5.0
"#;
    let parsed: SummaryScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
    assert!(
        parsed.quantiles.is_none()
            && parsed.observations_per_tick.is_none()
            && parsed.seed.is_none(),
        "all optional summary fields must default to None"
    );
}
#[test]
#[cfg(feature = "config")]
fn scenario_entry_base_works_for_histogram() {
    // base() must reach through the Histogram variant to the shared
    // schedule fields.
    let yaml = r#"
signal_type: histogram
name: test_hist
rate: 5
distribution:
  type: exponential
  rate: 10.0
"#;
    let entry: ScenarioEntry = serde_yaml_ng::from_str(yaml).unwrap();
    let base = entry.base();
    assert_eq!(base.name, "test_hist");
    assert_eq!(base.rate, 5.0);
}
#[test]
#[cfg(feature = "config")]
fn scenario_entry_base_works_for_summary() {
    // base() must reach through the Summary variant to the shared
    // schedule fields.
    let yaml = r#"
signal_type: summary
name: test_sum
rate: 5
distribution:
  type: normal
  mean: 0.1
  stddev: 0.02
"#;
    let entry: ScenarioEntry = serde_yaml_ng::from_str(yaml).unwrap();
    let base = entry.base();
    assert_eq!(base.name, "test_sum");
    assert_eq!(base.rate, 5.0);
}
#[test]
fn expand_entry_passes_through_histogram() {
    // Histogram entries have no column expansion; expand_entry must
    // return the single Histogram entry unchanged.
    let entry = ScenarioEntry::Histogram(HistogramScenarioConfig {
        base: BaseScheduleConfig {
            name: "test_hist".to_string(),
            rate: 1.0,
            duration: None,
            gaps: None,
            bursts: None,
            cardinality_spikes: None,
            dynamic_labels: None,
            labels: None,
            sink: crate::sink::SinkConfig::Stdout,
            phase_offset: None,
            clock_group: None,
            clock_group_is_auto: None,
            jitter: None,
            jitter_seed: None,
            on_sink_error: crate::OnSinkError::Warn,
        },
        buckets: None,
        distribution: DistributionConfig::Exponential { rate: 10.0 },
        observations_per_tick: None,
        mean_shift_per_sec: None,
        seed: None,
        encoder: EncoderConfig::PrometheusText { precision: None },
    });
    let result = expand_entry(entry).expect("must succeed");
    assert_eq!(result.len(), 1);
    assert!(matches!(result[0], ScenarioEntry::Histogram(_)));
}
#[test]
fn expand_entry_passes_through_summary() {
    // Summary entries have no column expansion; expand_entry must return
    // the single Summary entry unchanged.
    let entry = ScenarioEntry::Summary(SummaryScenarioConfig {
        base: BaseScheduleConfig {
            name: "test_sum".to_string(),
            rate: 1.0,
            duration: None,
            gaps: None,
            bursts: None,
            cardinality_spikes: None,
            dynamic_labels: None,
            labels: None,
            sink: crate::sink::SinkConfig::Stdout,
            phase_offset: None,
            clock_group: None,
            clock_group_is_auto: None,
            jitter: None,
            jitter_seed: None,
            on_sink_error: crate::OnSinkError::Warn,
        },
        quantiles: None,
        distribution: DistributionConfig::Normal {
            mean: 0.1,
            stddev: 0.02,
        },
        observations_per_tick: None,
        mean_shift_per_sec: None,
        seed: None,
        encoder: EncoderConfig::PrometheusText { precision: None },
    });
    let result = expand_entry(entry).expect("must succeed");
    assert_eq!(result.len(), 1);
    assert!(matches!(result[0], ScenarioEntry::Summary(_)));
}
}