use std::collections::BTreeMap;
use super::{AfterClause, Defaults, Entry, ScenarioFile};
use crate::config::{
BurstConfig, CardinalitySpikeConfig, DistributionConfig, DynamicLabelConfig, GapConfig,
};
use crate::encoder::EncoderConfig;
use crate::generator::{GeneratorConfig, LogGeneratorConfig};
use crate::packs::MetricOverride;
use crate::sink::SinkConfig;
/// Errors produced while resolving defaults into scenario entries.
#[derive(Debug, thiserror::Error)]
#[non_exhaustive]
pub enum NormalizeError {
    /// An entry has no `rate` of its own and no `defaults.rate` to
    /// inherit from (see `resolve_rate`).
    #[error("entry {index} ({label}): missing required field 'rate' (set it on the entry or in defaults:)")]
    MissingRate {
        // Zero-based position of the entry in the scenarios list.
        index: usize,
        // Human-readable identifier: name, else id, else pack name,
        // else "<unnamed>" (see `entry_label`).
        label: String,
    },
}
/// A scenario file after defaults resolution: every entry carries fully
/// resolved `rate`, `encoder`, and `sink` values.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "config", derive(serde::Serialize))]
pub struct NormalizedFile {
    /// Schema version copied verbatim from the source file.
    pub version: u32,
    /// `defaults.labels` surfaced for later consumers (e.g. pack
    /// expansion); `None` when the defaults block had no labels or an
    /// empty map.
    #[cfg_attr(feature = "config", serde(skip_serializing_if = "Option::is_none"))]
    pub defaults_labels: Option<BTreeMap<String, String>>,
    /// One normalized entry per scenario, in source order.
    pub entries: Vec<NormalizedEntry>,
}
/// A single scenario entry with defaults applied.
///
/// `rate`, `encoder`, and `sink` are always resolved; most other fields
/// are carried through from the source entry unchanged.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "config", derive(serde::Serialize))]
pub struct NormalizedEntry {
    pub id: Option<String>,
    pub signal_type: String,
    pub name: Option<String>,
    /// Required; resolved from the entry, else defaults (a miss is a
    /// `NormalizeError::MissingRate`).
    pub rate: f64,
    /// Unparsed duration string (e.g. "5m"); entry value, else defaults.
    pub duration: Option<String>,
    pub generator: Option<GeneratorConfig>,
    pub log_generator: Option<LogGeneratorConfig>,
    /// Inline entries: `defaults.labels` merged with entry labels (entry
    /// wins on conflict). Pack entries: entry labels only — the merge is
    /// deferred (see `normalize_entry`).
    pub labels: Option<BTreeMap<String, String>>,
    pub dynamic_labels: Option<Vec<DynamicLabelConfig>>,
    /// Resolved: entry, else defaults, else per-signal-type built-in.
    pub encoder: EncoderConfig,
    /// Resolved: entry, else defaults, else stdout.
    pub sink: SinkConfig,
    // Timing/shape modifiers, passed through untouched.
    pub jitter: Option<f64>,
    pub jitter_seed: Option<u64>,
    pub gaps: Option<GapConfig>,
    pub bursts: Option<BurstConfig>,
    pub cardinality_spikes: Option<Vec<CardinalitySpikeConfig>>,
    pub phase_offset: Option<String>,
    pub clock_group: Option<String>,
    pub after: Option<AfterClause>,
    // Pack-based entries: pack name plus per-metric overrides, passed
    // through untouched.
    pub pack: Option<String>,
    pub overrides: Option<BTreeMap<String, MetricOverride>>,
    // Histogram/summary-specific fields, passed through untouched.
    pub distribution: Option<DistributionConfig>,
    pub buckets: Option<Vec<f64>>,
    pub quantiles: Option<Vec<f64>>,
    pub observations_per_tick: Option<u32>,
    pub mean_shift_per_sec: Option<f64>,
    pub seed: Option<u64>,
}
/// Resolve defaults into each scenario entry, producing a fully
/// materialized [`NormalizedFile`].
///
/// # Errors
///
/// Returns the first [`NormalizeError`] hit while normalizing entries
/// (currently only a missing `rate`).
pub fn normalize(file: ScenarioFile) -> Result<NormalizedFile, NormalizeError> {
    let defaults = file.defaults;

    // Surface defaults.labels for downstream consumers; treat an empty
    // map the same as an absent one.
    let defaults_labels = match defaults.as_ref().and_then(|d| d.labels.as_ref()) {
        Some(m) if !m.is_empty() => Some(m.clone()),
        _ => None,
    };

    // Normalize in source order, short-circuiting on the first error.
    let entries = file
        .scenarios
        .into_iter()
        .enumerate()
        .map(|(index, entry)| normalize_entry(entry, index, defaults.as_ref()))
        .collect::<Result<Vec<_>, _>>()?;

    Ok(NormalizedFile {
        version: file.version,
        defaults_labels,
        entries,
    })
}
/// Materialize one raw `Entry` into a [`NormalizedEntry`], filling unset
/// fields from `defaults`.
///
/// Resolution precedence is entry > defaults > built-in fallback for
/// `rate`, `duration`, `encoder`, and `sink`; all other fields are
/// passed through unchanged.
fn normalize_entry(
    entry: Entry,
    index: usize,
    defaults: Option<&Defaults>,
) -> Result<NormalizedEntry, NormalizeError> {
    // Rate is the only hard requirement; fail early with a labeled error.
    let rate = resolve_rate(&entry, defaults, index)?;

    let duration = match entry.duration {
        Some(d) => Some(d),
        None => defaults.and_then(|d| d.duration.clone()),
    };

    // Encoder falls back to a per-signal-type built-in; sink falls back
    // to stdout.
    let encoder = entry
        .encoder
        .or_else(|| defaults.and_then(|d| d.encoder.clone()))
        .unwrap_or_else(|| default_encoder_for(&entry.signal_type));
    let sink = entry
        .sink
        .or_else(|| defaults.and_then(|d| d.sink.clone()))
        .unwrap_or_else(default_sink);

    // Pack entries keep their labels untouched (the defaults.labels
    // merge is deferred to pack expansion); inline entries merge here,
    // with entry keys winning on conflict.
    let labels = if entry.pack.is_none() {
        merge_labels(defaults.and_then(|d| d.labels.as_ref()), entry.labels)
    } else {
        entry.labels
    };

    Ok(NormalizedEntry {
        id: entry.id,
        signal_type: entry.signal_type,
        name: entry.name,
        rate,
        duration,
        generator: entry.generator,
        log_generator: entry.log_generator,
        labels,
        dynamic_labels: entry.dynamic_labels,
        encoder,
        sink,
        jitter: entry.jitter,
        jitter_seed: entry.jitter_seed,
        gaps: entry.gaps,
        bursts: entry.bursts,
        cardinality_spikes: entry.cardinality_spikes,
        phase_offset: entry.phase_offset,
        clock_group: entry.clock_group,
        after: entry.after,
        pack: entry.pack,
        overrides: entry.overrides,
        distribution: entry.distribution,
        buckets: entry.buckets,
        quantiles: entry.quantiles,
        observations_per_tick: entry.observations_per_tick,
        mean_shift_per_sec: entry.mean_shift_per_sec,
        seed: entry.seed,
    })
}
/// Pick the effective rate: entry value first, then `defaults.rate`.
///
/// # Errors
///
/// [`NormalizeError::MissingRate`] when neither source provides one,
/// carrying the entry's index and best-available label.
fn resolve_rate(
    entry: &Entry,
    defaults: Option<&Defaults>,
    index: usize,
) -> Result<f64, NormalizeError> {
    entry
        .rate
        .or_else(|| defaults.and_then(|d| d.rate))
        .ok_or_else(|| NormalizeError::MissingRate {
            index,
            label: entry_label(entry),
        })
}
/// Best human-readable identifier for an entry, used in error messages.
/// Priority: name, then id, then pack name, then a literal placeholder.
fn entry_label(entry: &Entry) -> String {
    for candidate in [&entry.name, &entry.id, &entry.pack] {
        if let Some(label) = candidate {
            return label.clone();
        }
    }
    "<unnamed>".to_string()
}
/// Built-in encoder fallback per signal type: logs get JSON lines,
/// everything else (metrics, histogram, summary, ...) gets Prometheus
/// text exposition.
fn default_encoder_for(signal_type: &str) -> EncoderConfig {
    if signal_type == "logs" {
        EncoderConfig::JsonLines { precision: None }
    } else {
        EncoderConfig::PrometheusText { precision: None }
    }
}
/// Built-in fallback sink used when neither the entry nor the defaults
/// block names one.
fn default_sink() -> SinkConfig {
    SinkConfig::Stdout
}
/// Merge defaults labels with entry labels; entry values win on key
/// conflict. Returns `None` only when both inputs are `None`.
fn merge_labels(
    defaults_labels: Option<&BTreeMap<String, String>>,
    entry_labels: Option<BTreeMap<String, String>>,
) -> Option<BTreeMap<String, String>> {
    if defaults_labels.is_none() && entry_labels.is_none() {
        return None;
    }
    // Start from the defaults (if any), then overlay the entry's pairs;
    // BTreeMap::extend replaces existing keys, so the entry wins.
    let mut merged = defaults_labels.cloned().unwrap_or_default();
    if let Some(entry) = entry_labels {
        merged.extend(entry);
    }
    Some(merged)
}
#[cfg(test)]
mod tests {
    //! Behavioral tests for defaults resolution: precedence chains,
    //! label merging (and its deliberate asymmetry for pack entries),
    //! built-in encoder/sink fallbacks, and error labeling.
    use super::super::parse::parse;
    use super::*;

    // Parse YAML then normalize; parsing panics so only normalization
    // behavior is under test.
    fn normalize_yaml(yaml: &str) -> Result<NormalizedFile, NormalizeError> {
        let parsed = parse(yaml).expect("parse must succeed in normalization tests");
        normalize(parsed)
    }

    fn is_prometheus_text(encoder: &EncoderConfig) -> bool {
        matches!(encoder, EncoderConfig::PrometheusText { .. })
    }

    fn is_json_lines(encoder: &EncoderConfig) -> bool {
        matches!(encoder, EncoderConfig::JsonLines { .. })
    }

    fn is_stdout(sink: &SinkConfig) -> bool {
        matches!(sink, SinkConfig::Stdout)
    }

    // --- defaults inheritance and entry-over-defaults precedence ------

    #[test]
    fn entry_inherits_rate_and_duration_from_defaults() {
        let yaml = r#"
version: 2
defaults:
  rate: 1
  duration: 5m
scenarios:
  - signal_type: metrics
    name: cpu
    generator: { type: constant, value: 42 }
"#;
        let file = normalize_yaml(yaml).expect("must normalize");
        let entry = &file.entries[0];
        assert!((entry.rate - 1.0).abs() < f64::EPSILON);
        assert_eq!(entry.duration.as_deref(), Some("5m"));
    }

    #[test]
    fn entry_rate_overrides_defaults_rate() {
        let yaml = r#"
version: 2
defaults:
  rate: 1
scenarios:
  - signal_type: metrics
    name: cpu
    rate: 10
    generator: { type: constant, value: 42 }
"#;
        let file = normalize_yaml(yaml).expect("must normalize");
        assert!((file.entries[0].rate - 10.0).abs() < f64::EPSILON);
    }

    #[test]
    fn entry_duration_overrides_defaults_duration() {
        let yaml = r#"
version: 2
defaults:
  rate: 1
  duration: 5m
scenarios:
  - signal_type: metrics
    name: cpu
    duration: 30s
    generator: { type: constant, value: 42 }
"#;
        let file = normalize_yaml(yaml).expect("must normalize");
        assert_eq!(file.entries[0].duration.as_deref(), Some("30s"));
    }

    #[test]
    fn entry_inherits_encoder_and_sink_from_defaults() {
        let yaml = r#"
version: 2
defaults:
  rate: 1
  encoder: { type: influx_lp }
  sink: { type: file, path: /tmp/out.txt }
scenarios:
  - signal_type: metrics
    name: cpu
    generator: { type: constant, value: 42 }
"#;
        let file = normalize_yaml(yaml).expect("must normalize");
        let entry = &file.entries[0];
        assert!(matches!(
            entry.encoder,
            EncoderConfig::InfluxLineProtocol { .. }
        ));
        assert!(matches!(entry.sink, SinkConfig::File { .. }));
    }

    #[test]
    fn entry_encoder_overrides_defaults_encoder() {
        let yaml = r#"
version: 2
defaults:
  rate: 1
  encoder: { type: influx_lp }
scenarios:
  - signal_type: metrics
    name: cpu
    encoder: { type: prometheus_text }
    generator: { type: constant, value: 42 }
"#;
        let file = normalize_yaml(yaml).expect("must normalize");
        assert!(is_prometheus_text(&file.entries[0].encoder));
    }

    // Expected built-in encoder family for parameterized cases below.
    #[derive(Copy, Clone)]
    enum ExpectedEncoder {
        PrometheusText,
        JsonLines,
    }

    // --- built-in encoder/sink fallbacks per signal type --------------

    #[rustfmt::skip]
    #[rstest::rstest]
    #[case::metrics(r#"
version: 2
scenarios:
  - signal_type: metrics
    name: cpu
    rate: 1
    generator: { type: constant, value: 42 }
"#, ExpectedEncoder::PrometheusText)]
    #[case::histogram(r#"
version: 2
scenarios:
  - signal_type: histogram
    name: http_latency
    rate: 1
    distribution: { type: exponential, rate: 10.0 }
    buckets: [0.1, 0.5, 1.0]
    observations_per_tick: 50
    seed: 1
"#, ExpectedEncoder::PrometheusText)]
    #[case::summary(r#"
version: 2
scenarios:
  - signal_type: summary
    name: rpc_latency
    rate: 1
    distribution: { type: normal, mean: 0.1, stddev: 0.02 }
    quantiles: [0.5, 0.9, 0.99]
    observations_per_tick: 50
    seed: 1
"#, ExpectedEncoder::PrometheusText)]
    #[case::logs(r#"
version: 2
scenarios:
  - signal_type: logs
    name: app_logs
    rate: 1
    log_generator:
      type: template
      templates:
        - message: "hello"
"#, ExpectedEncoder::JsonLines)]
    fn signal_type_picks_built_in_encoder_and_stdout_sink(
        #[case] yaml: &str,
        #[case] expected: ExpectedEncoder,
    ) {
        let file = normalize_yaml(yaml).expect("must normalize");
        let entry = &file.entries[0];
        match expected {
            ExpectedEncoder::PrometheusText => assert!(is_prometheus_text(&entry.encoder)),
            ExpectedEncoder::JsonLines => assert!(is_json_lines(&entry.encoder)),
        }
        assert!(is_stdout(&entry.sink));
    }

    // --- label merging for inline entries -----------------------------

    #[test]
    fn labels_merge_entry_wins_on_conflict() {
        let yaml = r#"
version: 2
defaults:
  rate: 1
  labels:
    device: rtr-edge-01
    region: us-west-2
scenarios:
  - signal_type: metrics
    name: cpu
    labels:
      region: us-east-1
      interface: Gi0/0/0
    generator: { type: constant, value: 42 }
"#;
        let file = normalize_yaml(yaml).expect("must normalize");
        let labels = file.entries[0]
            .labels
            .as_ref()
            .expect("merged labels must exist");
        assert_eq!(
            labels.get("device").map(String::as_str),
            Some("rtr-edge-01")
        );
        assert_eq!(
            labels.get("region").map(String::as_str),
            Some("us-east-1"),
            "entry value must win on conflict"
        );
        assert_eq!(labels.get("interface").map(String::as_str), Some("Gi0/0/0"));
    }

    #[test]
    fn labels_from_defaults_alone_are_preserved() {
        let yaml = r#"
version: 2
defaults:
  rate: 1
  labels:
    env: staging
scenarios:
  - signal_type: metrics
    name: cpu
    generator: { type: constant, value: 42 }
"#;
        let file = normalize_yaml(yaml).expect("must normalize");
        let labels = file.entries[0].labels.as_ref().expect("labels must exist");
        assert_eq!(labels.get("env").map(String::as_str), Some("staging"));
        assert_eq!(labels.len(), 1);
    }

    #[test]
    fn entry_labels_preserved_when_defaults_has_no_labels() {
        let yaml = r#"
version: 2
defaults:
  rate: 1
scenarios:
  - signal_type: metrics
    name: cpu
    labels:
      job: api
    generator: { type: constant, value: 42 }
"#;
        let file = normalize_yaml(yaml).expect("must normalize");
        let labels = file.entries[0].labels.as_ref().expect("labels must exist");
        assert_eq!(labels.get("job").map(String::as_str), Some("api"));
        assert_eq!(labels.len(), 1);
    }

    #[test]
    fn no_labels_anywhere_produces_none() {
        let yaml = r#"
version: 2
scenarios:
  - signal_type: metrics
    name: cpu
    rate: 1
    generator: { type: constant, value: 42 }
"#;
        let file = normalize_yaml(yaml).expect("must normalize");
        assert!(file.entries[0].labels.is_none());
    }

    // --- missing-rate error reporting ---------------------------------

    #[rustfmt::skip]
    #[rstest::rstest]
    #[case::inline_uses_name(r#"
version: 2
scenarios:
  - signal_type: metrics
    name: cpu
    generator: { type: constant, value: 1.0 }
"#, "cpu")]
    #[case::pack_prefers_id(r#"
version: 2
scenarios:
  - id: snmp_iface
    signal_type: metrics
    pack: telegraf_snmp_interface
"#, "snmp_iface")]
    #[case::pack_falls_back_to_pack_name(r#"
version: 2
scenarios:
  - signal_type: metrics
    pack: telegraf_snmp_interface
"#, "telegraf_snmp_interface")]
    fn missing_rate_error_label_follows_priority_chain(
        #[case] yaml: &str,
        #[case] expected_label: &str,
    ) {
        let err = normalize_yaml(yaml).expect_err("missing rate must fail");
        match err {
            NormalizeError::MissingRate { index, label } => {
                assert_eq!(index, 0);
                assert_eq!(label, expected_label);
            }
        }
    }

    #[test]
    fn missing_rate_message_mentions_entry_and_hint() {
        let yaml = r#"
version: 2
scenarios:
  - signal_type: metrics
    name: bare
    generator: { type: constant, value: 1.0 }
"#;
        let err = normalize_yaml(yaml).expect_err("missing rate must fail");
        let msg = err.to_string();
        assert!(msg.contains("entry 0"), "error should mention entry index");
        assert!(msg.contains("bare"), "error should mention entry name");
        assert!(msg.contains("rate"), "error should mention rate");
        assert!(
            msg.contains("defaults"),
            "error should hint at defaults block"
        );
    }

    // --- shorthand (single-scenario) files ----------------------------

    #[test]
    fn shorthand_single_signal_normalizes_through_wrapped_form() {
        let yaml = r#"
version: 2
name: cpu_usage
signal_type: metrics
rate: 5
generator: { type: constant, value: 42 }
"#;
        let file = normalize_yaml(yaml).expect("must normalize shorthand");
        assert_eq!(file.entries.len(), 1);
        let entry = &file.entries[0];
        assert!((entry.rate - 5.0).abs() < f64::EPSILON);
        assert_eq!(entry.name.as_deref(), Some("cpu_usage"));
        assert!(is_prometheus_text(&entry.encoder));
        assert!(is_stdout(&entry.sink));
    }

    #[test]
    fn shorthand_logs_signal_picks_json_lines_default() {
        let yaml = r#"
version: 2
name: app_logs
signal_type: logs
rate: 2
log_generator:
  type: template
  templates:
    - message: "hello"
"#;
        let file = normalize_yaml(yaml).expect("must normalize logs shorthand");
        assert!(is_json_lines(&file.entries[0].encoder));
    }

    // --- pack entries: inherit defaults but defer the label merge -----

    #[test]
    fn pack_entry_inherits_defaults_but_defers_label_merge() {
        let yaml = r#"
version: 2
defaults:
  rate: 1
  duration: 10m
  encoder: { type: prometheus_text }
  sink: { type: stdout }
  labels:
    job: web
scenarios:
  - id: primary_uplink
    signal_type: metrics
    pack: mypack
    labels:
      device: rtr-01
    overrides:
      ifOperStatus:
        generator: { type: constant, value: 0.0 }
"#;
        let file = normalize_yaml(yaml).expect("must normalize pack entry");
        let entry = &file.entries[0];
        assert_eq!(entry.pack.as_deref(), Some("mypack"));
        assert!(
            entry.overrides.is_some(),
            "overrides must be carried through untouched"
        );
        assert!((entry.rate - 1.0).abs() < f64::EPSILON);
        assert_eq!(entry.duration.as_deref(), Some("10m"));
        assert!(is_prometheus_text(&entry.encoder));
        assert!(is_stdout(&entry.sink));
        let labels = entry.labels.as_ref().expect("entry labels must exist");
        assert_eq!(labels.len(), 1, "only entry labels — defaults not merged");
        assert_eq!(labels.get("device").map(String::as_str), Some("rtr-01"));
        assert!(
            !labels.contains_key("job"),
            "defaults.labels must not leak into pack entry's labels"
        );
        let d = file
            .defaults_labels
            .as_ref()
            .expect("defaults_labels must be surfaced");
        assert_eq!(d.get("job").map(String::as_str), Some("web"));
    }

    // --- defaults_labels surfacing ------------------------------------

    #[test]
    fn normalized_file_defaults_labels_matches_source() {
        let yaml_with = r#"
version: 2
defaults:
  rate: 1
  labels:
    env: prod
    region: us-east-1
scenarios:
  - signal_type: metrics
    name: cpu
    generator: { type: constant, value: 42 }
"#;
        let file = normalize_yaml(yaml_with).expect("must normalize");
        let d = file
            .defaults_labels
            .as_ref()
            .expect("defaults_labels must be Some when defaults.labels is set");
        assert_eq!(d.len(), 2);
        assert_eq!(d.get("env").map(String::as_str), Some("prod"));
        assert_eq!(d.get("region").map(String::as_str), Some("us-east-1"));
        let yaml_no_defaults = r#"
version: 2
scenarios:
  - signal_type: metrics
    name: cpu
    rate: 1
    generator: { type: constant, value: 42 }
"#;
        let file = normalize_yaml(yaml_no_defaults).expect("must normalize");
        assert!(file.defaults_labels.is_none());
        let yaml_no_labels = r#"
version: 2
defaults:
  rate: 1
  duration: 5m
scenarios:
  - signal_type: metrics
    name: cpu
    generator: { type: constant, value: 42 }
"#;
        let file = normalize_yaml(yaml_no_labels).expect("must normalize");
        assert!(file.defaults_labels.is_none());
    }

    #[test]
    fn inline_and_pack_entries_compose_defaults_labels_asymmetrically() {
        let yaml = r#"
version: 2
defaults:
  rate: 1
  labels:
    job: web
    region: us-east-1
scenarios:
  - signal_type: metrics
    name: cpu
    labels:
      host: node-01
    generator: { type: constant, value: 42 }
  - signal_type: metrics
    pack: mypack
    labels:
      device: rtr-01
"#;
        let file = normalize_yaml(yaml).expect("must normalize");
        assert_eq!(file.entries.len(), 2);
        // Inline entry: defaults merged in.
        let inline = &file.entries[0];
        assert!(inline.pack.is_none());
        let inline_labels = inline.labels.as_ref().expect("inline labels must exist");
        assert_eq!(inline_labels.len(), 3, "defaults + entry merged");
        assert_eq!(inline_labels.get("job").map(String::as_str), Some("web"));
        assert_eq!(
            inline_labels.get("region").map(String::as_str),
            Some("us-east-1")
        );
        assert_eq!(
            inline_labels.get("host").map(String::as_str),
            Some("node-01")
        );
        // Pack entry: labels left as-authored; merge is deferred.
        let pack = &file.entries[1];
        assert_eq!(pack.pack.as_deref(), Some("mypack"));
        let pack_labels = pack.labels.as_ref().expect("pack entry labels must exist");
        assert_eq!(pack_labels.len(), 1, "only entry-level labels, no merge");
        assert_eq!(
            pack_labels.get("device").map(String::as_str),
            Some("rtr-01")
        );
        assert!(!pack_labels.contains_key("job"));
        assert!(!pack_labels.contains_key("region"));
        let d = file
            .defaults_labels
            .as_ref()
            .expect("defaults_labels must be Some");
        assert_eq!(d.len(), 2);
        assert_eq!(d.get("job").map(String::as_str), Some("web"));
        assert_eq!(d.get("region").map(String::as_str), Some("us-east-1"));
    }

    // --- end-to-end mixed file ----------------------------------------

    #[test]
    fn multi_scenario_mixed_entries_all_normalize() {
        let yaml = r#"
version: 2
defaults:
  rate: 1
  duration: 5m
  encoder: { type: prometheus_text }
  sink: { type: stdout }
  labels:
    region: us-west-2
scenarios:
  - id: link_state
    signal_type: metrics
    name: interface_oper_state
    labels:
      interface: Gi0/0/0
      region: us-east-1
    generator: { type: flap, up_duration: 60s, down_duration: 30s }
  - id: fast_metric
    signal_type: metrics
    name: cpu
    rate: 10
    generator: { type: constant, value: 42 }
  - signal_type: logs
    name: app_logs
    log_generator:
      type: template
      templates:
        - message: "hello"
  - signal_type: metrics
    pack: telegraf_snmp_interface
    labels:
      device: rtr-01
"#;
        let file = normalize_yaml(yaml).expect("must normalize multi-scenario");
        assert_eq!(file.entries.len(), 4);
        let e0 = &file.entries[0];
        assert!((e0.rate - 1.0).abs() < f64::EPSILON);
        assert_eq!(e0.duration.as_deref(), Some("5m"));
        assert!(is_prometheus_text(&e0.encoder));
        let labels0 = e0.labels.as_ref().expect("labels must exist");
        assert_eq!(labels0.get("region").map(String::as_str), Some("us-east-1"));
        assert_eq!(
            labels0.get("interface").map(String::as_str),
            Some("Gi0/0/0")
        );
        let e1 = &file.entries[1];
        assert!((e1.rate - 10.0).abs() < f64::EPSILON);
        assert_eq!(e1.duration.as_deref(), Some("5m"));
        let labels1 = e1.labels.as_ref().expect("labels must exist");
        assert_eq!(
            labels1.get("region").map(String::as_str),
            Some("us-west-2"),
            "entry has no labels.region, defaults wins"
        );
        let e2 = &file.entries[2];
        assert!(
            is_prometheus_text(&e2.encoder),
            "explicit defaults.encoder applies to all entries including logs"
        );
        let e3 = &file.entries[3];
        assert_eq!(e3.pack.as_deref(), Some("telegraf_snmp_interface"));
        let labels3 = e3.labels.as_ref().expect("labels must exist");
        assert_eq!(labels3.len(), 1, "only entry-level labels on pack entry");
        assert_eq!(labels3.get("device").map(String::as_str), Some("rtr-01"));
        assert!(!labels3.contains_key("region"));
        let d = file
            .defaults_labels
            .as_ref()
            .expect("defaults_labels must be Some");
        assert_eq!(d.get("region").map(String::as_str), Some("us-west-2"));
    }

    // --- passthrough of timing / histogram fields ---------------------

    #[test]
    fn after_clause_and_timing_fields_preserved() {
        let yaml = r#"
version: 2
defaults:
  rate: 1
scenarios:
  - id: src
    signal_type: metrics
    name: source
    generator: { type: constant, value: 100.0 }
  - signal_type: metrics
    name: dependent
    phase_offset: 5s
    clock_group: group_a
    generator: { type: constant, value: 1.0 }
    after:
      ref: src
      op: ">"
      value: 50.0
      delay: 2s
"#;
        let file = normalize_yaml(yaml).expect("must normalize");
        let dep = &file.entries[1];
        assert_eq!(dep.phase_offset.as_deref(), Some("5s"));
        assert_eq!(dep.clock_group.as_deref(), Some("group_a"));
        let after = dep.after.as_ref().expect("after must be preserved");
        assert_eq!(after.ref_id, "src");
        assert_eq!(after.delay.as_deref(), Some("2s"));
    }

    #[test]
    fn histogram_fields_preserved() {
        let yaml = r#"
version: 2
defaults:
  rate: 1
scenarios:
  - signal_type: histogram
    name: latency
    distribution: { type: exponential, rate: 10.0 }
    buckets: [0.1, 0.5, 1.0]
    observations_per_tick: 100
    mean_shift_per_sec: 0.01
    seed: 42
"#;
        let file = normalize_yaml(yaml).expect("must normalize");
        let entry = &file.entries[0];
        assert!(entry.distribution.is_some());
        assert_eq!(entry.buckets.as_ref().map(Vec::len), Some(3));
        assert_eq!(entry.observations_per_tick, Some(100));
        assert_eq!(entry.mean_shift_per_sec, Some(0.01));
        assert_eq!(entry.seed, Some(42));
    }

    // --- auto-trait and edge-case guarantees --------------------------

    #[test]
    fn normalize_error_is_send_and_sync() {
        fn assert_send_sync<T: Send + Sync>() {}
        assert_send_sync::<NormalizeError>();
    }

    #[test]
    fn normalized_types_are_send_and_sync() {
        fn assert_send_sync<T: Send + Sync>() {}
        assert_send_sync::<NormalizedFile>();
        assert_send_sync::<NormalizedEntry>();
    }

    #[test]
    fn empty_scenarios_list_normalizes_to_empty_entries() {
        let yaml = r#"
version: 2
scenarios: []
"#;
        let file = normalize_yaml(yaml).expect("must normalize empty list");
        assert_eq!(file.version, 2);
        assert!(file.entries.is_empty());
    }

    // --- merge_labels unit tests --------------------------------------

    #[test]
    fn merge_labels_both_none_returns_none() {
        assert!(merge_labels(None, None).is_none());
    }

    #[test]
    fn merge_labels_only_defaults_returns_defaults_clone() {
        let mut d = BTreeMap::new();
        d.insert("a".to_string(), "1".to_string());
        let merged = merge_labels(Some(&d), None).expect("must return map");
        assert_eq!(merged.get("a").map(String::as_str), Some("1"));
    }

    #[test]
    fn merge_labels_only_entry_returns_entry() {
        let mut e = BTreeMap::new();
        e.insert("b".to_string(), "2".to_string());
        let merged = merge_labels(None, Some(e)).expect("must return map");
        assert_eq!(merged.get("b").map(String::as_str), Some("2"));
    }

    #[test]
    fn merge_labels_entry_overrides_defaults_on_conflict() {
        let mut d = BTreeMap::new();
        d.insert("k".to_string(), "from_defaults".to_string());
        let mut e = BTreeMap::new();
        e.insert("k".to_string(), "from_entry".to_string());
        let merged = merge_labels(Some(&d), Some(e)).expect("must return map");
        assert_eq!(merged.get("k").map(String::as_str), Some("from_entry"));
    }

    // --- built-in fallback unit tests ---------------------------------

    #[rustfmt::skip]
    #[rstest::rstest]
    #[case::metrics("metrics", ExpectedEncoder::PrometheusText)]
    #[case::histogram("histogram", ExpectedEncoder::PrometheusText)]
    #[case::summary("summary", ExpectedEncoder::PrometheusText)]
    #[case::logs("logs", ExpectedEncoder::JsonLines)]
    fn default_encoder_per_signal_type(
        #[case] signal_type: &str,
        #[case] expected: ExpectedEncoder,
    ) {
        let encoder = default_encoder_for(signal_type);
        match expected {
            ExpectedEncoder::PrometheusText => {
                assert!(matches!(encoder, EncoderConfig::PrometheusText { .. }))
            }
            ExpectedEncoder::JsonLines => {
                assert!(matches!(encoder, EncoderConfig::JsonLines { .. }))
            }
        }
    }

    #[test]
    fn default_sink_is_stdout() {
        assert!(matches!(default_sink(), SinkConfig::Stdout));
    }
}