use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, RwLock};
use std::time::{Duration, Instant};
use crate::config::validate::{
validate_config, validate_histogram_config, validate_log_config, validate_summary_config,
};
use crate::config::ScenarioEntry;
use crate::schedule::handle::ScenarioHandle;
use crate::schedule::histogram_runner::run_with_sink as run_histogram_with_sink;
use crate::schedule::log_runner::run_logs_with_sink;
use crate::schedule::runner::run_with_sink;
use crate::schedule::stats::ScenarioStats;
use crate::schedule::summary_runner::run_with_sink as run_summary_with_sink;
use crate::sink::create_sink;
use crate::{RuntimeError, SondaError};
/// Validates a scenario entry by dispatching to the validator for its
/// telemetry kind (metrics, logs, histogram, or summary).
///
/// # Errors
/// Propagates whatever `SondaError` the kind-specific validator returns
/// (the tests below exercise rejection of non-positive rates and
/// unparseable duration strings).
pub fn validate_entry(entry: &ScenarioEntry) -> Result<(), SondaError> {
    match entry {
        ScenarioEntry::Metrics(config) => validate_config(config),
        ScenarioEntry::Logs(config) => validate_log_config(config),
        ScenarioEntry::Histogram(config) => validate_histogram_config(config),
        ScenarioEntry::Summary(config) => validate_summary_config(config),
    }
}
/// Interruptible sleep used for scenario start delays.
///
/// Waits until `delay` has elapsed, polling `run_flag` in 50 ms chunks so a
/// stop request issued during the delay is honored promptly. Returns `true`
/// when the full delay elapsed and `false` when the flag flipped to `false`
/// (shutdown requested) first.
fn wait_start_delay(delay: Duration, run_flag: &AtomicBool) -> bool {
    let deadline = Instant::now() + delay;
    while Instant::now() < deadline {
        if !run_flag.load(Ordering::SeqCst) {
            return false;
        }
        // Sleep at most 50 ms at a time so the flag is re-checked often.
        let remaining = deadline.saturating_duration_since(Instant::now());
        let chunk = remaining.min(Duration::from_millis(50));
        if chunk > Duration::ZERO {
            std::thread::sleep(chunk);
        }
    }
    true
}

/// Spawns `entry` on its own named OS thread and returns a `ScenarioHandle`
/// for observing and stopping it.
///
/// The shared `shutdown` flag uses run-while-true semantics: `true` means
/// "keep running", `false` requests shutdown. It is reset to `true` here so
/// a flag left `false` by a previous stop cannot kill the new thread
/// immediately (pinned by the `launch_scenario_resets_shutdown_flag_to_true`
/// test).
///
/// When `start_delay` is `Some`, the thread performs an interruptible wait
/// before emitting anything.
///
/// # Errors
/// Returns `SondaError::Runtime(RuntimeError::SpawnFailed)` when the OS
/// thread cannot be created; runner errors surface later through the
/// handle's join result.
pub fn launch_scenario(
    id: String,
    entry: ScenarioEntry,
    shutdown: Arc<AtomicBool>,
    start_delay: Option<Duration>,
) -> Result<ScenarioHandle, SondaError> {
    let stats = Arc::new(RwLock::new(ScenarioStats::default()));
    let stats_for_thread = Arc::clone(&stats);
    let shutdown_for_thread = Arc::clone(&shutdown);
    // Capture the fields the handle needs before `entry` moves into the thread.
    let (name, target_rate) = match &entry {
        ScenarioEntry::Metrics(c) => (c.name.clone(), c.rate),
        ScenarioEntry::Logs(c) => (c.name.clone(), c.rate),
        ScenarioEntry::Histogram(c) => (c.name.clone(), c.rate),
        ScenarioEntry::Summary(c) => (c.name.clone(), c.rate),
    };
    let started_at = Instant::now();
    shutdown.store(true, Ordering::SeqCst);
    let thread = std::thread::Builder::new()
        .name(format!("sonda-{}", name))
        .spawn(move || -> Result<(), SondaError> {
            if let Some(delay) = start_delay {
                if !wait_start_delay(delay, shutdown_for_thread.as_ref()) {
                    // Shutdown requested during the delay: exit cleanly
                    // without emitting anything.
                    return Ok(());
                }
            }
            // Exactly one arm runs, so `stats_for_thread` can be moved into
            // it directly — no extra Arc clone needed.
            match entry {
                ScenarioEntry::Metrics(config) => {
                    let mut sink = create_sink(&config.sink, None)?;
                    run_with_sink(
                        &config,
                        sink.as_mut(),
                        Some(shutdown_for_thread.as_ref()),
                        Some(stats_for_thread),
                    )
                }
                ScenarioEntry::Logs(config) => {
                    let mut sink = create_sink(&config.sink, config.labels.as_ref())?;
                    run_logs_with_sink(
                        &config,
                        sink.as_mut(),
                        Some(shutdown_for_thread.as_ref()),
                        Some(stats_for_thread),
                    )
                }
                ScenarioEntry::Histogram(config) => {
                    let mut sink = create_sink(&config.sink, None)?;
                    run_histogram_with_sink(
                        &config,
                        sink.as_mut(),
                        Some(shutdown_for_thread.as_ref()),
                        Some(stats_for_thread),
                    )
                }
                ScenarioEntry::Summary(config) => {
                    let mut sink = create_sink(&config.sink, None)?;
                    run_summary_with_sink(
                        &config,
                        sink.as_mut(),
                        Some(shutdown_for_thread.as_ref()),
                        Some(stats_for_thread),
                    )
                }
            }
        })
        .map_err(|e| SondaError::Runtime(RuntimeError::SpawnFailed(e)))?;
    Ok(ScenarioHandle {
        id,
        name,
        shutdown,
        thread: Some(thread),
        started_at,
        stats,
        target_rate,
    })
}
#[cfg(test)]
mod tests {
    use std::collections::BTreeMap;
    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::Arc;
    use std::time::Duration;
    use super::*;
    use crate::config::{
        BaseScheduleConfig, DistributionConfig, HistogramScenarioConfig, LogScenarioConfig,
        ScenarioConfig, ScenarioEntry, SummaryScenarioConfig,
    };
    use crate::encoder::EncoderConfig;
    use crate::generator::{GeneratorConfig, LogGeneratorConfig, TemplateConfig};
    use crate::sink::SinkConfig;

    // ---- fixtures ----------------------------------------------------------

    /// Base schedule shared by every fixture: all optional knobs disabled,
    /// stdout sink. Only name/rate/duration vary between tests.
    fn base(name: &str, rate: f64, duration: Option<&str>) -> BaseScheduleConfig {
        BaseScheduleConfig {
            name: name.to_string(),
            rate,
            duration: duration.map(str::to_string),
            gaps: None,
            bursts: None,
            cardinality_spikes: None,
            dynamic_labels: None,
            labels: None,
            sink: SinkConfig::Stdout,
            phase_offset: None,
            clock_group: None,
            jitter: None,
            jitter_seed: None,
        }
    }

    /// Metrics fixture: constant-value generator + Prometheus text encoder.
    fn metrics_entry_with(name: &str, rate: f64, duration: Option<&str>) -> ScenarioEntry {
        ScenarioEntry::Metrics(ScenarioConfig {
            base: base(name, rate, duration),
            generator: GeneratorConfig::Constant { value: 1.0 },
            encoder: EncoderConfig::PrometheusText { precision: None },
        })
    }

    /// Logs fixture: single message template + JSON-lines encoder.
    // NOTE(review): seed parameter assumed to be u64 — confirm against the
    // `LogGeneratorConfig::Template` field type.
    fn logs_entry_with(
        name: &str,
        rate: f64,
        duration: Option<&str>,
        message: &str,
        seed: u64,
    ) -> ScenarioEntry {
        ScenarioEntry::Logs(LogScenarioConfig {
            base: base(name, rate, duration),
            generator: LogGeneratorConfig::Template {
                templates: vec![TemplateConfig {
                    message: message.to_string(),
                    field_pools: BTreeMap::new(),
                }],
                severity_weights: None,
                seed: Some(seed),
            },
            encoder: EncoderConfig::JsonLines { precision: None },
        })
    }

    /// Short-lived (200 ms) metrics scenario.
    fn metrics_entry(name: &str) -> ScenarioEntry {
        metrics_entry_with(name, 50.0, Some("200ms"))
    }

    /// Short-lived (200 ms) logs scenario.
    fn logs_entry(name: &str) -> ScenarioEntry {
        logs_entry_with(name, 50.0, Some("200ms"), "test log", 0)
    }

    /// Metrics scenario with no duration: runs until stopped.
    fn metrics_entry_indefinite(name: &str) -> ScenarioEntry {
        metrics_entry_with(name, 100.0, None)
    }

    /// Logs scenario with no duration: runs until stopped.
    fn logs_entry_indefinite(name: &str) -> ScenarioEntry {
        logs_entry_with(name, 100.0, None, "indefinite log", 1)
    }

    /// Short-lived histogram scenario over an exponential distribution.
    fn histogram_entry(name: &str) -> ScenarioEntry {
        ScenarioEntry::Histogram(HistogramScenarioConfig {
            base: base(name, 50.0, Some("200ms")),
            buckets: None,
            distribution: DistributionConfig::Exponential { rate: 10.0 },
            observations_per_tick: Some(50),
            mean_shift_per_sec: None,
            seed: Some(42),
            encoder: EncoderConfig::PrometheusText { precision: None },
        })
    }

    /// Short-lived summary scenario over a normal distribution.
    fn summary_entry(name: &str) -> ScenarioEntry {
        ScenarioEntry::Summary(SummaryScenarioConfig {
            base: base(name, 50.0, Some("200ms")),
            quantiles: None,
            distribution: DistributionConfig::Normal {
                mean: 0.1,
                stddev: 0.02,
            },
            observations_per_tick: Some(50),
            mean_shift_per_sec: None,
            seed: Some(42),
            encoder: EncoderConfig::PrometheusText { precision: None },
        })
    }

    // ---- validate_entry ----------------------------------------------------

    #[test]
    fn validate_entry_accepts_valid_metrics_entry() {
        let entry = metrics_entry("valid_metrics");
        let result = validate_entry(&entry);
        assert!(
            result.is_ok(),
            "validate_entry must accept a valid metrics entry: {result:?}"
        );
    }

    #[test]
    fn validate_entry_accepts_valid_logs_entry() {
        let entry = logs_entry("valid_logs");
        let result = validate_entry(&entry);
        assert!(
            result.is_ok(),
            "validate_entry must accept a valid logs entry: {result:?}"
        );
    }

    #[test]
    fn validate_entry_rejects_metrics_entry_with_zero_rate() {
        let entry = metrics_entry_with("bad_metrics", 0.0, Some("1s"));
        let result = validate_entry(&entry);
        assert!(
            result.is_err(),
            "validate_entry must reject a metrics entry with rate=0"
        );
    }

    #[test]
    fn validate_entry_rejects_metrics_entry_with_negative_rate() {
        let entry = metrics_entry_with("neg_rate", -5.0, Some("1s"));
        let result = validate_entry(&entry);
        assert!(
            result.is_err(),
            "validate_entry must reject negative rate for metrics entry"
        );
    }

    #[test]
    fn validate_entry_rejects_logs_entry_with_zero_rate() {
        let entry = logs_entry_with("bad_logs", 0.0, Some("1s"), "msg", 0);
        let result = validate_entry(&entry);
        assert!(
            result.is_err(),
            "validate_entry must reject a logs entry with rate=0"
        );
    }

    #[test]
    fn validate_entry_rejects_metrics_entry_with_bad_duration() {
        let entry = metrics_entry_with("bad_dur", 10.0, Some("not_a_duration"));
        let result = validate_entry(&entry);
        assert!(
            result.is_err(),
            "validate_entry must reject an invalid duration string"
        );
    }

    #[test]
    fn validate_entry_accepts_valid_histogram_entry() {
        let entry = histogram_entry("valid_histogram");
        let result = validate_entry(&entry);
        assert!(
            result.is_ok(),
            "validate_entry must accept a valid histogram entry: {result:?}"
        );
    }

    #[test]
    fn validate_entry_accepts_valid_summary_entry() {
        let entry = summary_entry("valid_summary");
        let result = validate_entry(&entry);
        assert!(
            result.is_ok(),
            "validate_entry must accept a valid summary entry: {result:?}"
        );
    }

    // ---- launch / stop / join lifecycle ------------------------------------

    #[test]
    fn launch_scenario_metrics_returns_running_handle() {
        let shutdown = Arc::new(AtomicBool::new(true));
        let entry = metrics_entry_indefinite("launch_metrics");
        let mut handle =
            launch_scenario("test-id-1".to_string(), entry, Arc::clone(&shutdown), None)
                .expect("launch must succeed for valid metrics entry");
        assert!(
            handle.is_running(),
            "handle must report is_running() == true immediately after launch"
        );
        assert_eq!(handle.id, "test-id-1");
        assert_eq!(handle.name, "launch_metrics");
        handle.stop();
        handle
            .join(Some(Duration::from_secs(2)))
            .expect("join must succeed after stop");
    }

    #[test]
    fn launch_scenario_logs_returns_running_handle() {
        let shutdown = Arc::new(AtomicBool::new(true));
        let entry = logs_entry_indefinite("launch_logs");
        let mut handle =
            launch_scenario("test-id-2".to_string(), entry, Arc::clone(&shutdown), None)
                .expect("launch must succeed for valid logs entry");
        assert!(
            handle.is_running(),
            "handle must report is_running() == true for a launched logs scenario"
        );
        assert_eq!(handle.name, "launch_logs");
        handle.stop();
        handle
            .join(Some(Duration::from_secs(2)))
            .expect("join must succeed after stop");
    }

    #[test]
    fn stop_then_join_metrics_scenario_returns_ok() {
        let shutdown = Arc::new(AtomicBool::new(true));
        let entry = metrics_entry_indefinite("stop_join_metrics");
        let mut handle = launch_scenario("id-stop-1".to_string(), entry, shutdown, None)
            .expect("launch must succeed");
        handle.stop();
        let result = handle.join(Some(Duration::from_secs(3)));
        assert!(
            result.is_ok(),
            "join after stop must return Ok for metrics: {result:?}"
        );
        assert!(
            !handle.is_running(),
            "is_running must be false after stop + join"
        );
    }

    #[test]
    fn stop_then_join_logs_scenario_returns_ok() {
        let shutdown = Arc::new(AtomicBool::new(true));
        let entry = logs_entry_indefinite("stop_join_logs");
        let mut handle = launch_scenario("id-stop-2".to_string(), entry, shutdown, None)
            .expect("launch must succeed");
        handle.stop();
        let result = handle.join(Some(Duration::from_secs(3)));
        assert!(
            result.is_ok(),
            "join after stop must return Ok for logs: {result:?}"
        );
    }

    #[test]
    fn finite_duration_scenario_exits_naturally_and_join_returns_ok() {
        let shutdown = Arc::new(AtomicBool::new(true));
        let entry = metrics_entry("natural_exit");
        let mut handle = launch_scenario("id-natural".to_string(), entry, shutdown, None)
            .expect("launch must succeed");
        let result = handle.join(Some(Duration::from_secs(3)));
        assert!(
            result.is_ok(),
            "natural exit must result in Ok join: {result:?}"
        );
    }

    #[test]
    fn launch_histogram_scenario_runs_to_completion() {
        let shutdown = Arc::new(AtomicBool::new(true));
        let entry = histogram_entry("launch_histogram");
        let mut handle = launch_scenario(
            "id-histogram".to_string(),
            entry,
            Arc::clone(&shutdown),
            None,
        )
        .expect("launch must succeed for valid histogram entry");
        let result = handle.join(Some(Duration::from_secs(5)));
        assert!(
            result.is_ok(),
            "histogram scenario must run to completion: {result:?}"
        );
    }

    #[test]
    fn launch_summary_scenario_runs_to_completion() {
        let shutdown = Arc::new(AtomicBool::new(true));
        let entry = summary_entry("launch_summary");
        let mut handle =
            launch_scenario("id-summary".to_string(), entry, Arc::clone(&shutdown), None)
                .expect("launch must succeed for valid summary entry");
        let result = handle.join(Some(Duration::from_secs(5)));
        assert!(
            result.is_ok(),
            "summary scenario must run to completion: {result:?}"
        );
    }

    // ---- handle state & stats ----------------------------------------------

    #[test]
    fn stats_snapshot_shows_nonzero_events_after_brief_run() {
        use std::thread;
        let shutdown = Arc::new(AtomicBool::new(true));
        let entry = metrics_entry_with("stats_test", 500.0, None);
        let mut handle =
            launch_scenario("id-stats".to_string(), entry, Arc::clone(&shutdown), None)
                .expect("launch must succeed");
        thread::sleep(Duration::from_millis(200));
        let snap = handle.stats_snapshot();
        assert!(
            snap.total_events > 0,
            "stats_snapshot must show non-zero total_events after running for 200ms, got {}",
            snap.total_events
        );
        assert!(
            snap.bytes_emitted > 0,
            "stats_snapshot must show non-zero bytes_emitted, got {}",
            snap.bytes_emitted
        );
        handle.stop();
        handle.join(Some(Duration::from_secs(2))).ok();
    }

    #[test]
    fn stats_snapshot_shows_nonzero_events_for_logs_scenario() {
        use std::thread;
        let shutdown = Arc::new(AtomicBool::new(true));
        let entry = logs_entry_with("logs_stats_test", 500.0, None, "stat tracking log", 42);
        let mut handle = launch_scenario(
            "id-log-stats".to_string(),
            entry,
            Arc::clone(&shutdown),
            None,
        )
        .expect("launch must succeed");
        thread::sleep(Duration::from_millis(200));
        let snap = handle.stats_snapshot();
        assert!(
            snap.total_events > 0,
            "log scenario stats must show non-zero total_events, got {}",
            snap.total_events
        );
        handle.stop();
        handle.join(Some(Duration::from_secs(2))).ok();
    }

    #[test]
    fn elapsed_is_positive_after_launch() {
        let shutdown = Arc::new(AtomicBool::new(true));
        let entry = metrics_entry_indefinite("elapsed_test");
        let mut handle = launch_scenario("id-elapsed".to_string(), entry, shutdown, None)
            .expect("launch must succeed");
        let d = handle.elapsed();
        assert!(
            d >= Duration::ZERO,
            "elapsed must be non-negative right after launch, got {d:?}"
        );
        handle.stop();
        handle.join(None).ok();
    }

    #[test]
    fn launch_scenario_resets_shutdown_flag_to_true() {
        // Flag starts false (as after a previous stop); launch must flip it
        // back to the run-while-true state.
        let shutdown = Arc::new(AtomicBool::new(false));
        let entry = metrics_entry_indefinite("flag_reset");
        let mut handle = launch_scenario("id-flag".to_string(), entry, Arc::clone(&shutdown), None)
            .expect("launch must succeed");
        assert!(
            shutdown.load(Ordering::SeqCst),
            "launch_scenario must reset the shutdown flag to true"
        );
        handle.stop();
        handle.join(None).ok();
    }

    // ---- start_delay behavior ----------------------------------------------

    #[test]
    fn launch_with_no_start_delay_emits_events_immediately() {
        use std::thread;
        let shutdown = Arc::new(AtomicBool::new(true));
        let entry = metrics_entry_with("no_delay_test", 500.0, None);
        let mut handle =
            launch_scenario("id-nodelay".to_string(), entry, Arc::clone(&shutdown), None)
                .expect("launch must succeed");
        thread::sleep(Duration::from_millis(200));
        let snap = handle.stats_snapshot();
        assert!(
            snap.total_events > 0,
            "with no start_delay, events should be emitted within 200ms, got {}",
            snap.total_events
        );
        handle.stop();
        handle.join(Some(Duration::from_secs(2))).ok();
    }

    #[test]
    fn launch_with_start_delay_does_not_emit_during_delay() {
        use std::thread;
        let shutdown = Arc::new(AtomicBool::new(true));
        let entry = metrics_entry_with("delay_test", 500.0, Some("1s"));
        let delay = Duration::from_millis(500);
        let mut handle = launch_scenario(
            "id-delay".to_string(),
            entry,
            Arc::clone(&shutdown),
            Some(delay),
        )
        .expect("launch must succeed");
        thread::sleep(Duration::from_millis(100));
        let snap_early = handle.stats_snapshot();
        assert_eq!(
            snap_early.total_events, 0,
            "during start_delay, total_events should be 0, got {}",
            snap_early.total_events
        );
        thread::sleep(Duration::from_millis(600));
        let snap_after = handle.stats_snapshot();
        assert!(
            snap_after.total_events > 0,
            "after start_delay expires, events should be emitted, got {}",
            snap_after.total_events
        );
        handle.stop();
        handle.join(Some(Duration::from_secs(2))).ok();
    }

    #[test]
    fn shutdown_during_start_delay_exits_cleanly() {
        use std::thread;
        use std::time::Instant;
        let shutdown = Arc::new(AtomicBool::new(true));
        let entry = metrics_entry_indefinite("shutdown_delay");
        // Delay far longer than the join timeout: a prompt exit proves the
        // delay wait is interruptible.
        let delay = Duration::from_secs(10);
        let mut handle = launch_scenario(
            "id-shutdown-delay".to_string(),
            entry,
            Arc::clone(&shutdown),
            Some(delay),
        )
        .expect("launch must succeed");
        thread::sleep(Duration::from_millis(100));
        handle.stop();
        let start = Instant::now();
        let result = handle.join(Some(Duration::from_secs(2)));
        let elapsed = start.elapsed();
        assert!(
            result.is_ok(),
            "join after shutdown during delay must return Ok: {result:?}"
        );
        assert!(
            elapsed < Duration::from_secs(2),
            "thread must exit promptly after shutdown during delay, took {:?}",
            elapsed
        );
        let snap = handle.stats_snapshot();
        assert_eq!(
            snap.total_events, 0,
            "no events should be emitted when shutdown during delay, got {}",
            snap.total_events
        );
    }

    #[test]
    fn launch_logs_with_start_delay_does_not_emit_during_delay() {
        use std::thread;
        let shutdown = Arc::new(AtomicBool::new(true));
        let entry = logs_entry_with("log_delay_test", 500.0, Some("1s"), "delayed log", 0);
        let delay = Duration::from_millis(500);
        let mut handle = launch_scenario(
            "id-log-delay".to_string(),
            entry,
            Arc::clone(&shutdown),
            Some(delay),
        )
        .expect("launch must succeed");
        thread::sleep(Duration::from_millis(100));
        let snap_early = handle.stats_snapshot();
        assert_eq!(
            snap_early.total_events, 0,
            "logs scenario should not emit during start_delay, got {}",
            snap_early.total_events
        );
        thread::sleep(Duration::from_millis(600));
        let snap_after = handle.stats_snapshot();
        assert!(
            snap_after.total_events > 0,
            "logs scenario should emit after delay, got {}",
            snap_after.total_events
        );
        handle.stop();
        handle.join(Some(Duration::from_secs(2))).ok();
    }
}