use std::{borrow::Cow, io, time::SystemTime};
use metrique_writer_core::{
Distribution, Entry, EntryWriter, MetricFlags, Observation, Unit, ValidationError, Value,
ValueWriter, format::Format, stream::IoStreamError,
};
/// Selects how `LocalFormat` renders collected entries.
#[derive(Debug, Clone, Copy, Default)]
#[non_exhaustive]
pub enum OutputStyle {
    /// Indented, human-readable text (the default).
    #[default]
    Pretty,
    /// One JSON object per entry.
    #[non_exhaustive]
    Json {
        /// When true, emit single-line JSON instead of pretty-printed JSON.
        compact: bool,
    },
    /// A two-column `Name | Value` markdown table.
    MarkdownTable,
}
impl OutputStyle {
    /// Human-readable indented text output.
    pub fn pretty() -> Self {
        OutputStyle::Pretty
    }

    /// Pretty-printed (multi-line) JSON output.
    pub fn json() -> Self {
        OutputStyle::Json { compact: false }
    }

    /// Single-line JSON output.
    pub fn compact_json() -> Self {
        OutputStyle::Json { compact: true }
    }

    /// Markdown-table output.
    pub fn markdown_table() -> Self {
        OutputStyle::MarkdownTable
    }
}
/// A percentile to report for histogram fields: a display label plus the
/// fraction (0.0..=1.0) of samples it covers.
#[derive(Debug, Clone)]
pub struct Percentile {
    label: String,
    fraction: f64,
}
impl Percentile {
    /// Builds a percentile from a fraction, clamping it to `[0.0, 1.0]`.
    ///
    /// Labels: `0.0` -> `"min"`, `1.0` -> `"max"`, otherwise `"p<percent>"`
    /// (integral percents print without a decimal point, e.g. `p99` / `p99.9`).
    pub fn new(fraction: f64) -> Self {
        let fraction = fraction.clamp(0.0, 1.0);
        let pct = fraction * 100.0;
        let label = match fraction {
            f if f == 0.0 => "min".to_owned(),
            f if f == 1.0 => "max".to_owned(),
            _ if pct == pct.trunc() => format!("p{}", pct as u64),
            _ => format!("p{pct}"),
        };
        Self { label, fraction }
    }
}
/// The percentile set reported when the caller does not supply one:
/// min, median, p99, p99.9, and max.
fn default_percentiles() -> Vec<Percentile> {
    [0.0, 0.5, 0.99, 0.999, 1.0]
        .into_iter()
        .map(Percentile::new)
        .collect()
}
/// A `Format` implementation for local/development output: pretty text, JSON,
/// or a markdown table, with configurable histogram percentiles.
#[derive(Debug, Clone)]
pub struct LocalFormat {
    // How entries are rendered; see `OutputStyle`.
    style: OutputStyle,
    // Percentiles reported for histogram (multi-sample) metric fields.
    percentiles: Vec<Percentile>,
}
impl LocalFormat {
    /// Creates a formatter with the given output style and the default
    /// percentile set (min, p50, p99, p99.9, max).
    pub fn new(style: OutputStyle) -> Self {
        Self {
            style,
            percentiles: default_percentiles(),
        }
    }

    /// Convenience constructor for human-readable indented text output.
    /// (Added for consistency with `OutputStyle::pretty`.)
    pub fn pretty() -> Self {
        Self::new(OutputStyle::pretty())
    }

    /// Convenience constructor for pretty-printed JSON output.
    pub fn json() -> Self {
        Self::new(OutputStyle::json())
    }

    /// Convenience constructor for single-line JSON output.
    pub fn compact_json() -> Self {
        Self::new(OutputStyle::compact_json())
    }

    /// Convenience constructor for markdown-table output.
    /// (Added for consistency with `OutputStyle::markdown_table`.)
    pub fn markdown_table() -> Self {
        Self::new(OutputStyle::markdown_table())
    }

    /// Replaces the percentiles reported for histogram fields (builder style).
    /// An empty vec suppresses the per-percentile lines, leaving only counts.
    pub fn percentiles(mut self, percentiles: Vec<Percentile>) -> Self {
        self.percentiles = percentiles;
        self
    }
}
impl Format for LocalFormat {
    /// Collects the entry's fields into an intermediate `Collector`, then
    /// renders them with the writer matching the configured style.
    fn format(
        &mut self,
        entry: &impl Entry,
        output: &mut impl io::Write,
    ) -> Result<(), IoStreamError> {
        let mut collected = Collector::default();
        entry.write(&mut collected);
        let rendered = match self.style {
            OutputStyle::Pretty => write_pretty(output, &collected, &self.percentiles),
            OutputStyle::Json { compact } => {
                write_json(output, &collected, &self.percentiles, compact)
            }
            OutputStyle::MarkdownTable => {
                write_markdown_table(output, &collected, &self.percentiles)
            }
        };
        rendered?;
        Ok(())
    }
}
/// Intermediate representation of one formatted entry: an optional timestamp
/// plus the fields in the order the entry wrote them.
#[derive(Default)]
struct Collector {
    // Set via `EntryWriter::timestamp`; omitted from output when absent.
    timestamp: Option<SystemTime>,
    fields: Vec<Field>,
}
/// A single named field captured from an entry.
struct Field {
    name: String,
    data: FieldData,
}
/// Captured payload of a field: either a plain string or a metric sample set.
enum FieldData {
    String(String),
    Metric {
        // Collapsed observations; a `Repeated` observation becomes one
        // weighted mean (see `FieldValueWriter::metric`).
        observations: Vec<WeightedObservation>,
        unit: Unit,
        dimensions: Vec<(String, String)>,
        // True when the value carried the `Distribution` flag, which forces
        // histogram display even for a single sample.
        is_distribution: bool,
    },
}
/// One observed value with a sample weight (weight 1 for single observations).
#[derive(Debug, Clone, Copy)]
struct WeightedObservation {
    value: f64,
    weight: u64,
}
impl<'a> EntryWriter<'a> for Collector {
    /// Records the entry timestamp; the last call wins.
    fn timestamp(&mut self, timestamp: SystemTime) {
        self.timestamp = Some(timestamp);
    }

    /// Captures one named value. Values that produce no data (e.g. empty
    /// metrics) are dropped rather than recorded as empty fields.
    fn value(&mut self, name: impl Into<Cow<'a, str>>, value: &(impl Value + ?Sized)) {
        let field_name = name.into().into_owned();
        let mut captured = None;
        value.write(FieldValueWriter(&mut captured));
        if let Some(data) = captured {
            self.fields.push(Field {
                name: field_name,
                data,
            });
        }
    }

    /// Entry-level configuration is not used by this format.
    fn config(&mut self, _config: &'a dyn metrique_writer_core::EntryConfig) {}
}
/// `ValueWriter` that captures a single value into an `Option<FieldData>`.
struct FieldValueWriter<'a>(&'a mut Option<FieldData>);
impl ValueWriter for FieldValueWriter<'_> {
    fn string(self, value: &str) {
        *self.0 = Some(FieldData::String(value.to_owned()));
    }
    // Fixed: the parameter was named `_flags` (signalling "unused") even though
    // it is read below; renamed to `flags`.
    fn metric<'a>(
        self,
        distribution: impl IntoIterator<Item = Observation>,
        unit: Unit,
        dimensions: impl IntoIterator<Item = (&'a str, &'a str)>,
        flags: MetricFlags<'_>,
    ) {
        // The `Distribution` flag forces histogram rendering even when only a
        // single weighted sample is present.
        let is_distribution = flags.downcast::<Distribution>().is_some();
        let mut observations = Vec::new();
        for obs in distribution {
            match obs {
                Observation::Unsigned(v) => observations.push(WeightedObservation {
                    value: v as f64,
                    weight: 1,
                }),
                Observation::Floating(v) => observations.push(WeightedObservation {
                    value: v,
                    weight: 1,
                }),
                // Collapse a repeated observation into its mean, weighted by
                // the number of occurrences.
                Observation::Repeated { total, occurrences } if occurrences > 0 => {
                    observations.push(WeightedObservation {
                        value: total / occurrences as f64,
                        weight: occurrences,
                    });
                }
                // Zero-occurrence repeats and unknown future variants are dropped.
                _ => {}
            }
        }
        let dimensions = dimensions
            .into_iter()
            .map(|(k, v)| (k.to_owned(), v.to_owned()))
            .collect();
        *self.0 = Some(FieldData::Metric {
            observations,
            unit,
            dimensions,
            is_distribution,
        });
    }
    fn error(self, error: ValidationError) {
        // Surface validation failures as a string field instead of dropping them.
        *self.0 = Some(FieldData::String(format!("ERROR: {error}")));
    }
}
/// Computes the requested percentiles over weighted observations using the
/// nearest-rank method. Returns an empty vec for empty input; labels borrow
/// from `percentiles`.
fn compute_percentiles<'a>(
    observations: &[WeightedObservation],
    percentiles: &'a [Percentile],
) -> Vec<(&'a str, f64)> {
    if observations.is_empty() {
        return Vec::new();
    }
    let mut sorted: Vec<WeightedObservation> = observations.to_vec();
    // `total_cmp` is a true total order over f64 (NaN-safe). The previous
    // `partial_cmp(..).unwrap_or(Equal)` comparator is inconsistent when NaN
    // is present, which yields an unspecified order and may panic in newer
    // `sort_by` implementations. Stability is irrelevant for plain values,
    // so the faster non-allocating unstable sort is used.
    sorted.sort_unstable_by(|a, b| a.value.total_cmp(&b.value));
    let total_weight: u64 = sorted.iter().map(|o| o.weight).sum();
    percentiles
        .iter()
        .map(|p| {
            // Nearest-rank target: the smallest cumulative weight covering
            // the requested fraction of samples.
            let target = if p.fraction <= 0.0 {
                0
            } else if p.fraction >= 1.0 {
                total_weight
            } else {
                (total_weight as f64 * p.fraction).ceil() as u64
            };
            let mut cumulative = 0u64;
            let mut value = sorted[0].value;
            for obs in &sorted {
                cumulative += obs.weight;
                value = obs.value;
                if cumulative >= target {
                    break;
                }
            }
            (p.label.as_str(), value)
        })
        .collect()
}
/// Sum of all observation weights, i.e. the logical sample count.
fn total_count(observations: &[WeightedObservation]) -> u64 {
    observations.iter().fold(0u64, |acc, o| acc + o.weight)
}
/// Renders one value for human-readable output, picking a natural display
/// scale from the metric's unit.
fn format_pretty_value(value: f64, unit: Unit) -> String {
    match unit {
        Unit::Second(scale) => {
            // Normalize the stored value to seconds, then choose s/ms/μs.
            let seconds = value / scale.reduction_factor() as f64;
            format_duration_smart(seconds)
        }
        Unit::Byte(scale) => {
            // Normalize the stored value to bytes, then choose B/KB/MB/GB.
            let bytes = value * scale.expansion_factor() as f64;
            format_bytes_smart(bytes)
        }
        Unit::Percent => format!("{value:.1}%"),
        Unit::Count => {
            // Integral counts print without a decimal point.
            if value == value.floor() {
                format!("{}", value as i64)
            } else {
                format!("{value:.2}")
            }
        }
        Unit::None => {
            // Integral values within i64 range print without a decimal point.
            let integral = value == value.floor() && value.abs() < i64::MAX as f64;
            if integral {
                format!("{}", value as i64)
            } else {
                format!("{value:.3}")
            }
        }
        // Any other unit: print the value with the unit name appended.
        _ => format!("{value:.3} {unit}"),
    }
}
/// Formats a duration in seconds with the largest unit (s, ms, μs) that keeps
/// the magnitude at or above 1, with three decimal places. Zero prints as "0s".
fn format_duration_smart(seconds: f64) -> String {
    if seconds == 0.0 {
        return "0s".to_owned();
    }
    let magnitude = seconds.abs();
    if magnitude >= 1.0 {
        return format!("{seconds:.3}s");
    }
    if magnitude >= 0.001 {
        return format!("{:.3}ms", seconds * 1_000.0);
    }
    format!("{:.3}μs", seconds * 1_000_000.0)
}
/// Formats a byte count with decimal (base-1000) prefixes: GB/MB/KB with two
/// decimals, plain integer bytes below one KB. Zero prints as "0B".
fn format_bytes_smart(bytes: f64) -> String {
    if bytes == 0.0 {
        return "0B".to_owned();
    }
    let magnitude = bytes.abs();
    match magnitude {
        m if m >= 1_000_000_000.0 => format!("{:.2}GB", bytes / 1_000_000_000.0),
        m if m >= 1_000_000.0 => format!("{:.2}MB", bytes / 1_000_000.0),
        m if m >= 1_000.0 => format!("{:.2}KB", bytes / 1_000.0),
        _ => format!("{bytes:.0}B"),
    }
}
/// Converts an f64 to a JSON number, preferring an integer representation for
/// integral values within i64 range. Non-finite values become JSON null
/// (`Number::from_f64` rejects NaN and infinities).
fn format_json_value(value: f64) -> serde_json::Value {
    let integral = value == value.floor() && value.abs() < i64::MAX as f64;
    if integral {
        return serde_json::Value::Number((value as i64).into());
    }
    match serde_json::Number::from_f64(value) {
        Some(n) => serde_json::Value::Number(n),
        None => serde_json::Value::Null,
    }
}
/// Renders a timestamp via jiff's `Timestamp` display, falling back to Debug
/// formatting for timestamps jiff cannot represent.
fn format_timestamp(ts: SystemTime) -> String {
    match jiff::Timestamp::try_from(ts) {
        Ok(t) => t.to_string(),
        Err(_) => format!("{ts:?}"),
    }
}
/// Writes the collected entry as indented human-readable text, preceded by a
/// `---` separator line.
///
/// Single-sample metrics print inline (`name: value`); metrics flagged as
/// distributions, or with more than one weighted sample, print a sample count
/// followed by one line per configured percentile.
fn write_pretty(
    output: &mut impl io::Write,
    collector: &Collector,
    percentiles: &[Percentile],
) -> io::Result<()> {
    writeln!(output, "---")?;
    if let Some(ts) = collector.timestamp {
        writeln!(output, " timestamp: {}", format_timestamp(ts))?;
    }
    for field in &collector.fields {
        match &field.data {
            FieldData::String(s) => {
                writeln!(output, " {}: {s}", field.name)?;
            }
            FieldData::Metric {
                observations,
                unit,
                dimensions,
                is_distribution,
            } => {
                // Dimensions become a bracketed suffix on the field's header line.
                if !dimensions.is_empty() {
                    let dims: Vec<String> =
                        dimensions.iter().map(|(k, v)| format!("{k}={v}")).collect();
                    writeln!(output, " {} [{}]:", field.name, dims.join(", "))?;
                }
                // Histogram display when flagged as a distribution or when more
                // than one weighted sample was recorded.
                let show_histogram = *is_distribution || total_count(observations) > 1;
                if !show_histogram {
                    let val = observations.first().map(|o| o.value).unwrap_or(0.0);
                    if dimensions.is_empty() {
                        writeln!(
                            output,
                            " {}: {}",
                            field.name,
                            format_pretty_value(val, *unit)
                        )?;
                    } else {
                        // The name (with dimensions) was already printed above.
                        writeln!(output, " {}", format_pretty_value(val, *unit))?;
                    }
                } else {
                    let count = total_count(observations);
                    if dimensions.is_empty() {
                        writeln!(output, " {} ({count} samples):", field.name)?;
                    } else {
                        writeln!(output, " ({count} samples):")?;
                    }
                    for (label, val) in compute_percentiles(observations, percentiles) {
                        writeln!(output, " {label}: {}", format_pretty_value(val, *unit))?;
                    }
                }
            }
        }
    }
    Ok(())
}
/// Writes the collected entry as a single JSON object, pretty-printed or
/// compact.
///
/// Histogram metrics become nested objects of `count` plus one key per
/// percentile. Metrics also gain `<name>_unit` / `<name>_dimensions`
/// companion keys when a unit or dimensions are present.
fn write_json(
    output: &mut impl io::Write,
    collector: &Collector,
    percentiles: &[Percentile],
    compact: bool,
) -> io::Result<()> {
    let mut map = serde_json::Map::new();
    if let Some(ts) = collector.timestamp {
        // NOTE(review): pre-epoch timestamps fail `duration_since` and are
        // silently omitted from the JSON output — confirm that is intended.
        if let Ok(dur) = ts.duration_since(SystemTime::UNIX_EPOCH) {
            map.insert("timestamp".to_owned(), format_json_value(dur.as_secs_f64()));
            map.insert(
                "timestamp_iso".to_owned(),
                serde_json::Value::String(format_timestamp(ts)),
            );
        }
    }
    for field in &collector.fields {
        match &field.data {
            FieldData::String(s) => {
                map.insert(field.name.clone(), serde_json::Value::String(s.clone()));
            }
            FieldData::Metric {
                observations,
                unit,
                dimensions,
                is_distribution,
            } => {
                // Same display rule as the pretty writer: histogram when
                // flagged as a distribution or when more than one sample.
                let show_histogram = *is_distribution || total_count(observations) > 1;
                if !show_histogram {
                    let val = observations.first().map(|o| o.value).unwrap_or(0.0);
                    map.insert(field.name.clone(), format_json_value(val));
                } else {
                    let pcts = compute_percentiles(observations, percentiles);
                    let count = total_count(observations);
                    let mut pct_map = serde_json::Map::new();
                    pct_map.insert("count".to_owned(), serde_json::Value::Number(count.into()));
                    for (label, val) in pcts {
                        pct_map.insert(label.to_owned(), format_json_value(val));
                    }
                    map.insert(field.name.clone(), serde_json::Value::Object(pct_map));
                }
                // Unit is emitted as a sibling key so the metric value stays numeric.
                if *unit != Unit::None {
                    map.insert(
                        format!("{}_unit", field.name),
                        serde_json::Value::String(unit.to_string()),
                    );
                }
                if !dimensions.is_empty() {
                    let dim_map: serde_json::Map<String, serde_json::Value> = dimensions
                        .iter()
                        .map(|(k, v)| (k.clone(), serde_json::Value::String(v.clone())))
                        .collect();
                    map.insert(
                        format!("{}_dimensions", field.name),
                        serde_json::Value::Object(dim_map),
                    );
                }
            }
        }
    }
    let obj = serde_json::Value::Object(map);
    let json = if compact {
        serde_json::to_string(&obj)
    } else {
        serde_json::to_string_pretty(&obj)
    }
    .map_err(io::Error::other)?;
    writeln!(output, "{json}")?;
    Ok(())
}
/// Writes the collected entry as a two-column `Name | Value` markdown table.
///
/// Dimensions and percentiles are flattened into dotted row names
/// (`field.dim`, `field.p99`, `field.count`). Column widths are sized to the
/// longest row so the table is aligned in plain text.
fn write_markdown_table(
    output: &mut impl io::Write,
    collector: &Collector,
    percentiles: &[Percentile],
) -> io::Result<()> {
    // First pass: flatten everything into (name, value) string rows.
    let mut rows: Vec<(String, String)> = Vec::new();
    if let Some(ts) = collector.timestamp {
        rows.push(("timestamp".to_owned(), format_timestamp(ts)));
    }
    for field in &collector.fields {
        match &field.data {
            FieldData::String(s) => {
                rows.push((field.name.clone(), s.clone()));
            }
            FieldData::Metric {
                observations,
                unit,
                dimensions,
                is_distribution,
            } => {
                for (k, v) in dimensions {
                    rows.push((format!("{}.{k}", field.name), v.clone()));
                }
                // Same display rule as the other writers: histogram when
                // flagged as a distribution or when more than one sample.
                let show_histogram = *is_distribution || total_count(observations) > 1;
                if !show_histogram {
                    let val = observations.first().map(|o| o.value).unwrap_or(0.0);
                    rows.push((field.name.clone(), format_pretty_value(val, *unit)));
                } else {
                    for (label, val) in compute_percentiles(observations, percentiles) {
                        rows.push((
                            format!("{}.{label}", field.name),
                            format_pretty_value(val, *unit),
                        ));
                    }
                    rows.push((
                        format!("{}.count", field.name),
                        total_count(observations).to_string(),
                    ));
                }
            }
        }
    }
    // Second pass: size columns to the widest cell (minimums fit the headers
    // "Name" and "Value").
    let name_width = rows.iter().map(|(n, _)| n.len()).max().unwrap_or(4).max(4);
    let value_width = rows.iter().map(|(_, v)| v.len()).max().unwrap_or(5).max(5);
    writeln!(
        output,
        "| {:<name_width$} | {:<value_width$} |",
        "Name", "Value"
    )?;
    // Separator row: left-aligned padding with '-' as the fill character.
    writeln!(output, "| {:-<name_width$} | {:-<value_width$} |", "", "")?;
    for (name, value) in &rows {
        writeln!(
            output,
            "| {:<name_width$} | {:<value_width$} |",
            name, value
        )?;
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use metrique_writer_core::unit::{self, NegativeScale, UnitTag};

    // Single-sample metric fields render inline as `name: value` in pretty mode.
    #[test]
    fn test_pretty_single_value() {
        let mut format = LocalFormat::new(OutputStyle::Pretty);
        let entry = SimpleEntry {
            name: "GetUser",
            latency_ms: 42.5,
        };
        let mut buf = Vec::new();
        format.format(&entry, &mut buf).unwrap();
        let output = String::from_utf8(buf).unwrap();
        assert!(output.contains("operation: GetUser"));
        assert!(output.contains("latency: 42.500ms"));
    }

    // JSON output parses as valid JSON and carries the `<name>_unit` sibling key.
    #[test]
    fn test_json_output() {
        let mut format = LocalFormat::json();
        let entry = SimpleEntry {
            name: "GetUser",
            latency_ms: 42.5,
        };
        let mut buf = Vec::new();
        format.format(&entry, &mut buf).unwrap();
        let output = String::from_utf8(buf).unwrap();
        let parsed: serde_json::Value = serde_json::from_str(output.trim()).unwrap();
        assert_eq!(parsed["operation"], "GetUser");
        assert_eq!(parsed["latency_unit"], "Milliseconds");
    }

    // Markdown mode produces a table with a header row and one row per field.
    #[test]
    fn test_markdown_table() {
        let mut format = LocalFormat::new(OutputStyle::MarkdownTable);
        let entry = SimpleEntry {
            name: "GetUser",
            latency_ms: 42.5,
        };
        let mut buf = Vec::new();
        format.format(&entry, &mut buf).unwrap();
        let output = String::from_utf8(buf).unwrap();
        assert!(output.contains("| Name"));
        assert!(output.contains("operation"));
    }

    // Multi-sample metrics render as a histogram with the default percentile labels.
    #[test]
    fn test_histogram_percentiles() {
        let mut format = LocalFormat::new(OutputStyle::Pretty);
        let entry = HistogramEntry {
            values: vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0],
        };
        let mut buf = Vec::new();
        format.format(&entry, &mut buf).unwrap();
        let output = String::from_utf8(buf).unwrap();
        assert!(output.contains("10 samples"));
        assert!(output.contains("min:"));
        assert!(output.contains("max:"));
        assert!(output.contains("p50:"));
        assert!(output.contains("p99:"));
    }

    // Unit selection thresholds for duration formatting (s / ms / μs).
    #[test]
    fn test_smart_duration_display() {
        assert_eq!(format_duration_smart(0.0), "0s");
        assert_eq!(format_duration_smart(1.5), "1.500s");
        assert_eq!(format_duration_smart(0.042), "42.000ms");
        assert_eq!(format_duration_smart(0.000_042), "42.000μs");
    }

    // Unit selection thresholds for byte formatting (B / KB / MB / GB, base 1000).
    #[test]
    fn test_smart_bytes_display() {
        assert_eq!(format_bytes_smart(0.0), "0B");
        assert_eq!(format_bytes_smart(512.0), "512B");
        assert_eq!(format_bytes_smart(1_500.0), "1.50KB");
        assert_eq!(format_bytes_smart(2_500_000.0), "2.50MB");
        assert_eq!(format_bytes_smart(3_000_000_000.0), "3.00GB");
    }

    // Nearest-rank percentiles over 1..=100: min/max exact, p50 within one rank.
    #[test]
    fn test_percentile_computation() {
        let data: Vec<WeightedObservation> = (1..=100)
            .map(|i| WeightedObservation {
                value: i as f64,
                weight: 1,
            })
            .collect();
        let percentiles = default_percentiles();
        let pcts = compute_percentiles(&data, &percentiles);
        assert_eq!(pcts[0], ("min", 1.0));
        assert_eq!(pcts[4], ("max", 100.0));
        assert!((pcts[1].1 - 50.0).abs() <= 1.0);
    }

    // Fixture: an entry with one string field and one single-sample metric.
    struct SimpleEntry {
        name: &'static str,
        latency_ms: f64,
    }
    impl Entry for SimpleEntry {
        fn write<'a>(&'a self, writer: &mut impl EntryWriter<'a>) {
            writer.value("operation", self.name);
            writer.value("latency", &MillisValue(self.latency_ms));
        }
    }
    // A single floating observation in milliseconds, no flags.
    struct MillisValue(f64);
    impl Value for MillisValue {
        fn write(&self, writer: impl ValueWriter) {
            writer.metric(
                [Observation::Floating(self.0)],
                Unit::Second(NegativeScale::Milli),
                [],
                MetricFlags::empty(),
            );
        }
    }

    // Fixture: an entry emitting many observations for one metric field.
    struct HistogramEntry {
        values: Vec<f64>,
    }
    impl Entry for HistogramEntry {
        fn write<'a>(&'a self, writer: &mut impl EntryWriter<'a>) {
            writer.value("latency", &HistogramValue(&self.values));
        }
    }
    struct HistogramValue<'a>(&'a [f64]);
    impl Value for HistogramValue<'_> {
        fn write(&self, writer: impl ValueWriter) {
            writer.metric(
                self.0.iter().copied().map(Observation::Floating),
                unit::None::UNIT,
                [],
                MetricFlags::empty(),
            );
        }
    }

    // A single Repeated observation with the Distribution flag must still be
    // rendered as a histogram (count = occurrences), not an inline value.
    #[test]
    fn distribution_flag_forces_histogram_display() {
        let mut format = LocalFormat::new(OutputStyle::Pretty);
        let entry = SingleRepeatedDistribution;
        let mut buf = Vec::new();
        format.format(&entry, &mut buf).unwrap();
        let output = String::from_utf8(buf).unwrap();
        assert!(output.contains("1000 samples"), "output was: {output}");
        assert!(output.contains("min:"));
        assert!(output.contains("max:"));
    }
    struct SingleRepeatedDistribution;
    impl Entry for SingleRepeatedDistribution {
        fn write<'a>(&'a self, writer: &mut impl EntryWriter<'a>) {
            writer.value("latency", &RepeatedDistributionValue);
        }
    }
    struct RepeatedDistributionValue;
    impl Value for RepeatedDistributionValue {
        fn write(&self, writer: impl ValueWriter) {
            writer.metric(
                [Observation::Repeated {
                    total: 5000.0,
                    occurrences: 1000,
                }],
                unit::None::UNIT,
                [],
                MetricFlags::upcast(&Distribution),
            );
        }
    }
}