pub mod influx;
pub mod json;
pub mod prometheus;
#[cfg(feature = "remote-write")]
pub mod remote_write;
pub mod syslog;
use crate::model::log::LogEvent;
use crate::model::metric::MetricEvent;
/// Serializes events into a wire format, appending bytes to a caller-supplied
/// buffer.
///
/// Implementors must support metric encoding. Log encoding is optional: the
/// provided default returns a `NotSupported` error without touching the
/// buffer.
pub trait Encoder: Send + Sync {
    /// Encode a single metric event, appending the encoded bytes to `buf`.
    fn encode_metric(
        &self,
        event: &MetricEvent,
        buf: &mut Vec<u8>,
    ) -> Result<(), crate::SondaError>;
    /// Encode a single log event, appending the encoded bytes to `buf`.
    ///
    /// Default implementation: returns
    /// `SondaError::Encoder(EncoderError::NotSupported)` and leaves `buf`
    /// unchanged.
    fn encode_log(&self, _event: &LogEvent, _buf: &mut Vec<u8>) -> Result<(), crate::SondaError> {
        Err(crate::SondaError::Encoder(
            crate::EncoderError::NotSupported("log encoding not supported by this encoder".into()),
        ))
    }
}
/// Configuration selecting and parameterizing an [`Encoder`].
///
/// With the `config` feature enabled this deserializes from an
/// internally-tagged representation keyed by `type`
/// (e.g. `type: prometheus_text`). `Option` fields default to `None` when
/// absent from the input.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "config", derive(serde::Deserialize))]
#[cfg_attr(feature = "config", serde(tag = "type"))]
pub enum EncoderConfig {
    /// Prometheus text exposition format.
    #[cfg_attr(feature = "config", serde(rename = "prometheus_text"))]
    PrometheusText {
        // Number of fractional digits for values; `None` = default `Display`.
        #[cfg_attr(feature = "config", serde(default))]
        precision: Option<u8>,
    },
    /// InfluxDB line protocol.
    #[cfg_attr(feature = "config", serde(rename = "influx_lp"))]
    InfluxLineProtocol {
        // Field key to write the value under; `None` lets the encoder pick
        // its default.
        field_key: Option<String>,
        #[cfg_attr(feature = "config", serde(default))]
        precision: Option<u8>,
    },
    /// Newline-delimited JSON.
    #[cfg_attr(feature = "config", serde(rename = "json_lines"))]
    JsonLines {
        #[cfg_attr(feature = "config", serde(default))]
        precision: Option<u8>,
    },
    /// Syslog message format.
    #[cfg_attr(feature = "config", serde(rename = "syslog"))]
    Syslog {
        hostname: Option<String>,
        app_name: Option<String>,
    },
    /// Prometheus remote-write protobuf (feature-gated).
    #[cfg(feature = "remote-write")]
    #[cfg_attr(feature = "config", serde(rename = "remote_write"))]
    RemoteWrite,
}
/// Build a boxed [`Encoder`] for the given configuration variant.
///
/// Optional string parameters are cloned out of the config so the returned
/// encoder owns its settings.
pub fn create_encoder(config: &EncoderConfig) -> Box<dyn Encoder> {
    match *config {
        EncoderConfig::PrometheusText { precision } => {
            Box::new(prometheus::PrometheusText::new(precision))
        }
        EncoderConfig::InfluxLineProtocol {
            ref field_key,
            precision,
        } => Box::new(influx::InfluxLineProtocol::new(field_key.clone(), precision)),
        EncoderConfig::JsonLines { precision } => Box::new(json::JsonLines::new(precision)),
        EncoderConfig::Syslog {
            ref hostname,
            ref app_name,
        } => Box::new(syslog::Syslog::new(hostname.clone(), app_name.clone())),
        #[cfg(feature = "remote-write")]
        EncoderConfig::RemoteWrite => Box::new(remote_write::RemoteWriteEncoder::new()),
    }
}
/// Append `value` to `buf` as UTF-8 text.
///
/// `Some(n)` renders exactly `n` fractional digits (rounded, like
/// `format!("{:.n$}")`); `None` uses the default `Display` representation.
pub(crate) fn write_value(buf: &mut Vec<u8>, value: f64, precision: Option<u8>) {
    use std::io::Write as _;
    let result = match precision {
        Some(digits) => {
            let digits = usize::from(digits);
            write!(buf, "{value:.digits$}")
        }
        None => write!(buf, "{value}"),
    };
    // Writing to a Vec never fails (it only grows the buffer).
    result.expect("write to Vec<u8> is infallible");
}
/// Byte length of an RFC 3339 UTC timestamp with millisecond precision,
/// e.g. `1970-01-01T00:00:00.000Z` (24 bytes for four-digit years).
pub(crate) const RFC3339_MILLIS_LEN: usize = 24;
pub(crate) fn format_rfc3339_millis(
ts: std::time::SystemTime,
buf: &mut Vec<u8>,
) -> Result<(), crate::SondaError> {
let arr = format_rfc3339_millis_array(ts)?;
buf.extend_from_slice(&arr);
Ok(())
}
/// Format `ts` as a fixed-size RFC 3339 UTC timestamp with millisecond
/// precision (`YYYY-MM-DDThh:mm:ss.mmmZ`, exactly [`RFC3339_MILLIS_LEN`]
/// bytes), without allocating.
///
/// # Errors
/// Returns `EncoderError::TimestampBeforeEpoch` if `ts` is before the Unix
/// epoch.
///
/// # Panics
/// The final `write!` into the fixed 24-byte array panics if the rendered
/// text overflows it, which can only happen once `year` needs more than four
/// digits (timestamps in year 10000 or later).
pub(crate) fn format_rfc3339_millis_array(
    ts: std::time::SystemTime,
) -> Result<[u8; RFC3339_MILLIS_LEN], crate::SondaError> {
    use std::time::UNIX_EPOCH;
    let duration = ts
        .duration_since(UNIX_EPOCH)
        .map_err(|e| crate::SondaError::Encoder(crate::EncoderError::TimestampBeforeEpoch(e)))?;
    let total_secs = duration.as_secs();
    let millis = duration.subsec_millis();
    // Split into whole days since the epoch and the remaining time of day.
    let days = total_secs / 86400;
    let time_of_day = total_secs % 86400;
    let hour = time_of_day / 3600;
    let minute = (time_of_day % 3600) / 60;
    let second = time_of_day % 60;
    // Days-to-civil-date conversion (Howard Hinnant's `civil_from_days`
    // algorithm): shift the day count so the calendar cycle begins on
    // 0000-03-01, split into 400-year eras of 146097 days each, then derive
    // year/month/day within the era. Leap-year handling (incl. the 100/400
    // century rules) falls out of the era arithmetic.
    let z = days as i64 + 719468; // 719468 = days from 0000-03-01 to 1970-01-01
    let era = if z >= 0 { z } else { z - 146096 } / 146097; // floor division
    let doe = (z - era * 146097) as u64; // day-of-era [0, 146096]
    let yoe = (doe - doe / 1460 + doe / 36524 - doe / 146096) / 365; // year-of-era [0, 399]
    let y = yoe as i64 + era * 400;
    let doy = doe - (365 * yoe + yoe / 4 - yoe / 100); // day-of-year, March-based [0, 365]
    let mp = (5 * doy + 2) / 153; // month index with March = 0
    let day = doy - (153 * mp + 2) / 5 + 1; // day of month [1, 31]
    let month = if mp < 10 { mp + 3 } else { mp - 9 }; // civil month [1, 12]
    let year = if month <= 2 { y + 1 } else { y }; // Jan/Feb belong to the next civil year
    let mut arr = [0u8; RFC3339_MILLIS_LEN];
    use std::io::Write as _;
    // `&mut [u8]` implements `io::Write`, failing only if the slice is too
    // short for the formatted output.
    let mut cursor = &mut arr[..];
    write!(
        cursor,
        "{year:04}-{month:02}-{day:02}T{hour:02}:{minute:02}:{second:02}.{millis:03}Z",
    )
    .expect("RFC 3339 millis timestamp is always exactly 24 bytes");
    Ok(arr)
}
#[cfg(test)]
mod tests {
    use super::*;

    // --- EncoderConfig deserialization: internally tagged via `type` ---

    #[cfg(feature = "config")]
    #[test]
    fn encoder_config_prometheus_text_deserializes_with_type_field() {
        let yaml = "type: prometheus_text";
        let config: EncoderConfig = serde_yaml_ng::from_str(yaml).unwrap();
        assert!(matches!(config, EncoderConfig::PrometheusText { .. }));
    }

    #[cfg(feature = "config")]
    #[test]
    fn encoder_config_json_lines_deserializes_with_type_field() {
        let yaml = "type: json_lines";
        let config: EncoderConfig = serde_yaml_ng::from_str(yaml).unwrap();
        assert!(matches!(config, EncoderConfig::JsonLines { .. }));
    }

    #[cfg(feature = "config")]
    #[test]
    fn encoder_config_influx_lp_without_field_key_deserializes_with_type_field() {
        let yaml = "type: influx_lp";
        let config: EncoderConfig = serde_yaml_ng::from_str(yaml).unwrap();
        // Missing `Option` fields fall back to `None` in serde.
        assert!(matches!(
            config,
            EncoderConfig::InfluxLineProtocol {
                field_key: None,
                precision: None
            }
        ));
    }

    #[cfg(feature = "config")]
    #[test]
    fn encoder_config_influx_lp_with_field_key_deserializes_with_type_field() {
        let yaml = "type: influx_lp\nfield_key: requests";
        let config: EncoderConfig = serde_yaml_ng::from_str(yaml).unwrap();
        assert!(matches!(
            config,
            EncoderConfig::InfluxLineProtocol { field_key: Some(ref k), .. } if k == "requests"
        ));
    }

    #[cfg(feature = "config")]
    #[test]
    fn encoder_config_unknown_type_returns_error() {
        let yaml = "type: no_such_encoder";
        let result: Result<EncoderConfig, _> = serde_yaml_ng::from_str(yaml);
        assert!(
            result.is_err(),
            "unknown type tag should fail deserialization"
        );
    }

    #[cfg(feature = "config")]
    #[test]
    fn encoder_config_missing_type_field_returns_error() {
        let yaml = "prometheus_text";
        let result: Result<EncoderConfig, _> = serde_yaml_ng::from_str(yaml);
        assert!(
            result.is_err(),
            "missing type field should fail deserialization"
        );
    }

    #[cfg(feature = "config")]
    #[test]
    fn encoder_config_old_external_tag_format_is_rejected() {
        // `!variant` is the YAML externally-tagged enum syntax, which this
        // config deliberately does not accept.
        let yaml = "!prometheus_text";
        let result: Result<EncoderConfig, _> = serde_yaml_ng::from_str(yaml);
        assert!(
            result.is_err(),
            "externally-tagged YAML format must be rejected in favour of internally-tagged"
        );
    }
    // --- create_encoder factory: every variant constructs successfully ---

    #[test]
    fn create_encoder_prometheus_text_succeeds() {
        let config = EncoderConfig::PrometheusText { precision: None };
        let _enc = create_encoder(&config);
    }

    #[test]
    fn create_encoder_json_lines_succeeds() {
        let config = EncoderConfig::JsonLines { precision: None };
        let _enc = create_encoder(&config);
    }

    #[test]
    fn create_encoder_influx_lp_no_field_key_succeeds() {
        let config = EncoderConfig::InfluxLineProtocol {
            field_key: None,
            precision: None,
        };
        let _enc = create_encoder(&config);
    }

    #[test]
    fn create_encoder_influx_lp_with_field_key_succeeds() {
        let config = EncoderConfig::InfluxLineProtocol {
            field_key: Some("bytes".to_string()),
            precision: None,
        };
        let _enc = create_encoder(&config);
    }

    // Compile-time property: EncoderConfig can cross thread boundaries.
    #[test]
    fn encoder_config_is_send_and_sync() {
        fn assert_send_sync<T: Send + Sync>() {}
        assert_send_sync::<EncoderConfig>();
    }

    #[test]
    fn encoder_config_prometheus_text_is_cloneable_and_debuggable() {
        let config = EncoderConfig::PrometheusText { precision: None };
        let cloned = config.clone();
        assert!(matches!(cloned, EncoderConfig::PrometheusText { .. }));
        let s = format!("{config:?}");
        assert!(s.contains("PrometheusText"));
    }

    #[test]
    fn encoder_config_json_lines_is_cloneable_and_debuggable() {
        let config = EncoderConfig::JsonLines { precision: None };
        let cloned = config.clone();
        assert!(matches!(cloned, EncoderConfig::JsonLines { .. }));
        let s = format!("{config:?}");
        assert!(s.contains("JsonLines"));
    }

    #[test]
    fn encoder_config_influx_lp_is_cloneable_and_debuggable() {
        let config = EncoderConfig::InfluxLineProtocol {
            field_key: Some("val".to_string()),
            precision: None,
        };
        let cloned = config.clone();
        assert!(matches!(
            cloned,
            EncoderConfig::InfluxLineProtocol { field_key: Some(ref k), .. } if k == "val"
        ));
        let s = format!("{config:?}");
        assert!(s.contains("InfluxLineProtocol"));
    }

    // Helper: minimal LogEvent fixture for the encode_log tests below.
    fn make_log_event() -> crate::model::log::LogEvent {
        use std::collections::BTreeMap;
        crate::model::log::LogEvent::new(
            crate::model::log::Severity::Info,
            "test message".to_string(),
            crate::model::metric::Labels::default(),
            BTreeMap::new(),
        )
    }
#[test]
fn prometheus_encoder_encode_log_returns_not_supported_error() {
let encoder = create_encoder(&EncoderConfig::PrometheusText { precision: None });
let event = make_log_event();
let mut buf = Vec::new();
let result = encoder.encode_log(&event, &mut buf);
assert!(
result.is_err(),
"prometheus encoder must return an error for encode_log"
);
let err = result.unwrap_err();
let msg = err.to_string();
assert!(
msg.contains("not supported"),
"error message should contain 'not supported', got: {msg}"
);
}
#[test]
fn influx_encoder_encode_log_returns_not_supported_error() {
let encoder = create_encoder(&EncoderConfig::InfluxLineProtocol {
field_key: None,
precision: None,
});
let event = make_log_event();
let mut buf = Vec::new();
let result = encoder.encode_log(&event, &mut buf);
assert!(
result.is_err(),
"influx encoder must return an error for encode_log"
);
let err = result.unwrap_err();
let msg = err.to_string();
assert!(
msg.contains("not supported"),
"error message should contain 'not supported', got: {msg}"
);
}
#[test]
fn json_lines_encoder_encode_log_succeeds() {
let encoder = create_encoder(&EncoderConfig::JsonLines { precision: None });
let event = make_log_event();
let mut buf = Vec::new();
let result = encoder.encode_log(&event, &mut buf);
assert!(
result.is_ok(),
"json_lines encoder must support encode_log after slice 2.3"
);
assert!(!buf.is_empty(), "buffer must contain encoded data");
}
#[test]
fn encode_log_default_does_not_write_to_buffer() {
let encoder = create_encoder(&EncoderConfig::PrometheusText { precision: None });
let event = make_log_event();
let mut buf = Vec::new();
let _ = encoder.encode_log(&event, &mut buf);
assert!(
buf.is_empty(),
"buffer must remain empty when encode_log returns an error"
);
}
#[test]
fn encode_log_error_is_encoder_variant() {
let encoder = create_encoder(&EncoderConfig::PrometheusText { precision: None });
let event = make_log_event();
let mut buf = Vec::new();
let result = encoder.encode_log(&event, &mut buf);
let err = result.unwrap_err();
assert!(
matches!(err, crate::SondaError::Encoder(_)),
"error must be SondaError::Encoder variant, got: {err:?}"
);
}
#[cfg(all(feature = "remote-write", feature = "config"))]
#[test]
fn encoder_config_remote_write_deserializes_from_yaml() {
let yaml = "type: remote_write";
let config: EncoderConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert!(
matches!(config, EncoderConfig::RemoteWrite),
"should deserialize as RemoteWrite variant"
);
}
#[cfg(feature = "remote-write")]
#[test]
fn create_encoder_remote_write_succeeds() {
let config = EncoderConfig::RemoteWrite;
let _enc = create_encoder(&config);
}
#[cfg(feature = "remote-write")]
#[test]
fn encoder_config_remote_write_is_cloneable_and_debuggable() {
let config = EncoderConfig::RemoteWrite;
let cloned = config.clone();
assert!(matches!(cloned, EncoderConfig::RemoteWrite));
let s = format!("{config:?}");
assert!(
s.contains("RemoteWrite"),
"debug output should contain 'RemoteWrite', got: {s}"
);
}
#[cfg(feature = "remote-write")]
#[test]
fn remote_write_encoder_produces_valid_output_through_factory() {
use crate::model::metric::{Labels, MetricEvent};
use std::time::{Duration, UNIX_EPOCH};
let config = EncoderConfig::RemoteWrite;
let enc = create_encoder(&config);
let labels = Labels::from_pairs(&[("job", "sonda")]).unwrap();
let ts = UNIX_EPOCH + Duration::from_secs(1_700_000_000);
let event =
MetricEvent::with_timestamp("factory_test".to_string(), 10.0, labels, ts).unwrap();
let mut buf = Vec::new();
enc.encode_metric(&event, &mut buf)
.expect("encode through factory should succeed");
assert!(
!buf.is_empty(),
"factory-created encoder should produce output"
);
}
#[cfg(all(feature = "remote-write", feature = "config"))]
#[test]
fn scenario_yaml_with_remote_write_encoder_deserializes() {
use crate::config::ScenarioConfig;
use crate::sink::SinkConfig;
let yaml = r#"
name: rw_test_metric
rate: 10.0
generator:
type: constant
value: 1.0
encoder:
type: remote_write
sink:
type: remote_write
url: "http://localhost:8428/api/v1/write"
"#;
let config: ScenarioConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert_eq!(config.name, "rw_test_metric");
assert!(matches!(config.encoder, EncoderConfig::RemoteWrite));
assert!(matches!(config.sink, SinkConfig::RemoteWrite { .. }));
}
#[test]
fn write_value_none_uses_default_display() {
let mut buf = Vec::new();
write_value(&mut buf, 1.0, None);
assert_eq!(String::from_utf8(buf).unwrap(), "1");
let mut buf = Vec::new();
write_value(&mut buf, 3.14159, None);
assert_eq!(String::from_utf8(buf).unwrap(), "3.14159");
}
#[test]
fn write_value_precision_0() {
let mut buf = Vec::new();
write_value(&mut buf, 99.6, Some(0));
assert_eq!(String::from_utf8(buf).unwrap(), "100");
}
#[test]
fn write_value_precision_2() {
let mut buf = Vec::new();
write_value(&mut buf, 99.60573, Some(2));
assert_eq!(String::from_utf8(buf).unwrap(), "99.61");
let mut buf = Vec::new();
write_value(&mut buf, 100.0, Some(2));
assert_eq!(String::from_utf8(buf).unwrap(), "100.00");
}
#[test]
fn write_value_precision_with_negative() {
let mut buf = Vec::new();
write_value(&mut buf, -3.14159, Some(2));
assert_eq!(String::from_utf8(buf).unwrap(), "-3.14");
}
#[test]
fn write_value_precision_4() {
let mut buf = Vec::new();
write_value(&mut buf, 1.23456789, Some(4));
assert_eq!(String::from_utf8(buf).unwrap(), "1.2346");
}
#[cfg(feature = "config")]
#[test]
fn prometheus_text_with_precision_deserializes() {
let yaml = "type: prometheus_text\nprecision: 3";
let config: EncoderConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert!(matches!(
config,
EncoderConfig::PrometheusText { precision: Some(3) }
));
}
#[cfg(feature = "config")]
#[test]
fn prometheus_text_without_precision_defaults_to_none() {
let yaml = "type: prometheus_text";
let config: EncoderConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert!(matches!(
config,
EncoderConfig::PrometheusText { precision: None }
));
}
#[cfg(feature = "config")]
#[test]
fn influx_with_precision_and_field_key_deserializes() {
let yaml = "type: influx_lp\nfield_key: gauge\nprecision: 2";
let config: EncoderConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert!(matches!(
config,
EncoderConfig::InfluxLineProtocol {
field_key: Some(ref k),
precision: Some(2)
} if k == "gauge"
));
}
#[cfg(feature = "config")]
#[test]
fn json_lines_with_precision_deserializes() {
let yaml = "type: json_lines\nprecision: 5";
let config: EncoderConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert!(matches!(
config,
EncoderConfig::JsonLines { precision: Some(5) }
));
}
#[cfg(feature = "config")]
#[test]
fn json_lines_without_precision_defaults_to_none() {
let yaml = "type: json_lines";
let config: EncoderConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert!(matches!(
config,
EncoderConfig::JsonLines { precision: None }
));
}
    // --- RFC 3339 millisecond formatting ---

    #[test]
    fn format_rfc3339_millis_writes_to_buffer() {
        use std::time::{Duration, UNIX_EPOCH};
        // 1_774_008_000_000 ms == 2026-03-20T12:00:00Z.
        let ts = UNIX_EPOCH + Duration::from_millis(1_774_008_000_000);
        let mut buf = Vec::new();
        format_rfc3339_millis(ts, &mut buf).unwrap();
        assert_eq!(String::from_utf8(buf).unwrap(), "2026-03-20T12:00:00.000Z");
    }

    #[test]
    fn format_rfc3339_millis_appends_to_existing_buffer() {
        use std::time::{Duration, UNIX_EPOCH};
        let ts = UNIX_EPOCH + Duration::from_millis(1_774_008_000_000);
        // Pre-existing buffer contents must be preserved, not overwritten.
        let mut buf = b"prefix:".to_vec();
        format_rfc3339_millis(ts, &mut buf).unwrap();
        assert_eq!(
            String::from_utf8(buf).unwrap(),
            "prefix:2026-03-20T12:00:00.000Z"
        );
    }

    #[test]
    fn format_rfc3339_millis_epoch_writes_correct_bytes() {
        use std::time::UNIX_EPOCH;
        let mut buf = Vec::new();
        format_rfc3339_millis(UNIX_EPOCH, &mut buf).unwrap();
        assert_eq!(String::from_utf8(buf).unwrap(), "1970-01-01T00:00:00.000Z");
    }

    #[test]
    fn format_rfc3339_millis_before_epoch_returns_error() {
        use std::time::{Duration, UNIX_EPOCH};
        let ts = UNIX_EPOCH - Duration::from_secs(1);
        let mut buf = Vec::new();
        let result = format_rfc3339_millis(ts, &mut buf);
        assert!(result.is_err(), "timestamps before epoch must return error");
        assert!(
            buf.is_empty(),
            "buffer must remain empty on error (nothing written before failure)"
        );
    }

    #[test]
    fn format_rfc3339_millis_array_returns_correct_bytes() {
        use std::time::{Duration, UNIX_EPOCH};
        let ts = UNIX_EPOCH + Duration::from_millis(1_774_008_000_000);
        let arr = format_rfc3339_millis_array(ts).unwrap();
        assert_eq!(
            std::str::from_utf8(&arr).unwrap(),
            "2026-03-20T12:00:00.000Z"
        );
    }

    #[test]
    fn format_rfc3339_millis_array_epoch() {
        use std::time::UNIX_EPOCH;
        let arr = format_rfc3339_millis_array(UNIX_EPOCH).unwrap();
        assert_eq!(
            std::str::from_utf8(&arr).unwrap(),
            "1970-01-01T00:00:00.000Z"
        );
    }

    #[test]
    fn format_rfc3339_millis_array_before_epoch_returns_error() {
        use std::time::{Duration, UNIX_EPOCH};
        let ts = UNIX_EPOCH - Duration::from_secs(1);
        let result = format_rfc3339_millis_array(ts);
        assert!(result.is_err());
        let err = result.unwrap_err();
        assert!(
            matches!(err, crate::SondaError::Encoder(_)),
            "error must be Encoder variant, got: {err:?}"
        );
    }

    #[test]
    fn format_rfc3339_millis_array_preserves_milliseconds() {
        use std::time::{Duration, UNIX_EPOCH};
        let ts = UNIX_EPOCH + Duration::from_millis(1_700_000_000_789);
        let arr = format_rfc3339_millis_array(ts).unwrap();
        let s = std::str::from_utf8(&arr).unwrap();
        assert!(s.ends_with(".789Z"), "must end with .789Z but got: {s}");
    }

    // The buffer-appending wrapper must emit exactly the array variant's bytes.
    #[test]
    fn format_rfc3339_millis_array_and_buf_produce_identical_output() {
        use std::time::{Duration, UNIX_EPOCH};
        let ts = UNIX_EPOCH + Duration::from_millis(1_700_000_000_123);
        let arr = format_rfc3339_millis_array(ts).unwrap();
        let mut buf = Vec::new();
        format_rfc3339_millis(ts, &mut buf).unwrap();
        assert_eq!(&arr[..], &buf[..]);
    }

    #[test]
    fn rfc3339_millis_len_constant_matches_output_size() {
        use std::time::{Duration, UNIX_EPOCH};
        let ts = UNIX_EPOCH + Duration::from_millis(1_774_008_000_000);
        let mut buf = Vec::new();
        format_rfc3339_millis(ts, &mut buf).unwrap();
        assert_eq!(buf.len(), RFC3339_MILLIS_LEN);
    }

    // Calendar edge cases: leap years, century rules, and year transitions.

    #[test]
    fn format_rfc3339_millis_leap_year_feb_29_2024() {
        use std::time::{Duration, UNIX_EPOCH};
        let ts = UNIX_EPOCH + Duration::from_secs(1_709_164_800);
        let arr = format_rfc3339_millis_array(ts).unwrap();
        assert_eq!(
            std::str::from_utf8(&arr).unwrap(),
            "2024-02-29T00:00:00.000Z"
        );
    }

    #[test]
    fn format_rfc3339_millis_non_leap_year_mar_1_2023() {
        use std::time::{Duration, UNIX_EPOCH};
        let ts = UNIX_EPOCH + Duration::from_secs(1_677_628_800);
        let arr = format_rfc3339_millis_array(ts).unwrap();
        assert_eq!(
            std::str::from_utf8(&arr).unwrap(),
            "2023-03-01T00:00:00.000Z"
        );
    }

    // 2000 is divisible by 400 and therefore IS a leap year.
    #[test]
    fn format_rfc3339_millis_century_leap_year_2000_feb_29() {
        use std::time::{Duration, UNIX_EPOCH};
        let ts = UNIX_EPOCH + Duration::from_secs(951_782_400);
        let arr = format_rfc3339_millis_array(ts).unwrap();
        assert_eq!(
            std::str::from_utf8(&arr).unwrap(),
            "2000-02-29T00:00:00.000Z"
        );
    }

    // 2100 is divisible by 100 but not 400 and therefore is NOT a leap year.
    #[test]
    fn format_rfc3339_millis_century_non_leap_year_2100_mar_1() {
        use std::time::{Duration, UNIX_EPOCH};
        let ts = UNIX_EPOCH + Duration::from_secs(4_107_542_400);
        let arr = format_rfc3339_millis_array(ts).unwrap();
        assert_eq!(
            std::str::from_utf8(&arr).unwrap(),
            "2100-03-01T00:00:00.000Z"
        );
    }

    #[test]
    fn format_rfc3339_millis_century_non_leap_year_2100_feb_28() {
        use std::time::{Duration, UNIX_EPOCH};
        // One millisecond before 2100-03-01T00:00:00Z.
        let ts = UNIX_EPOCH + Duration::from_millis(4_107_542_400_000 - 1);
        let arr = format_rfc3339_millis_array(ts).unwrap();
        assert_eq!(
            std::str::from_utf8(&arr).unwrap(),
            "2100-02-28T23:59:59.999Z"
        );
    }

    #[test]
    fn format_rfc3339_millis_dec_31_to_jan_1_transition() {
        use std::time::{Duration, UNIX_EPOCH};
        // One millisecond before 2026-01-01T00:00:00Z.
        let ts = UNIX_EPOCH + Duration::from_millis(1_767_225_600_000 - 1);
        let arr = format_rfc3339_millis_array(ts).unwrap();
        assert_eq!(
            std::str::from_utf8(&arr).unwrap(),
            "2025-12-31T23:59:59.999Z"
        );
    }

    #[test]
    fn format_rfc3339_millis_jan_1_midnight() {
        use std::time::{Duration, UNIX_EPOCH};
        let ts = UNIX_EPOCH + Duration::from_secs(1_767_225_600);
        let arr = format_rfc3339_millis_array(ts).unwrap();
        assert_eq!(
            std::str::from_utf8(&arr).unwrap(),
            "2026-01-01T00:00:00.000Z"
        );
    }

    #[test]
    fn format_rfc3339_millis_leap_year_dec_31_to_jan_1() {
        use std::time::{Duration, UNIX_EPOCH};
        // One millisecond before 2025-01-01T00:00:00Z, in leap year 2024.
        let ts = UNIX_EPOCH + Duration::from_millis(1_735_689_600_000 - 1);
        let arr = format_rfc3339_millis_array(ts).unwrap();
        assert_eq!(
            std::str::from_utf8(&arr).unwrap(),
            "2024-12-31T23:59:59.999Z"
        );
    }

    #[test]
    fn format_rfc3339_millis_mid_day_with_millis() {
        use std::time::{Duration, UNIX_EPOCH};
        let ts = UNIX_EPOCH + Duration::from_millis(1_718_461_845_123);
        let arr = format_rfc3339_millis_array(ts).unwrap();
        assert_eq!(
            std::str::from_utf8(&arr).unwrap(),
            "2024-06-15T14:30:45.123Z"
        );
    }
}