1use crate::util::{AlertState, RuleHealth, TargetHealth};
3use enum_as_inner::EnumAsInner;
4use serde::Deserialize;
5use std::collections::HashMap;
6use std::fmt;
7use time::{Duration, OffsetDateTime, PrimitiveDateTime};
8use url::Url;
9
10mod de {
11 use serde::{
12 de::{Error as SerdeError, Unexpected},
13 Deserialize, Deserializer,
14 };
15 use std::str::FromStr;
16 use time::format_description::FormatItem;
17 use time::macros::format_description;
18 use time::{Duration, PrimitiveDateTime};
19
20 const BUILD_INFO_DATE_FORMAT: &[FormatItem] = format_description!(
21 "[year repr:full][month repr:numerical][day]-[hour repr:24]:[minute]:[second]"
22 );
23
24 pub(super) fn deserialize_f64<'de, D>(deserializer: D) -> Result<f64, D::Error>
25 where
26 D: Deserializer<'de>,
27 {
28 String::deserialize(deserializer).and_then(|s| {
29 f64::from_str(&s).map_err(|_| {
30 SerdeError::invalid_value(
31 Unexpected::Str(&s),
32 &"a float value inside a quoted JSON string",
33 )
34 })
35 })
36 }
37
38 pub(super) fn deserialize_build_info_date<'de, D>(
40 deserializer: D,
41 ) -> Result<PrimitiveDateTime, D::Error>
42 where
43 D: Deserializer<'de>,
44 {
45 String::deserialize(deserializer).and_then(|s| {
46 PrimitiveDateTime::parse(&s, &BUILD_INFO_DATE_FORMAT).map_err(|_| {
47 SerdeError::invalid_value(
48 Unexpected::Str(&s),
49 &"a datetime string in format <yyyymmdd-hh:mm:ss>",
50 )
51 })
52 })
53 }
54
55 pub(super) fn deserialize_prometheus_duration<'de, D>(
62 deserializer: D,
63 ) -> Result<Duration, D::Error>
64 where
65 D: Deserializer<'de>,
66 {
67 let raw_str = String::deserialize(deserializer)?;
68
69 let mut total_milliseconds: i64 = 0;
70
71 let mut raw_num = String::new();
74
75 let mut duration_iter = raw_str.chars().peekable();
78
79 while let Some(item) = duration_iter.next() {
80 if ('0'..='9').contains(&item) {
81 raw_num.push(item);
82 continue;
83 }
84
85 let num = raw_num.parse::<i64>().map_err(SerdeError::custom)?;
86
87 match item {
88 'y' => {
89 total_milliseconds += num * 1000 * 60 * 60 * 24 * 365;
90 }
91 'w' => {
92 total_milliseconds += num * 1000 * 60 * 60 * 24 * 7;
93 }
94 'd' => {
95 total_milliseconds += num * 1000 * 60 * 60 * 24;
96 }
97 'h' => {
98 total_milliseconds += num * 1000 * 60 * 60;
99 }
100 'm' => {
101 if duration_iter.next_if_eq(&'s').is_some() {
102 total_milliseconds += num * 1000 * 60 * 60;
103 } else {
104 total_milliseconds += num * 1000 * 60;
105 }
106 }
107 's' => {
108 total_milliseconds += num * 1000;
109 }
110 _ => return Err(SerdeError::custom("invalid time duration")),
111 };
112
113 raw_num.clear();
114 }
115
116 Ok(Duration::milliseconds(total_milliseconds))
117 }
118}
119
/// Envelope of every Prometheus HTTP API response, discriminated by the
/// JSON `status` field: a `success` variant carrying data of type `D`, or
/// an `error` variant carrying the server-reported error details.
#[derive(Debug, Deserialize)]
#[serde(tag = "status")]
pub(crate) enum ApiResponse<D> {
    // "status": "success" — the payload is in the `data` field.
    #[serde(alias = "success")]
    Success { data: D },
    // "status": "error" — error details are deserialized from the same object.
    #[serde(alias = "error")]
    Error(crate::error::PrometheusError),
}
128
/// Query execution statistics as returned in the `stats` object of a
/// query response: a timing breakdown plus sample counts.
#[derive(Debug, Clone, Deserialize)]
pub struct Stats {
    timings: Timings,
    samples: Samples,
}

impl Stats {
    /// Timing breakdown of the query execution.
    pub fn timings(&self) -> &Timings {
        &self.timings
    }

    /// Sample counts collected while executing the query.
    pub fn samples(&self) -> &Samples {
        &self.samples
    }
}
144
/// Timing breakdown reported in query execution statistics.
///
/// Values are raw floats as reported by the API — presumably seconds;
/// confirm against the Prometheus query stats documentation.
#[derive(Debug, Copy, Clone, Deserialize)]
pub struct Timings {
    #[serde(alias = "evalTotalTime")]
    eval_total_time: f64,
    #[serde(alias = "resultSortTime")]
    result_sort_time: f64,
    #[serde(alias = "queryPreparationTime")]
    query_preparation_time: f64,
    #[serde(alias = "innerEvalTime")]
    inner_eval_time: f64,
    #[serde(alias = "execQueueTime")]
    exec_queue_time: f64,
    #[serde(alias = "execTotalTime")]
    exec_total_time: f64,
}

impl Timings {
    /// Total evaluation time (JSON field `evalTotalTime`).
    pub fn eval_total_time(&self) -> f64 {
        self.eval_total_time
    }

    /// Time spent sorting the result (`resultSortTime`).
    pub fn result_sort_time(&self) -> f64 {
        self.result_sort_time
    }

    /// Time spent preparing the query (`queryPreparationTime`).
    pub fn query_preparation_time(&self) -> f64 {
        self.query_preparation_time
    }

    /// Time spent in the inner evaluation (`innerEvalTime`).
    pub fn inner_eval_time(&self) -> f64 {
        self.inner_eval_time
    }

    /// Time spent waiting in the execution queue (`execQueueTime`).
    pub fn exec_queue_time(&self) -> f64 {
        self.exec_queue_time
    }

    /// Total execution time (`execTotalTime`).
    pub fn exec_total_time(&self) -> f64 {
        self.exec_total_time
    }
}
186
/// Sample counts reported in query execution statistics.
#[derive(Debug, Clone, Deserialize)]
pub struct Samples {
    // Per-step sample counts; only present for range queries that report them.
    #[serde(alias = "totalQueryableSamplesPerStep")]
    total_queryable_samples_per_step: Option<Vec<SamplesPerStep>>,
    #[serde(alias = "totalQueryableSamples")]
    total_queryable_samples: i64,
    #[serde(alias = "peakSamples")]
    peak_samples: i64,
}

impl Samples {
    /// Per-step sample counts, if the server reported them
    /// (`totalQueryableSamplesPerStep`).
    pub fn total_queryable_samples_per_step(&self) -> Option<&Vec<SamplesPerStep>> {
        self.total_queryable_samples_per_step.as_ref()
    }

    /// Total number of queryable samples (`totalQueryableSamples`).
    pub fn total_queryable_samples(&self) -> i64 {
        self.total_queryable_samples
    }

    /// Peak number of samples held at once (`peakSamples`).
    pub fn peak_samples(&self) -> i64 {
        self.peak_samples
    }
}
210
/// A `(timestamp, count)` pair from the per-step sample statistics of a
/// range query.
#[derive(Clone, Copy, Debug, PartialEq, Deserialize)]
pub struct SamplesPerStep {
    pub(crate) timestamp: f64,
    pub(crate) value: usize,
}

impl SamplesPerStep {
    /// Timestamp of this step (Unix time as reported by the API).
    pub fn timestamp(&self) -> f64 {
        self.timestamp
    }

    /// Number of samples at this step.
    pub fn value(&self) -> usize {
        self.value
    }
}
229
/// The payload of a successful instant or range query: the result data
/// plus optional execution statistics.
#[derive(Debug, Clone, Deserialize)]
pub struct PromqlResult {
    // `resultType`/`result` live at the same JSON level, hence `flatten`.
    #[serde(flatten)]
    pub(crate) data: Data,
    // Absent unless the server includes query statistics in the response.
    pub(crate) stats: Option<Stats>,
}

impl PromqlResult {
    /// The query result data (vector, matrix or scalar).
    pub fn data(&self) -> &Data {
        &self.data
    }

    /// Execution statistics, if the server reported them.
    pub fn stats(&self) -> Option<&Stats> {
        self.stats.as_ref()
    }

    /// Consume the result and return its data and statistics.
    pub fn into_inner(self) -> (Data, Option<Stats>) {
        (self.data, self.stats)
    }
}
253
/// A query result, discriminated by the JSON `resultType` field:
/// an instant vector, a range vector (matrix), or a single scalar.
///
/// `EnumAsInner` derives `is_*`/`as_*`/`into_*` accessors for each variant.
#[derive(Clone, Debug, Deserialize, EnumAsInner)]
#[serde(tag = "resultType", content = "result")]
pub enum Data {
    #[serde(alias = "vector")]
    Vector(Vec<InstantVector>),
    #[serde(alias = "matrix")]
    Matrix(Vec<RangeVector>),
    #[serde(alias = "scalar")]
    Scalar(Sample),
}
265
266impl Data {
267 pub fn is_empty(&self) -> bool {
269 match self {
270 Data::Vector(v) => v.is_empty(),
271 Data::Matrix(v) => v.is_empty(),
272 Data::Scalar(_) => false,
273 }
274 }
275}
276
/// A single time series with exactly one sample, as contained in a
/// `vector` query result.
#[derive(Clone, Debug, PartialEq, Deserialize)]
pub struct InstantVector {
    pub(crate) metric: HashMap<String, String>,
    #[serde(alias = "value")]
    pub(crate) sample: Sample,
}

impl InstantVector {
    /// Label set identifying this series.
    pub fn metric(&self) -> &HashMap<String, String> {
        &self.metric
    }

    /// The single sample of this series.
    pub fn sample(&self) -> &Sample {
        &self.sample
    }

    /// Consume the vector and return its label set and sample.
    pub fn into_inner(self) -> (HashMap<String, String>, Sample) {
        (self.metric, self.sample)
    }
}
302
/// A single time series with multiple samples, as contained in a
/// `matrix` query result.
#[derive(Clone, Debug, PartialEq, Deserialize)]
pub struct RangeVector {
    pub(crate) metric: HashMap<String, String>,
    #[serde(alias = "values")]
    pub(crate) samples: Vec<Sample>,
}

impl RangeVector {
    /// Label set identifying this series.
    pub fn metric(&self) -> &HashMap<String, String> {
        &self.metric
    }

    /// The samples of this series, in API order.
    pub fn samples(&self) -> &[Sample] {
        &self.samples
    }

    /// Consume the vector and return its label set and samples.
    pub fn into_inner(self) -> (HashMap<String, String>, Vec<Sample>) {
        (self.metric, self.samples)
    }
}
328
/// A single `(timestamp, value)` sample. The API serializes the value as
/// a quoted string, hence the custom deserializer.
#[derive(Clone, Copy, Debug, PartialEq, Deserialize)]
pub struct Sample {
    pub(crate) timestamp: f64,
    #[serde(deserialize_with = "de::deserialize_f64")]
    pub(crate) value: f64,
}

impl Sample {
    /// Timestamp of the sample (Unix time as reported by the API).
    pub fn timestamp(&self) -> f64 {
        self.timestamp
    }

    /// The sample value.
    pub fn value(&self) -> f64 {
        self.value
    }
}
348
/// Target discovery state: currently scraped targets and targets that
/// were dropped during relabeling.
#[derive(Clone, Debug, Deserialize)]
pub struct Targets {
    #[serde(alias = "activeTargets")]
    pub(crate) active: Vec<ActiveTarget>,
    #[serde(alias = "droppedTargets")]
    pub(crate) dropped: Vec<DroppedTarget>,
}

impl Targets {
    /// Currently active scrape targets.
    pub fn active(&self) -> &[ActiveTarget] {
        &self.active
    }

    /// Targets that were dropped.
    pub fn dropped(&self) -> &[DroppedTarget] {
        &self.dropped
    }
}
369
/// A target that is actively being scraped, including its label sets,
/// scrape endpoint, last scrape outcome, and scrape configuration.
#[derive(Clone, Debug, Deserialize)]
pub struct ActiveTarget {
    // Labels as discovered, before relabeling.
    #[serde(alias = "discoveredLabels")]
    pub(crate) discovered_labels: HashMap<String, String>,
    // Labels after relabeling.
    pub(crate) labels: HashMap<String, String>,
    #[serde(alias = "scrapePool")]
    pub(crate) scrape_pool: String,
    #[serde(alias = "scrapeUrl")]
    pub(crate) scrape_url: Url,
    #[serde(alias = "globalUrl")]
    pub(crate) global_url: Url,
    // Empty string when the last scrape succeeded.
    #[serde(alias = "lastError")]
    pub(crate) last_error: String,
    #[serde(alias = "lastScrape")]
    #[serde(with = "time::serde::rfc3339")]
    pub(crate) last_scrape: OffsetDateTime,
    #[serde(alias = "lastScrapeDuration")]
    pub(crate) last_scrape_duration: f64,
    pub(crate) health: TargetHealth,
    // Prometheus duration string (e.g. "1m"), parsed by the custom deserializer.
    #[serde(
        alias = "scrapeInterval",
        deserialize_with = "de::deserialize_prometheus_duration"
    )]
    pub(crate) scrape_interval: Duration,
    #[serde(
        alias = "scrapeTimeout",
        deserialize_with = "de::deserialize_prometheus_duration"
    )]
    pub(crate) scrape_timeout: Duration,
}

impl ActiveTarget {
    /// Labels of the target as discovered, before relabeling.
    pub fn discovered_labels(&self) -> &HashMap<String, String> {
        &self.discovered_labels
    }

    /// Labels of the target after relabeling.
    pub fn labels(&self) -> &HashMap<String, String> {
        &self.labels
    }

    /// Name of the scrape pool this target belongs to.
    pub fn scrape_pool(&self) -> &str {
        &self.scrape_pool
    }

    /// URL that is scraped.
    pub fn scrape_url(&self) -> &Url {
        &self.scrape_url
    }

    /// Externally reachable URL of the target.
    pub fn global_url(&self) -> &Url {
        &self.global_url
    }

    /// Error of the last scrape; empty when the scrape succeeded.
    pub fn last_error(&self) -> &str {
        &self.last_error
    }

    /// Time of the last scrape.
    pub fn last_scrape(&self) -> &OffsetDateTime {
        &self.last_scrape
    }

    /// Duration of the last scrape.
    pub fn last_scrape_duration(&self) -> f64 {
        self.last_scrape_duration
    }

    /// Health of the target as of the last scrape.
    pub fn health(&self) -> TargetHealth {
        self.health
    }

    /// Configured scrape interval.
    pub fn scrape_interval(&self) -> &Duration {
        &self.scrape_interval
    }

    /// Configured scrape timeout.
    pub fn scrape_timeout(&self) -> &Duration {
        &self.scrape_timeout
    }
}
458
/// A target that was dropped during relabeling; only the originally
/// discovered labels are reported.
#[derive(Clone, Debug, Deserialize)]
pub struct DroppedTarget {
    #[serde(alias = "discoveredLabels")]
    pub(crate) discovered_labels: HashMap<String, String>,
}

impl DroppedTarget {
    /// Labels of the target as discovered, before it was dropped.
    pub fn discovered_labels(&self) -> &HashMap<String, String> {
        &self.discovered_labels
    }
}
472
/// Wrapper for the `groups` array in the rules endpoint response.
#[derive(Debug, Deserialize)]
pub(crate) struct RuleGroups {
    pub groups: Vec<RuleGroup>,
}
479
/// A group of recording and/or alerting rules that are evaluated together.
#[derive(Clone, Debug, Deserialize)]
pub struct RuleGroup {
    pub(crate) rules: Vec<Rule>,
    // Path of the rule file this group was loaded from.
    pub(crate) file: String,
    pub(crate) interval: f64,
    pub(crate) name: String,
    #[serde(alias = "evaluationTime")]
    pub(crate) evaluation_time: f64,
    #[serde(alias = "lastEvaluation", with = "time::serde::rfc3339")]
    pub(crate) last_evaluation: OffsetDateTime,
    pub(crate) limit: usize,
}

impl RuleGroup {
    /// Rules contained in this group.
    pub fn rules(&self) -> &[Rule] {
        &self.rules
    }

    /// Path of the rule file this group was loaded from.
    pub fn file(&self) -> &str {
        &self.file
    }

    /// Evaluation interval of this group.
    pub fn interval(&self) -> f64 {
        self.interval
    }

    /// Name of the rule group.
    pub fn name(&self) -> &str {
        &self.name
    }

    /// Time of the last evaluation of this group.
    pub fn last_evaluation(&self) -> &OffsetDateTime {
        &self.last_evaluation
    }

    /// Duration of the last evaluation.
    pub fn evaluation_time(&self) -> f64 {
        self.evaluation_time
    }

    /// Limit on the number of alerts/series the group may produce.
    pub fn limit(&self) -> usize {
        self.limit
    }
}
532
/// A single rule, discriminated by the JSON `type` field: either a
/// recording rule or an alerting rule.
#[derive(Clone, Debug, Deserialize)]
#[serde(tag = "type")]
pub enum Rule {
    #[serde(alias = "recording")]
    Recording(RecordingRule),
    #[serde(alias = "alerting")]
    Alerting(AlertingRule),
}
542
543impl Rule {
544 pub fn as_recording(&self) -> Option<&RecordingRule> {
545 match self {
546 Self::Recording(rule) => Some(&rule),
547 _ => None,
548 }
549 }
550
551 pub fn as_alerting(&self) -> Option<&AlertingRule> {
552 match self {
553 Self::Alerting(rule) => Some(&rule),
554 _ => None,
555 }
556 }
557}
558
/// An alerting rule together with its currently active alerts.
#[derive(Clone, Debug, Deserialize)]
pub struct AlertingRule {
    pub(crate) alerts: Vec<Alert>,
    pub(crate) annotations: HashMap<String, String>,
    // The rule's `for` clause as reported by the API.
    pub(crate) duration: f64,
    pub(crate) health: RuleHealth,
    pub(crate) labels: HashMap<String, String>,
    pub(crate) name: String,
    pub(crate) query: String,
    #[serde(alias = "evaluationTime")]
    pub(crate) evaluation_time: f64,
    #[serde(alias = "lastEvaluation", with = "time::serde::rfc3339")]
    pub(crate) last_evaluation: OffsetDateTime,
    #[serde(alias = "keepFiringFor")]
    pub(crate) keep_firing_for: f64,
}

impl AlertingRule {
    /// Alerts currently active for this rule.
    pub fn alerts(&self) -> &[Alert] {
        &self.alerts
    }

    /// Annotations attached to the rule.
    pub fn annotations(&self) -> &HashMap<String, String> {
        &self.annotations
    }

    /// The rule's `for` duration as reported by the API.
    pub fn duration(&self) -> f64 {
        self.duration
    }

    /// Health of the rule.
    pub fn health(&self) -> RuleHealth {
        self.health
    }

    /// Labels attached to the rule.
    pub fn labels(&self) -> &HashMap<String, String> {
        &self.labels
    }

    /// Name of the rule.
    pub fn name(&self) -> &str {
        &self.name
    }

    /// PromQL expression of the rule.
    pub fn query(&self) -> &str {
        &self.query
    }

    /// Time of the last evaluation of this rule.
    pub fn last_evaluation(&self) -> &OffsetDateTime {
        &self.last_evaluation
    }

    /// Duration of the last evaluation.
    pub fn evaluation_time(&self) -> f64 {
        self.evaluation_time
    }

    /// The rule's `keep_firing_for` value as reported by the API.
    pub fn keep_firing_for(&self) -> f64 {
        self.keep_firing_for
    }
}
629
/// A recording rule.
#[derive(Clone, Debug, Deserialize)]
pub struct RecordingRule {
    pub(crate) health: RuleHealth,
    pub(crate) name: String,
    pub(crate) query: String,
    // Absent when the rule defines no extra labels.
    pub(crate) labels: Option<HashMap<String, String>>,
    #[serde(alias = "evaluationTime")]
    pub(crate) evaluation_time: f64,
    #[serde(alias = "lastEvaluation", with = "time::serde::rfc3339")]
    pub(crate) last_evaluation: OffsetDateTime,
}

impl RecordingRule {
    /// Health of the rule.
    pub fn health(&self) -> RuleHealth {
        self.health
    }

    /// Name of the rule.
    pub fn name(&self) -> &str {
        &self.name
    }

    /// PromQL expression of the rule.
    pub fn query(&self) -> &str {
        &self.query
    }

    /// Extra labels defined by the rule, if any.
    // NOTE(review): returning `&Option<…>` is unidiomatic (`Option<&…>` via
    // `as_ref()` would be preferred) but is kept for interface stability.
    pub fn labels(&self) -> &Option<HashMap<String, String>> {
        &self.labels
    }

    /// Time of the last evaluation of this rule.
    pub fn last_evaluation(&self) -> &OffsetDateTime {
        &self.last_evaluation
    }

    /// Duration of the last evaluation.
    pub fn evaluation_time(&self) -> f64 {
        self.evaluation_time
    }
}
674
/// Wrapper for the `alerts` array in the alerts endpoint response.
#[derive(Debug, Deserialize)]
pub(crate) struct Alerts {
    pub alerts: Vec<Alert>,
}
681
/// A single active alert.
#[derive(Clone, Debug, Deserialize)]
pub struct Alert {
    #[serde(alias = "activeAt", with = "time::serde::rfc3339")]
    pub(crate) active_at: OffsetDateTime,
    pub(crate) annotations: HashMap<String, String>,
    pub(crate) labels: HashMap<String, String>,
    pub(crate) state: AlertState,
    // The API serializes the value as a quoted string.
    #[serde(deserialize_with = "de::deserialize_f64")]
    pub(crate) value: f64,
}

impl Alert {
    /// Time at which the alert became active.
    pub fn active_at(&self) -> &OffsetDateTime {
        &self.active_at
    }

    /// Annotations attached to the alert.
    pub fn annotations(&self) -> &HashMap<String, String> {
        &self.annotations
    }

    /// Labels attached to the alert.
    pub fn labels(&self) -> &HashMap<String, String> {
        &self.labels
    }

    /// Current state of the alert.
    pub fn state(&self) -> AlertState {
        self.state
    }

    /// Value of the expression that triggered the alert.
    pub fn value(&self) -> f64 {
        self.value
    }
}
720
/// Alertmanager discovery state: active and dropped Alertmanager instances.
#[derive(Clone, Debug, Deserialize)]
pub struct Alertmanagers {
    #[serde(alias = "activeAlertmanagers")]
    pub(crate) active: Vec<Alertmanager>,
    #[serde(alias = "droppedAlertmanagers")]
    pub(crate) dropped: Vec<Alertmanager>,
}

impl Alertmanagers {
    /// Currently active Alertmanager instances.
    pub fn active(&self) -> &[Alertmanager] {
        &self.active
    }

    /// Alertmanager instances that were dropped.
    pub fn dropped(&self) -> &[Alertmanager] {
        &self.dropped
    }
}
741
/// A single Alertmanager instance, identified by its URL.
#[derive(Clone, Debug, Deserialize)]
pub struct Alertmanager {
    url: Url,
}

impl Alertmanager {
    /// URL of the Alertmanager instance.
    pub fn url(&self) -> &Url {
        &self.url
    }
}
754
/// Type of a metric as reported by the metadata endpoints; the aliases
/// match the lowercase names used on the wire.
#[derive(Debug, Copy, Clone, Deserialize, Eq, PartialEq)]
pub enum MetricType {
    #[serde(alias = "counter")]
    Counter,
    #[serde(alias = "gauge")]
    Gauge,
    #[serde(alias = "histogram")]
    Histogram,
    #[serde(alias = "gaugehistogram")]
    GaugeHistogram,
    #[serde(alias = "summary")]
    Summary,
    #[serde(alias = "info")]
    Info,
    #[serde(alias = "stateset")]
    Stateset,
    #[serde(alias = "unknown")]
    Unknown,
}
775
776impl MetricType {
777 pub fn is_counter(&self) -> bool {
778 *self == Self::Counter
779 }
780
781 pub fn is_gauge(&self) -> bool {
782 *self == Self::Gauge
783 }
784
785 pub fn is_histogram(&self) -> bool {
786 *self == Self::Histogram
787 }
788
789 pub fn is_gauge_histogram(&self) -> bool {
790 *self == Self::GaugeHistogram
791 }
792
793 pub fn is_summary(&self) -> bool {
794 *self == Self::Summary
795 }
796
797 pub fn is_info(&self) -> bool {
798 *self == Self::Info
799 }
800
801 pub fn is_stateset(&self) -> bool {
802 *self == Self::Stateset
803 }
804
805 pub fn is_unknown(&self) -> bool {
806 *self == Self::Unknown
807 }
808}
809
810impl fmt::Display for MetricType {
811 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
812 match self {
813 MetricType::Counter => write!(f, "counter"),
814 MetricType::Gauge => write!(f, "gauge"),
815 MetricType::Histogram => write!(f, "histogram"),
816 MetricType::GaugeHistogram => write!(f, "gaugehistogram"),
817 MetricType::Summary => write!(f, "summary"),
818 MetricType::Info => write!(f, "info"),
819 MetricType::Stateset => write!(f, "stateset"),
820 MetricType::Unknown => write!(f, "unknown"),
821 }
822 }
823}
824
/// Metadata about a metric as exposed by a specific target.
#[derive(Clone, Debug, Deserialize)]
pub struct TargetMetadata {
    // Labels identifying the target that exposes the metric.
    pub(crate) target: HashMap<String, String>,
    #[serde(alias = "type")]
    pub(crate) metric_type: MetricType,
    // Absent when the metadata was requested for a specific metric name.
    pub(crate) metric: Option<String>,
    pub(crate) help: String,
    pub(crate) unit: String,
}

impl TargetMetadata {
    /// Labels identifying the target that exposes the metric.
    pub fn target(&self) -> &HashMap<String, String> {
        &self.target
    }

    /// Type of the metric.
    pub fn metric_type(&self) -> MetricType {
        self.metric_type
    }

    /// Name of the metric, if present in the response.
    pub fn metric(&self) -> Option<&str> {
        self.metric.as_deref()
    }

    /// Help text of the metric.
    pub fn help(&self) -> &str {
        &self.help
    }

    /// Unit of the metric.
    pub fn unit(&self) -> &str {
        &self.unit
    }
}
862
/// Metadata about a metric name, independent of any specific target.
#[derive(Clone, Debug, Deserialize)]
pub struct MetricMetadata {
    #[serde(alias = "type")]
    pub(crate) metric_type: MetricType,
    pub(crate) help: String,
    pub(crate) unit: String,
}

impl MetricMetadata {
    /// Type of the metric.
    pub fn metric_type(&self) -> MetricType {
        self.metric_type
    }

    /// Help text of the metric.
    pub fn help(&self) -> &str {
        &self.help
    }

    /// Unit of the metric.
    pub fn unit(&self) -> &str {
        &self.unit
    }
}
888
/// Build information of the Prometheus server.
#[derive(Clone, Debug, Deserialize)]
pub struct BuildInformation {
    pub(crate) version: String,
    pub(crate) revision: String,
    pub(crate) branch: String,
    #[serde(alias = "buildUser")]
    pub(crate) build_user: String,
    // Formatted as "yyyymmdd-hh:mm:ss"; parsed by the custom deserializer.
    #[serde(alias = "buildDate")]
    #[serde(deserialize_with = "de::deserialize_build_info_date")]
    pub(crate) build_date: PrimitiveDateTime,
    #[serde(alias = "goVersion")]
    pub(crate) go_version: String,
}

impl BuildInformation {
    /// Version of the server.
    pub fn version(&self) -> &str {
        &self.version
    }

    /// VCS revision the server was built from.
    pub fn revision(&self) -> &str {
        &self.revision
    }

    /// VCS branch the server was built from.
    pub fn branch(&self) -> &str {
        &self.branch
    }

    /// User that built the server.
    pub fn build_user(&self) -> &str {
        &self.build_user
    }

    /// Build date (no timezone information is provided).
    pub fn build_date(&self) -> &PrimitiveDateTime {
        &self.build_date
    }

    /// Go version the server was built with.
    pub fn go_version(&self) -> &str {
        &self.go_version
    }
}
935
/// Runtime information of the Prometheus server.
#[derive(Clone, Debug, Deserialize)]
pub struct RuntimeInformation {
    #[serde(alias = "startTime", with = "time::serde::rfc3339")]
    pub(crate) start_time: OffsetDateTime,
    #[serde(alias = "CWD")]
    pub(crate) cwd: String,
    #[serde(alias = "reloadConfigSuccess")]
    pub(crate) reload_config_success: bool,
    #[serde(alias = "lastConfigTime", with = "time::serde::rfc3339")]
    pub(crate) last_config_time: OffsetDateTime,
    #[serde(alias = "corruptionCount")]
    pub(crate) corruption_count: i64,
    #[serde(alias = "goroutineCount")]
    pub(crate) goroutine_count: usize,
    #[serde(alias = "GOMAXPROCS")]
    pub(crate) go_max_procs: usize,
    #[serde(alias = "GOGC")]
    pub(crate) go_gc: String,
    #[serde(alias = "GODEBUG")]
    pub(crate) go_debug: String,
    // Prometheus duration string (e.g. "15d"), parsed by the custom deserializer.
    #[serde(
        alias = "storageRetention",
        deserialize_with = "de::deserialize_prometheus_duration"
    )]
    pub(crate) storage_retention: Duration,
}

impl RuntimeInformation {
    /// Time the server was started.
    pub fn start_time(&self) -> &OffsetDateTime {
        &self.start_time
    }

    /// Working directory of the server process.
    pub fn cwd(&self) -> &str {
        &self.cwd
    }

    /// Whether the last configuration reload succeeded.
    pub fn reload_config_success(&self) -> bool {
        self.reload_config_success
    }

    /// Time of the last configuration (re)load.
    pub fn last_config_time(&self) -> &OffsetDateTime {
        &self.last_config_time
    }

    /// Reported corruption count.
    pub fn corruption_count(&self) -> i64 {
        self.corruption_count
    }

    /// Number of goroutines in the server process.
    pub fn goroutine_count(&self) -> usize {
        self.goroutine_count
    }

    /// GOMAXPROCS setting of the server process.
    pub fn go_max_procs(&self) -> usize {
        self.go_max_procs
    }

    /// GOGC setting of the server process.
    pub fn go_gc(&self) -> &str {
        &self.go_gc
    }

    /// GODEBUG setting of the server process.
    pub fn go_debug(&self) -> &str {
        &self.go_debug
    }

    /// Configured storage retention.
    pub fn storage_retention(&self) -> &Duration {
        &self.storage_retention
    }
}
1009
/// TSDB usage statistics: head-block stats plus several top-N cardinality
/// breakdowns.
#[derive(Clone, Debug, Deserialize)]
pub struct TsdbStatistics {
    #[serde(alias = "headStats")]
    pub(crate) head_stats: HeadStatistics,
    #[serde(alias = "seriesCountByMetricName")]
    pub(crate) series_count_by_metric_name: Vec<TsdbItemCount>,
    #[serde(alias = "labelValueCountByLabelName")]
    pub(crate) label_value_count_by_label_name: Vec<TsdbItemCount>,
    #[serde(alias = "memoryInBytesByLabelName")]
    pub(crate) memory_in_bytes_by_label_name: Vec<TsdbItemCount>,
    #[serde(alias = "seriesCountByLabelValuePair")]
    pub(crate) series_count_by_label_value_pair: Vec<TsdbItemCount>,
}

impl TsdbStatistics {
    /// Statistics of the TSDB head block.
    pub fn head_stats(&self) -> HeadStatistics {
        self.head_stats
    }

    /// Series counts keyed by metric name.
    pub fn series_count_by_metric_name(&self) -> &[TsdbItemCount] {
        &self.series_count_by_metric_name
    }

    /// Label-value counts keyed by label name.
    pub fn label_value_count_by_label_name(&self) -> &[TsdbItemCount] {
        &self.label_value_count_by_label_name
    }

    /// Memory usage in bytes keyed by label name.
    pub fn memory_in_bytes_by_label_name(&self) -> &[TsdbItemCount] {
        &self.memory_in_bytes_by_label_name
    }

    /// Series counts keyed by label name/value pair.
    pub fn series_count_by_label_value_pair(&self) -> &[TsdbItemCount] {
        &self.series_count_by_label_value_pair
    }
}
1051
/// Statistics of the TSDB head block.
#[derive(Clone, Copy, Debug, Deserialize)]
pub struct HeadStatistics {
    #[serde(alias = "numSeries")]
    pub(crate) num_series: usize,
    #[serde(alias = "chunkCount")]
    pub(crate) chunk_count: usize,
    // Raw timestamps as reported by the API (presumably milliseconds since
    // the Unix epoch — confirm against the TSDB stats endpoint docs).
    #[serde(alias = "minTime")]
    pub(crate) min_time: i64,
    #[serde(alias = "maxTime")]
    pub(crate) max_time: i64,
}

impl HeadStatistics {
    /// Number of series in the head block.
    pub fn num_series(&self) -> usize {
        self.num_series
    }

    /// Number of chunks in the head block.
    pub fn chunk_count(&self) -> usize {
        self.chunk_count
    }

    /// Minimum timestamp in the head block.
    pub fn min_time(&self) -> i64 {
        self.min_time
    }

    /// Maximum timestamp in the head block.
    pub fn max_time(&self) -> i64 {
        self.max_time
    }
}
1086
/// A named count entry in a TSDB cardinality breakdown.
#[derive(Clone, Debug, Deserialize)]
pub struct TsdbItemCount {
    pub(crate) name: String,
    pub(crate) value: usize,
}

impl TsdbItemCount {
    /// Name of the counted item (metric name, label name or pair).
    pub fn name(&self) -> &str {
        &self.name
    }

    /// The associated count (or byte size, depending on the breakdown).
    pub fn value(&self) -> usize {
        self.value
    }
}
1105
/// Progress of a write-ahead-log replay: segment range and current position.
#[derive(Clone, Copy, Debug, Deserialize)]
pub struct WalReplayStatistics {
    pub(crate) min: usize,
    pub(crate) max: usize,
    pub(crate) current: usize,
    // Not always present in the response.
    pub(crate) state: Option<WalReplayState>,
}

impl WalReplayStatistics {
    /// First segment of the replay.
    pub fn min(&self) -> usize {
        self.min
    }

    /// Last segment of the replay.
    pub fn max(&self) -> usize {
        self.max
    }

    /// Segment currently being replayed.
    pub fn current(&self) -> usize {
        self.current
    }

    /// State of the replay, if reported.
    pub fn state(&self) -> Option<WalReplayState> {
        self.state
    }
}
1132
/// State of a write-ahead-log replay; aliases match the wire format
/// (note the space in "in progress").
#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq)]
pub enum WalReplayState {
    #[serde(alias = "waiting")]
    Waiting,
    #[serde(alias = "in progress")]
    InProgress,
    #[serde(alias = "done")]
    Done,
}
1142
1143impl WalReplayState {
1144 pub fn is_waiting(&self) -> bool {
1145 *self == Self::Waiting
1146 }
1147
1148 pub fn is_in_progress(&self) -> bool {
1149 *self == Self::InProgress
1150 }
1151
1152 pub fn is_done(&self) -> bool {
1153 *self == Self::Done
1154 }
1155}
1156
1157#[cfg(test)]
1158mod tests {
1159 use super::*;
1163 use std::collections::HashMap;
1164 use time::macros::datetime;
1165
1166 #[test]
1167 fn test_api_error_deserialization() -> Result<(), anyhow::Error> {
1168 let data = r#"
1169{
1170 "status": "error",
1171 "data": null,
1172 "errorType": "bad_data",
1173 "error": "1:14: parse error: unexpected end of input in aggregation",
1174 "warnings": []
1175}
1176"#;
1177
1178 let result = serde_json::from_str::<ApiResponse<PromqlResult>>(data)?;
1179 assert!(
1180 matches!(result, ApiResponse::Error(err) if err.error_type == crate::error::PrometheusErrorType::BadData)
1181 );
1182
1183 Ok(())
1184 }
1185
1186 #[test]
1187 fn test_api_success_deserialization() -> Result<(), anyhow::Error> {
1188 let data = r#"
1189{
1190 "status": "success",
1191 "data": {
1192 "resultType": "scalar",
1193 "result": [ 0, "0.0" ]
1194 },
1195 "warnings": []
1196}
1197"#;
1198
1199 let result = serde_json::from_str::<ApiResponse<PromqlResult>>(data)?;
1200 assert!(matches!(result, ApiResponse::Success { data: _ }));
1201
1202 Ok(())
1203 }
1204
1205 #[test]
1206 fn test_bad_combination_in_deserialization() -> Result<(), anyhow::Error> {
1207 let data = r#"
1208{
1209 "status": "error",
1210 "data": {
1211 "resultType": "scalar",
1212 "result": [ 0, "0.0" ]
1213 },
1214 "warnings": []
1215}
1216"#;
1217
1218 let result = serde_json::from_str::<ApiResponse<()>>(data);
1219 assert!(result.is_err());
1220
1221 Ok(())
1222 }
1223
1224 #[test]
1225 fn test_another_bad_combination_in_deserialization() -> Result<(), anyhow::Error> {
1226 let data = r#"
1227{
1228 "status": "success",
1229 "warnings": []
1230 "errorType": "bad_data",
1231 "error": "1:14: parse error: unexpected end of input in aggregation",
1232}
1233"#;
1234
1235 let result = serde_json::from_str::<ApiResponse<()>>(data);
1236 assert!(result.is_err());
1237
1238 Ok(())
1239 }
1240
1241 #[test]
1242 fn test_query_result_deserialization() -> Result<(), anyhow::Error> {
1243 let data = r#"
1244{
1245 "resultType": "matrix",
1246 "result": [
1247 {
1248 "metric": {
1249 "__name__": "up",
1250 "instance": "localhost:9090",
1251 "job": "prometheus"
1252 },
1253 "values": [
1254 [
1255 1659268100,
1256 "1"
1257 ],
1258 [
1259 1659268160,
1260 "1"
1261 ],
1262 [
1263 1659268220,
1264 "1"
1265 ],
1266 [
1267 1659268280,
1268 "1"
1269 ]
1270 ]
1271 }
1272 ],
1273 "stats": {
1274 "timings": {
1275 "evalTotalTime": 0.000102139,
1276 "resultSortTime": 8.7e-07,
1277 "queryPreparationTime": 5.4169e-05,
1278 "innerEvalTime": 3.787e-05,
1279 "execQueueTime": 4.07e-05,
1280 "execTotalTime": 0.000151989
1281 },
1282 "samples": {
1283 "totalQueryableSamplesPerStep": [
1284 [
1285 1659268100,
1286 1
1287 ],
1288 [
1289 1659268160,
1290 1
1291 ],
1292 [
1293 1659268220,
1294 1
1295 ],
1296 [
1297 1659268280,
1298 1
1299 ]
1300 ],
1301 "totalQueryableSamples": 4,
1302 "peakSamples": 4
1303 }
1304 }
1305}
1306"#;
1307 let result = serde_json::from_str::<PromqlResult>(data)?;
1308 let data = &result.data;
1309 assert!(data.is_matrix());
1310 let matrix = data.as_matrix().unwrap();
1311 assert!(matrix.len() == 1);
1312 let range_vector = &matrix[0];
1313 let metric = &range_vector.metric();
1314 assert!(metric.len() == 3);
1315 assert!(metric.get("__name__").is_some_and(|v| v == "up"));
1316 assert!(metric
1317 .get("instance")
1318 .is_some_and(|v| v == "localhost:9090"));
1319 assert!(metric.get("job").is_some_and(|v| v == "prometheus"));
1320 let samples = range_vector.samples();
1321 assert!(samples.len() == 4);
1322 assert!(samples[0].timestamp() == 1659268100.0);
1323 assert!(samples[0].value() == 1.0);
1324 assert!(samples[1].timestamp() == 1659268160.0);
1325 assert!(samples[1].value() == 1.0);
1326 assert!(samples[2].timestamp() == 1659268220.0);
1327 assert!(samples[2].value() == 1.0);
1328 assert!(samples[3].timestamp() == 1659268280.0);
1329 assert!(samples[3].value() == 1.0);
1330 assert!(result.stats().is_some());
1331 let stats = result.stats().unwrap();
1332 let timings = stats.timings();
1333 assert!(timings.eval_total_time() == 0.000102139);
1334 assert!(timings.result_sort_time() == 8.7e-07_f64);
1335 assert!(timings.query_preparation_time() == 5.4169e-05_f64);
1336 assert!(timings.inner_eval_time() == 3.787e-05_f64);
1337 assert!(timings.exec_queue_time() == 4.07e-05_f64);
1338 assert!(timings.exec_total_time() == 0.000151989);
1339 let samples = stats.samples();
1340 assert!(samples.peak_samples() == 4);
1341 assert!(samples.total_queryable_samples() == 4);
1342 assert!(samples.total_queryable_samples_per_step().is_some());
1343 let per_step = samples.total_queryable_samples_per_step().unwrap();
1344 assert!(per_step.len() == 4);
1345 assert!(per_step[0].timestamp() == 1659268100.0);
1346 assert!(per_step[0].value() == 1);
1347 assert!(per_step[1].timestamp() == 1659268160.0);
1348 assert!(per_step[1].value() == 1);
1349 assert!(per_step[2].timestamp() == 1659268220.0);
1350 assert!(per_step[2].value() == 1);
1351 assert!(per_step[3].timestamp() == 1659268280.0);
1352 assert!(per_step[3].value() == 1);
1353 Ok(())
1354 }
1355
1356 #[test]
1357 fn test_query_result_no_per_step_stats_deserialization() -> Result<(), anyhow::Error> {
1358 let data = r#"
1359{
1360 "resultType": "matrix",
1361 "result": [
1362 {
1363 "metric": {
1364 "__name__": "up",
1365 "instance": "localhost:9090",
1366 "job": "prometheus"
1367 },
1368 "values": [
1369 [
1370 1659268100,
1371 "1"
1372 ],
1373 [
1374 1659268160,
1375 "1"
1376 ],
1377 [
1378 1659268220,
1379 "1"
1380 ],
1381 [
1382 1659268280,
1383 "1"
1384 ]
1385 ]
1386 }
1387 ],
1388 "stats": {
1389 "timings": {
1390 "evalTotalTime": 0.000102139,
1391 "resultSortTime": 8.7e-07,
1392 "queryPreparationTime": 5.4169e-05,
1393 "innerEvalTime": 3.787e-05,
1394 "execQueueTime": 4.07e-05,
1395 "execTotalTime": 0.000151989
1396 },
1397 "samples": {
1398 "totalQueryableSamples": 4,
1399 "peakSamples": 4
1400 }
1401 }
1402}
1403"#;
1404 let result = serde_json::from_str::<PromqlResult>(data)?;
1405 assert!(result.stats().is_some());
1406 let stats = result.stats().unwrap();
1407 assert!(stats.samples().total_queryable_samples_per_step().is_none());
1408
1409 Ok(())
1410 }
1411
1412 #[test]
1413 fn test_query_result_no_stats_deserialization() -> Result<(), anyhow::Error> {
1414 let data = r#"
1415{
1416 "resultType": "matrix",
1417 "result": [
1418 {
1419 "metric": {
1420 "__name__": "up",
1421 "instance": "localhost:9090",
1422 "job": "prometheus"
1423 },
1424 "values": [
1425 [
1426 1659268100,
1427 "1"
1428 ],
1429 [
1430 1659268160,
1431 "1"
1432 ],
1433 [
1434 1659268220,
1435 "1"
1436 ],
1437 [
1438 1659268280,
1439 "1"
1440 ]
1441 ]
1442 }
1443 ]
1444}
1445"#;
1446 let result = serde_json::from_str::<PromqlResult>(data)?;
1447 assert!(result.stats().is_none());
1448
1449 Ok(())
1450 }
1451
1452 #[test]
1453 fn test_instant_vector_deserialization() -> Result<(), anyhow::Error> {
1454 let data = r#"
1455[
1456 {
1457 "metric": {
1458 "__name__": "up",
1459 "job": "prometheus",
1460 "instance": "localhost:9090"
1461 },
1462 "value": [
1463 1435781451.781,
1464 "1"
1465 ]
1466 },
1467 {
1468 "metric": {
1469 "__name__": "up",
1470 "job": "node",
1471 "instance": "localhost:9100"
1472 },
1473 "value": [
1474 1435781451.781,
1475 "0"
1476 ]
1477 }
1478]
1479"#;
1480 serde_json::from_str::<Vec<InstantVector>>(data)?;
1481 Ok(())
1482 }
1483
1484 #[test]
1485 fn test_range_vector_deserialization() -> Result<(), anyhow::Error> {
1486 let data = r#"
1487[
1488 {
1489 "metric": {
1490 "__name__": "up",
1491 "job": "prometheus",
1492 "instance": "localhost:9090"
1493 },
1494 "values": [
1495 [
1496 1435781430.781,
1497 "1"
1498 ],
1499 [
1500 1435781445.781,
1501 "1"
1502 ],
1503 [
1504 1435781460.781,
1505 "1"
1506 ]
1507 ]
1508 },
1509 {
1510 "metric": {
1511 "__name__": "up",
1512 "job": "node",
1513 "instance": "localhost:9091"
1514 },
1515 "values": [
1516 [
1517 1435781430.781,
1518 "0"
1519 ],
1520 [
1521 1435781445.781,
1522 "0"
1523 ],
1524 [
1525 1435781460.781,
1526 "1"
1527 ]
1528 ]
1529 }
1530]
1531"#;
1532 serde_json::from_str::<Vec<RangeVector>>(data)?;
1533 Ok(())
1534 }
1535
1536 #[test]
1537 fn test_target_deserialization() -> Result<(), anyhow::Error> {
1538 let data = r#"
1539{
1540 "activeTargets": [
1541 {
1542 "discoveredLabels": {
1543 "__address__": "127.0.0.1:9090",
1544 "__metrics_path__": "/metrics",
1545 "__scheme__": "http",
1546 "job": "prometheus"
1547 },
1548 "labels": {
1549 "instance": "127.0.0.1:9090",
1550 "job": "prometheus"
1551 },
1552 "scrapePool": "prometheus",
1553 "scrapeUrl": "http://127.0.0.1:9090/metrics",
1554 "globalUrl": "http://example-prometheus:9090/metrics",
1555 "lastError": "",
1556 "lastScrape": "2017-01-17T15:07:44.723715405+01:00",
1557 "lastScrapeDuration": 0.050688943,
1558 "health": "up",
1559 "scrapeInterval": "1m",
1560 "scrapeTimeout": "10s"
1561 }
1562 ],
1563 "droppedTargets": [
1564 {
1565 "discoveredLabels": {
1566 "__address__": "127.0.0.1:9100",
1567 "__metrics_path__": "/metrics",
1568 "__scheme__": "http",
1569 "__scrape_interval__": "1m",
1570 "__scrape_timeout__": "10s",
1571 "job": "node"
1572 }
1573 }
1574 ]
1575}
1576"#;
1577 let targets = serde_json::from_str::<Targets>(data)?;
1578 let active = &targets.active();
1579 assert!(active.len() == 1);
1580 let target = &active[0];
1581 assert!(target
1582 .discovered_labels()
1583 .get("__address__")
1584 .is_some_and(|v| v == "127.0.0.1:9090"));
1585 assert!(target
1586 .discovered_labels()
1587 .get("__metrics_path__")
1588 .is_some_and(|v| v == "/metrics"));
1589 assert!(target
1590 .discovered_labels()
1591 .get("__scheme__")
1592 .is_some_and(|v| v == "http"));
1593 assert!(target
1594 .discovered_labels()
1595 .get("job")
1596 .is_some_and(|v| v == "prometheus"));
1597 assert!(target
1598 .labels()
1599 .get("instance")
1600 .is_some_and(|v| v == "127.0.0.1:9090"));
1601 assert!(target
1602 .labels()
1603 .get("job")
1604 .is_some_and(|v| v == "prometheus"));
1605 assert!(target.scrape_pool() == "prometheus");
1606 assert!(target.scrape_url() == &Url::parse("http://127.0.0.1:9090/metrics")?);
1607 assert!(target.global_url() == &Url::parse("http://example-prometheus:9090/metrics")?);
1608 assert!(target.last_error().is_empty());
1609 assert!(target.last_scrape() == &datetime!(2017-01-17 15:07:44.723715405 +1));
1610 assert!(target.last_scrape_duration() == 0.050688943);
1611 assert!(target.health().is_up());
1612 assert!(target.scrape_interval() == &Duration::seconds(60));
1613 assert!(target.scrape_timeout() == &Duration::seconds(10));
1614 let dropped = &targets.dropped();
1615 assert!(dropped.len() == 1);
1616 let target = &dropped[0];
1617 assert!(target
1618 .discovered_labels()
1619 .get("__address__")
1620 .is_some_and(|v| v == "127.0.0.1:9100"));
1621 assert!(target
1622 .discovered_labels()
1623 .get("__metrics_path__")
1624 .is_some_and(|v| v == "/metrics"));
1625 assert!(target
1626 .discovered_labels()
1627 .get("__scheme__")
1628 .is_some_and(|v| v == "http"));
1629 assert!(target
1630 .discovered_labels()
1631 .get("__scrape_interval__")
1632 .is_some_and(|v| v == "1m"));
1633 assert!(target
1634 .discovered_labels()
1635 .get("__scrape_timeout__")
1636 .is_some_and(|v| v == "10s"));
1637 assert!(target
1638 .discovered_labels()
1639 .get("job")
1640 .is_some_and(|v| v == "node"));
1641 Ok(())
1642 }
1643
1644 #[test]
1645 fn test_rule_group_deserialization() -> Result<(), anyhow::Error> {
1646 let data = r#"
1647{
1648 "groups": [
1649 {
1650 "rules": [
1651 {
1652 "alerts": [
1653 {
1654 "activeAt": "2018-07-04T20:27:12.60602144+02:00",
1655 "annotations": {
1656 "summary": "High request latency"
1657 },
1658 "labels": {
1659 "alertname": "HighRequestLatency",
1660 "severity": "page"
1661 },
1662 "state": "firing",
1663 "value": "1e+00"
1664 }
1665 ],
1666 "annotations": {
1667 "summary": "High request latency"
1668 },
1669 "duration": 600,
1670 "health": "ok",
1671 "labels": {
1672 "severity": "page"
1673 },
1674 "name": "HighRequestLatency",
1675 "query": "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5",
1676 "type": "alerting",
1677 "evaluationTime": 0.000312805,
1678 "lastEvaluation": "2023-10-05T19:51:25.462004334+02:00",
1679 "keepFiringFor": 60
1680 },
1681 {
1682 "health": "ok",
1683 "name": "job:http_inprogress_requests:sum",
1684 "query": "sum by (job) (http_inprogress_requests)",
1685 "type": "recording",
1686 "evaluationTime": 0.000256946,
1687 "lastEvaluation": "2023-10-05T19:51:25.052982522+02:00"
1688 }
1689 ],
1690 "file": "/rules.yaml",
1691 "interval": 60,
1692 "limit": 0,
1693 "name": "example",
1694 "evaluationTime": 0.000267716,
1695 "lastEvaluation": "2023-10-05T19:51:25.052974842+02:00"
1696 }
1697 ]
1698}
1699"#;
1700 let groups = serde_json::from_str::<RuleGroups>(data)?.groups;
1701 assert!(groups.len() == 1);
1702 let group = &groups[0];
1703 assert!(group.name() == "example");
1704 assert!(group.file() == "/rules.yaml");
1705 assert!(group.interval() == 60.0);
1706 assert!(group.limit() == 0);
1707 assert!(group.evaluation_time() == 0.000267716);
1708 assert!(group.last_evaluation() == &datetime!(2023-10-05 7:51:25.052974842 pm +2));
1709 assert!(group.rules().len() == 2);
1710 let alerting_rule = &group.rules[0].as_alerting().unwrap();
1711 assert!(alerting_rule.health() == RuleHealth::Good);
1712 assert!(alerting_rule.name() == "HighRequestLatency");
1713 assert!(alerting_rule.query() == "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5");
1714 assert!(alerting_rule.evaluation_time() == 0.000312805);
1715 assert!(alerting_rule.last_evaluation() == &datetime!(2023-10-05 7:51:25.462004334 pm +2));
1716 assert!(alerting_rule.duration() == 600.0);
1717 assert!(alerting_rule.keep_firing_for() == 60.0);
1718 assert!(alerting_rule.alerts().len() == 1);
1719 assert!(alerting_rule
1720 .annotations()
1721 .get("summary")
1722 .is_some_and(|v| v == "High request latency"));
1723 let alert = &alerting_rule.alerts()[0];
1724 assert!(alert.value() == 1.0);
1725 assert!(alert.state().is_firing());
1726 assert!(alert.active_at() == &datetime!(2018-07-04 20:27:12.60602144 +2));
1727 let recording_rule = &group.rules[1].as_recording().unwrap();
1728 assert!(recording_rule.health() == RuleHealth::Good);
1729 assert!(recording_rule.name() == "job:http_inprogress_requests:sum");
1730 assert!(recording_rule.query() == "sum by (job) (http_inprogress_requests)");
1731 assert!(recording_rule.evaluation_time() == 0.000256946);
1732 assert!(recording_rule.last_evaluation() == &datetime!(2023-10-05 7:51:25.052982522 pm +2));
1733 Ok(())
1734 }
1735
1736 #[test]
1737 fn test_alert_deserialization() -> Result<(), anyhow::Error> {
1738 let data = r#"
1739{
1740 "alerts": [
1741 {
1742 "activeAt":"2018-07-04T20:27:12.60602144+02:00",
1743 "annotations":{
1744 },
1745 "labels":{
1746 "alertname":"my-alert"
1747 },
1748 "state":"firing",
1749 "value":"1e+00"
1750 }
1751 ]
1752}
1753"#;
1754 serde_json::from_str::<Alerts>(data)?;
1755 Ok(())
1756 }
1757
1758 #[test]
1759 fn test_target_metadata_deserialization_1() -> Result<(), anyhow::Error> {
1760 let data = r#"
1761[
1762 {
1763 "target": {
1764 "instance": "127.0.0.1:9090",
1765 "job": "prometheus"
1766 },
1767 "type": "gauge",
1768 "help": "Number of goroutines that currently exist.",
1769 "unit": ""
1770 },
1771 {
1772 "target": {
1773 "instance": "127.0.0.1:9091",
1774 "job": "prometheus"
1775 },
1776 "type": "gauge",
1777 "help": "Number of goroutines that currently exist.",
1778 "unit": ""
1779 },
1780 {
1781 "target": {
1782 "instance": "localhost:9090",
1783 "job": "prometheus"
1784 },
1785 "metric": "process_virtual_memory_bytes",
1786 "type": "gauge",
1787 "help": "Virtual memory size in bytes.",
1788 "unit": ""
1789 },
1790 {
1791 "target": {
1792 "instance": "localhost:9090",
1793 "job": "prometheus"
1794 },
1795 "metric": "prometheus_http_response_size_bytes",
1796 "type": "histogram",
1797 "help": "Histogram of response size for HTTP requests.",
1798 "unit": ""
1799 },
1800 {
1801 "target": {
1802 "instance": "localhost:9090",
1803 "job": "prometheus"
1804 },
1805 "metric": "prometheus_ready",
1806 "type": "gauge",
1807 "help": "Whether Prometheus startup was fully completed and the server is ready for normal operation.",
1808 "unit": ""
1809 },
1810 {
1811 "target": {
1812 "instance": "localhost:9090",
1813 "job": "prometheus"
1814 },
1815 "metric": "prometheus_rule_group_iterations_missed_total",
1816 "type": "counter",
1817 "help": "The total number of rule group evaluations missed due to slow rule group evaluation.",
1818 "unit": ""
1819 },
1820 {
1821 "target": {
1822 "instance": "localhost:9090",
1823 "job": "prometheus"
1824 },
1825 "metric": "prometheus_target_scrape_pool_reloads_failed_total",
1826 "type": "counter",
1827 "help": "Total number of failed scrape pool reloads.",
1828 "unit": ""
1829 },
1830 {
1831 "target": {
1832 "instance": "localhost:9090",
1833 "job": "prometheus"
1834 },
1835 "metric": "prometheus_target_scrape_pool_reloads_total",
1836 "type": "counter",
1837 "help": "Total number of scrape pool reloads.",
1838 "unit": ""
1839 }
1840]
1841"#;
1842 let metadata = serde_json::from_str::<Vec<TargetMetadata>>(data)?;
1843 assert!(metadata.len() == 8);
1844 let first = &metadata[0];
1845 assert!(first
1846 .target()
1847 .get("instance")
1848 .is_some_and(|v| v == "127.0.0.1:9090"));
1849 assert!(first.target().get("job").is_some_and(|v| v == "prometheus"));
1850 assert!(first.metric_type().is_gauge());
1851 assert!(first.help() == "Number of goroutines that currently exist.");
1852 assert!(first.unit().is_empty());
1853 assert!(first.metric().is_none());
1854 let third = &metadata[2];
1855 assert!(third
1856 .target()
1857 .get("instance")
1858 .is_some_and(|v| v == "localhost:9090"));
1859 assert!(third.target().get("job").is_some_and(|v| v == "prometheus"));
1860 assert!(third.metric_type().is_gauge());
1861 assert!(third.help() == "Virtual memory size in bytes.");
1862 assert!(third.unit().is_empty());
1863 assert!(third
1864 .metric()
1865 .is_some_and(|v| v == "process_virtual_memory_bytes"));
1866 let fourth = &metadata[3];
1867 assert!(fourth
1868 .target()
1869 .get("instance")
1870 .is_some_and(|v| v == "localhost:9090"));
1871 assert!(fourth
1872 .target()
1873 .get("job")
1874 .is_some_and(|v| v == "prometheus"));
1875 assert!(fourth.metric_type().is_histogram());
1876 assert!(fourth.help() == "Histogram of response size for HTTP requests.");
1877 assert!(fourth.unit().is_empty());
1878 assert!(fourth
1879 .metric()
1880 .is_some_and(|v| v == "prometheus_http_response_size_bytes"));
1881 Ok(())
1882 }
1883
1884 #[test]
1885 fn test_target_metadata_deserialization_2() -> Result<(), anyhow::Error> {
1886 let data = r#"
1887[
1888 {
1889 "target": {
1890 "instance": "127.0.0.1:9090",
1891 "job": "prometheus"
1892 },
1893 "metric": "prometheus_treecache_zookeeper_failures_total",
1894 "type": "counter",
1895 "help": "The total number of ZooKeeper failures.",
1896 "unit": ""
1897 },
1898 {
1899 "target": {
1900 "instance": "127.0.0.1:9090",
1901 "job": "prometheus"
1902 },
1903 "metric": "prometheus_tsdb_reloads_total",
1904 "type": "counter",
1905 "help": "Number of times the database reloaded block data from disk.",
1906 "unit": ""
1907 }
1908]
1909"#;
1910 serde_json::from_str::<Vec<TargetMetadata>>(data)?;
1911 Ok(())
1912 }
1913
1914 #[test]
1915 fn test_metric_metadata_deserialization() -> Result<(), anyhow::Error> {
1916 let data = r#"
1917{
1918 "cortex_ring_tokens": [
1919 {
1920 "type": "gauge",
1921 "help": "Number of tokens in the ring",
1922 "unit": ""
1923 }
1924 ],
1925 "http_requests_total": [
1926 {
1927 "type": "counter",
1928 "help": "Number of HTTP requests",
1929 "unit": ""
1930 },
1931 {
1932 "type": "counter",
1933 "help": "Amount of HTTP requests",
1934 "unit": ""
1935 }
1936 ]
1937}
1938"#;
1939 let metadata = serde_json::from_str::<HashMap<String, Vec<MetricMetadata>>>(data)?;
1940 assert!(metadata.len() == 2);
1941 assert!(metadata
1942 .get("cortex_ring_tokens")
1943 .is_some_and(|v| v[0].metric_type().is_gauge()
1944 && v[0].help() == "Number of tokens in the ring"
1945 && v[0].unit().is_empty()));
1946 assert!(metadata.get("http_requests_total").is_some_and(|v| v[0]
1947 .metric_type()
1948 .is_counter()
1949 && v[0].help() == "Number of HTTP requests"
1950 && v[0].unit().is_empty()));
1951 Ok(())
1952 }
1953
1954 #[test]
1955 fn test_alertmanagers_deserialization() -> Result<(), anyhow::Error> {
1956 let data = r#"
1957{
1958 "activeAlertmanagers": [
1959 {
1960 "url": "http://127.0.0.1:9090/api/v1/alerts"
1961 }
1962 ],
1963 "droppedAlertmanagers": [
1964 {
1965 "url": "http://127.0.0.1:9093/api/v1/alerts"
1966 }
1967 ]
1968}
1969"#;
1970 serde_json::from_str::<Alertmanagers>(data)?;
1971 Ok(())
1972 }
1973
1974 #[test]
1975 fn test_buildinformation_deserialization() -> Result<(), anyhow::Error> {
1976 let data = r#"
1977{
1978 "version": "2.13.1",
1979 "revision": "cb7cbad5f9a2823a622aaa668833ca04f50a0ea7",
1980 "branch": "master",
1981 "buildUser": "julius@desktop",
1982 "buildDate": "20191102-16:19:51",
1983 "goVersion": "go1.13.1"
1984}
1985"#;
1986 serde_json::from_str::<BuildInformation>(data)?;
1987 Ok(())
1988 }
1989
1990 #[test]
1991 fn test_runtimeinformation_deserialization() -> Result<(), anyhow::Error> {
1992 let data = r#"
1993{
1994 "startTime": "2019-11-02T17:23:59.301361365+01:00",
1995 "CWD": "/",
1996 "reloadConfigSuccess": true,
1997 "lastConfigTime": "2019-11-02T17:23:59+01:00",
1998 "timeSeriesCount": 873,
1999 "corruptionCount": 0,
2000 "goroutineCount": 48,
2001 "GOMAXPROCS": 4,
2002 "GOGC": "",
2003 "GODEBUG": "",
2004 "storageRetention": "15d"
2005}
2006"#;
2007 serde_json::from_str::<RuntimeInformation>(data)?;
2008 Ok(())
2009 }
2010
2011 #[test]
2012 fn test_tsdb_stats_deserialization() -> Result<(), anyhow::Error> {
2013 let data = r#"
2014{
2015 "headStats": {
2016 "numSeries": 508,
2017 "chunkCount": 937,
2018 "minTime": 1591516800000,
2019 "maxTime": 1598896800143
2020 },
2021 "seriesCountByMetricName": [
2022 {
2023 "name": "net_conntrack_dialer_conn_failed_total",
2024 "value": 20
2025 },
2026 {
2027 "name": "prometheus_http_request_duration_seconds_bucket",
2028 "value": 20
2029 }
2030 ],
2031 "labelValueCountByLabelName": [
2032 {
2033 "name": "__name__",
2034 "value": 211
2035 },
2036 {
2037 "name": "event",
2038 "value": 3
2039 }
2040 ],
2041 "memoryInBytesByLabelName": [
2042 {
2043 "name": "__name__",
2044 "value": 8266
2045 },
2046 {
2047 "name": "instance",
2048 "value": 28
2049 }
2050 ],
2051 "seriesCountByLabelValuePair": [
2052 {
2053 "name": "job=prometheus",
2054 "value": 425
2055 },
2056 {
2057 "name": "instance=localhost:9090",
2058 "value": 425
2059 }
2060 ]
2061}
2062"#;
2063 serde_json::from_str::<TsdbStatistics>(data)?;
2064 Ok(())
2065 }
2066
2067 #[test]
2068 fn test_wal_replay_deserialization() -> Result<(), anyhow::Error> {
2069 let data = r#"
2070{
2071 "min": 2,
2072 "max": 5,
2073 "current": 40,
2074 "state": "waiting"
2075}
2076"#;
2077 let result: Result<WalReplayStatistics, serde_json::Error> = serde_json::from_str(data);
2078 assert!(result.is_ok());
2079
2080 let data = r#"
2081{
2082 "min": 2,
2083 "max": 5,
2084 "current": 40,
2085 "state": "in progress"
2086}
2087"#;
2088 let result: Result<WalReplayStatistics, serde_json::Error> = serde_json::from_str(data);
2089 assert!(result.is_ok());
2090
2091 let data = r#"
2092{
2093 "min": 2,
2094 "max": 5,
2095 "current": 40,
2096 "state": "done"
2097}
2098"#;
2099 let result: Result<WalReplayStatistics, serde_json::Error> = serde_json::from_str(data);
2100 assert!(result.is_ok());
2101
2102 let data = r#"
2103{
2104 "min": 2,
2105 "max": 5,
2106 "current": 40
2107}
2108"#;
2109 serde_json::from_str::<WalReplayStatistics>(data)?;
2110 Ok(())
2111 }
2112}