1use crate::util::{AlertState, RuleHealth, TargetHealth};
3use enum_as_inner::EnumAsInner;
4use serde::Deserialize;
5use std::collections::HashMap;
6use std::fmt;
7use time::{Duration, OffsetDateTime, PrimitiveDateTime};
8use url::Url;
9
/// Custom deserializers for response fields that the Prometheus HTTP API does not
/// encode in a directly `serde`-friendly way (quoted floats, build dates, durations).
mod de {
11 use serde::{
12 de::{Error as SerdeError, Unexpected},
13 Deserialize, Deserializer,
14 };
15 use std::str::FromStr;
16 use time::format_description::FormatItem;
17 use time::macros::format_description;
18 use time::{Duration, PrimitiveDateTime};
19
20 const BUILD_INFO_DATE_FORMAT: &[FormatItem] = format_description!(
21 "[year repr:full][month repr:numerical][day]-[hour repr:24]:[minute]:[second]"
22 );
23
    /// Deserializes a float that the API returns either as a bare JSON number or as a
    /// quoted string (sample values arrive as strings, e.g. "1.5" or "NaN").
    pub(super) fn deserialize_f64<'de, D>(deserializer: D) -> Result<f64, D::Error>
    where
        D: Deserializer<'de>,
    {
28 #[derive(Deserialize)]
29 #[serde(untagged)]
30 enum Value {
31 Str(String),
32 Number(f64),
33 }
34
35 match Value::deserialize(deserializer)? {
36 Value::Str(s) => f64::from_str(&s).map_err(|_| {
37 SerdeError::invalid_value(
38 Unexpected::Str(&s),
39 &"a float value inside a quoted JSON string",
40 )
41 }),
42 Value::Number(n) => Ok(n),
43 }
44 }
45
    /// Parses the `buildDate` field of the build information endpoint, which uses a
    /// compact `yyyymmdd-hh:mm:ss` layout instead of RFC 3339.
    pub(super) fn deserialize_build_info_date<'de, D>(
        deserializer: D,
    ) -> Result<PrimitiveDateTime, D::Error>
    where
        D: Deserializer<'de>,
    {
53 String::deserialize(deserializer).and_then(|s| {
54 PrimitiveDateTime::parse(&s, &BUILD_INFO_DATE_FORMAT).map_err(|_| {
55 SerdeError::invalid_value(
56 Unexpected::Str(&s),
57 &"a datetime string in format <yyyymmdd-hh:mm:ss>",
58 )
59 })
60 })
61 }
62
    /// Parses a Prometheus duration string (e.g. "30s", "1m", "1h30m", "15d") into a
    /// [`Duration`]. Supported units are y, w, d, h, m, s and ms.
    pub(super) fn deserialize_prometheus_duration<'de, D>(
        deserializer: D,
    ) -> Result<Duration, D::Error>
    where
        D: Deserializer<'de>,
    {
        let raw_str = String::deserialize(deserializer)?;

        let mut total_milliseconds: i64 = 0;

        // Digits are accumulated here until a unit character is reached.
        let mut raw_num = String::new();

        let mut duration_iter = raw_str.chars().peekable();

        while let Some(item) = duration_iter.next() {
            if item.is_ascii_digit() {
                raw_num.push(item);
                continue;
            }

            let num = raw_num.parse::<i64>().map_err(SerdeError::custom)?;

            match item {
                'y' => {
                    total_milliseconds += num * 1000 * 60 * 60 * 24 * 365;
                }
                'w' => {
                    total_milliseconds += num * 1000 * 60 * 60 * 24 * 7;
                }
                'd' => {
                    total_milliseconds += num * 1000 * 60 * 60 * 24;
                }
                'h' => {
                    total_milliseconds += num * 1000 * 60 * 60;
                }
                'm' => {
                    // "ms" has to be disambiguated from "m" by peeking at the next character.
                    if duration_iter.next_if_eq(&'s').is_some() {
                        total_milliseconds += num;
                    } else {
                        total_milliseconds += num * 1000 * 60;
                    }
                }
                's' => {
                    total_milliseconds += num * 1000;
                }
                _ => return Err(SerdeError::custom("invalid time duration")),
            };

            raw_num.clear();
        }

        // Trailing digits without a unit are ignored, which also covers the bare "0" duration.
        Ok(Duration::milliseconds(total_milliseconds))
    }
126}
127
128#[derive(Debug, Deserialize)]
129#[serde(tag = "status")]
130pub(crate) enum ApiResponse<D> {
131 #[serde(alias = "success")]
132 Success { data: D },
133 #[serde(alias = "error")]
134 Error(crate::error::PrometheusError),
135}
136
137#[derive(Debug, Clone, Deserialize)]
138pub struct Stats {
139 timings: Timings,
140 samples: Samples,
141}
142
143impl Stats {
144 pub fn timings(&self) -> &Timings {
145 &self.timings
146 }
147
148 pub fn samples(&self) -> &Samples {
149 &self.samples
150 }
151}
152
153#[derive(Debug, Copy, Clone, Deserialize)]
154pub struct Timings {
155 #[serde(alias = "evalTotalTime")]
156 eval_total_time: f64,
157 #[serde(alias = "resultSortTime")]
158 result_sort_time: f64,
159 #[serde(alias = "queryPreparationTime")]
160 query_preparation_time: f64,
161 #[serde(alias = "innerEvalTime")]
162 inner_eval_time: f64,
163 #[serde(alias = "execQueueTime")]
164 exec_queue_time: f64,
165 #[serde(alias = "execTotalTime")]
166 exec_total_time: f64,
167}
168
169impl Timings {
170 pub fn eval_total_time(&self) -> f64 {
171 self.eval_total_time
172 }
173
174 pub fn result_sort_time(&self) -> f64 {
175 self.result_sort_time
176 }
177
178 pub fn query_preparation_time(&self) -> f64 {
179 self.query_preparation_time
180 }
181
182 pub fn inner_eval_time(&self) -> f64 {
183 self.inner_eval_time
184 }
185
186 pub fn exec_queue_time(&self) -> f64 {
187 self.exec_queue_time
188 }
189
190 pub fn exec_total_time(&self) -> f64 {
191 self.exec_total_time
192 }
193}
194
195#[derive(Debug, Clone, Deserialize)]
196pub struct Samples {
197 #[serde(alias = "totalQueryableSamplesPerStep")]
198 total_queryable_samples_per_step: Option<Vec<Sample>>,
199 #[serde(alias = "totalQueryableSamples")]
200 total_queryable_samples: i64,
201 #[serde(alias = "peakSamples")]
202 peak_samples: i64,
203}
204
205impl Samples {
206 pub fn total_queryable_samples_per_step(&self) -> Option<&Vec<Sample>> {
207 self.total_queryable_samples_per_step.as_ref()
208 }
209
210 pub fn total_queryable_samples(&self) -> i64 {
211 self.total_queryable_samples
212 }
213
214 pub fn peak_samples(&self) -> i64 {
215 self.peak_samples
216 }
217}
218
219#[derive(Debug, Clone, Deserialize)]
220pub struct PromqlResult {
221 #[serde(flatten)]
222 pub(crate) data: Data,
223 pub(crate) stats: Option<Stats>,
224}
225
226impl PromqlResult {
227 pub fn data(&self) -> &Data {
229 &self.data
230 }
231
232 pub fn stats(&self) -> Option<&Stats> {
234 self.stats.as_ref()
235 }
236
237 pub fn into_inner(self) -> (Data, Option<Stats>) {
239 (self.data, self.stats)
240 }
241}
242
243#[derive(Clone, Debug, Deserialize, EnumAsInner)]
245#[serde(tag = "resultType", content = "result")]
246pub enum Data {
247 #[serde(alias = "vector")]
248 Vector(Vec<InstantVector>),
249 #[serde(alias = "matrix")]
250 Matrix(Vec<RangeVector>),
251 #[serde(alias = "scalar")]
252 Scalar(Sample),
253}
254
255impl Data {
256 pub fn is_empty(&self) -> bool {
258 match self {
259 Data::Vector(v) => v.is_empty(),
260 Data::Matrix(v) => v.is_empty(),
261 Data::Scalar(_) => false,
262 }
263 }
264}
265
266#[derive(Clone, Debug, PartialEq, Deserialize)]
268pub struct InstantVector {
269 pub(crate) metric: HashMap<String, String>,
270 #[serde(alias = "value")]
271 pub(crate) sample: Sample,
272}
273
274impl InstantVector {
275 pub fn metric(&self) -> &HashMap<String, String> {
278 &self.metric
279 }
280
281 pub fn sample(&self) -> &Sample {
283 &self.sample
284 }
285
286 pub fn into_inner(self) -> (HashMap<String, String>, Sample) {
288 (self.metric, self.sample)
289 }
290}
291
292#[derive(Clone, Debug, PartialEq, Deserialize)]
294pub struct RangeVector {
295 pub(crate) metric: HashMap<String, String>,
296 #[serde(alias = "values")]
297 pub(crate) samples: Vec<Sample>,
298}
299
300impl RangeVector {
301 pub fn metric(&self) -> &HashMap<String, String> {
304 &self.metric
305 }
306
307 pub fn samples(&self) -> &[Sample] {
309 &self.samples
310 }
311
312 pub fn into_inner(self) -> (HashMap<String, String>, Vec<Sample>) {
314 (self.metric, self.samples)
315 }
316}
317
318#[derive(Clone, Copy, Debug, PartialEq, Deserialize)]
320pub struct Sample {
321 pub(crate) timestamp: f64,
322 #[serde(deserialize_with = "de::deserialize_f64")]
323 pub(crate) value: f64,
324}
325
326impl Sample {
327 pub fn timestamp(&self) -> f64 {
329 self.timestamp
330 }
331
332 pub fn value(&self) -> f64 {
334 self.value
335 }
336}
337
338#[derive(Clone, Debug, Deserialize)]
340pub struct Targets {
341 #[serde(alias = "activeTargets")]
342 pub(crate) active: Vec<ActiveTarget>,
343 #[serde(alias = "droppedTargets")]
344 pub(crate) dropped: Vec<DroppedTarget>,
345}
346
347impl Targets {
348 pub fn active(&self) -> &[ActiveTarget] {
350 &self.active
351 }
352
353 pub fn dropped(&self) -> &[DroppedTarget] {
355 &self.dropped
356 }
357}
358
359#[derive(Clone, Debug, Deserialize)]
361pub struct ActiveTarget {
362 #[serde(alias = "discoveredLabels")]
363 pub(crate) discovered_labels: HashMap<String, String>,
364 pub(crate) labels: HashMap<String, String>,
365 #[serde(alias = "scrapePool")]
366 pub(crate) scrape_pool: String,
367 #[serde(alias = "scrapeUrl")]
368 pub(crate) scrape_url: Url,
369 #[serde(alias = "globalUrl")]
370 pub(crate) global_url: Url,
371 #[serde(alias = "lastError")]
372 pub(crate) last_error: String,
373 #[serde(alias = "lastScrape")]
374 #[serde(with = "time::serde::rfc3339")]
375 pub(crate) last_scrape: OffsetDateTime,
376 #[serde(alias = "lastScrapeDuration")]
377 pub(crate) last_scrape_duration: f64,
378 pub(crate) health: TargetHealth,
379 #[serde(alias = "scrapeInterval")]
380 #[serde(deserialize_with = "de::deserialize_prometheus_duration")]
381 pub(crate) scrape_interval: Duration,
382 #[serde(alias = "scrapeTimeout")]
383 #[serde(deserialize_with = "de::deserialize_prometheus_duration")]
384 pub(crate) scrape_timeout: Duration,
385}
386
387impl ActiveTarget {
388 pub fn discovered_labels(&self) -> &HashMap<String, String> {
390 &self.discovered_labels
391 }
392
393 pub fn labels(&self) -> &HashMap<String, String> {
395 &self.labels
396 }
397
398 pub fn scrape_pool(&self) -> &str {
400 &self.scrape_pool
401 }
402
403 pub fn scrape_url(&self) -> &Url {
405 &self.scrape_url
406 }
407
408 pub fn global_url(&self) -> &Url {
410 &self.global_url
411 }
412
413 pub fn last_error(&self) -> &str {
415 &self.last_error
416 }
417
418 pub fn last_scrape(&self) -> &OffsetDateTime {
420 &self.last_scrape
421 }
422
423 pub fn last_scrape_duration(&self) -> f64 {
425 self.last_scrape_duration
426 }
427
428 pub fn health(&self) -> TargetHealth {
430 self.health
431 }
432
433 pub fn scrape_interval(&self) -> &Duration {
435 &self.scrape_interval
436 }
437
438 pub fn scrape_timeout(&self) -> &Duration {
440 &self.scrape_timeout
441 }
442}
443
444#[derive(Clone, Debug, Deserialize)]
446pub struct DroppedTarget {
447 #[serde(alias = "discoveredLabels")]
448 pub(crate) discovered_labels: HashMap<String, String>,
449}
450
451impl DroppedTarget {
452 pub fn discovered_labels(&self) -> &HashMap<String, String> {
454 &self.discovered_labels
455 }
456}
457
458#[derive(Debug, Deserialize)]
461pub(crate) struct RuleGroups {
462 pub groups: Vec<RuleGroup>,
463}
464
465#[derive(Clone, Debug, Deserialize)]
467pub struct RuleGroup {
468 pub(crate) rules: Vec<Rule>,
469 pub(crate) file: String,
470 pub(crate) interval: f64,
471 pub(crate) name: String,
472 #[serde(alias = "evaluationTime")]
473 pub(crate) evaluation_time: f64,
474 #[serde(alias = "lastEvaluation", with = "time::serde::rfc3339")]
475 pub(crate) last_evaluation: OffsetDateTime,
476 pub(crate) limit: usize,
477}
478
479impl RuleGroup {
480 pub fn rules(&self) -> &[Rule] {
482 &self.rules
483 }
484
485 pub fn file(&self) -> &str {
487 &self.file
488 }
489
490 pub fn interval(&self) -> f64 {
492 self.interval
493 }
494
495 pub fn name(&self) -> &str {
497 &self.name
498 }
499
500 pub fn last_evaluation(&self) -> &OffsetDateTime {
502 &self.last_evaluation
503 }
504
505 pub fn evaluation_time(&self) -> f64 {
508 self.evaluation_time
509 }
510
511 pub fn limit(&self) -> usize {
514 self.limit
515 }
516}
517
518#[derive(Clone, Debug, Deserialize)]
520#[serde(tag = "type")]
521pub enum Rule {
522 #[serde(alias = "recording")]
523 Recording(RecordingRule),
524 #[serde(alias = "alerting")]
525 Alerting(AlertingRule),
526}
527
528impl Rule {
    pub fn as_recording(&self) -> Option<&RecordingRule> {
        match self {
            Self::Recording(rule) => Some(rule),
            _ => None,
        }
    }

    pub fn as_alerting(&self) -> Option<&AlertingRule> {
        match self {
            Self::Alerting(rule) => Some(rule),
            _ => None,
        }
    }
542}
543
544#[derive(Clone, Debug, Deserialize)]
546pub struct AlertingRule {
547 pub(crate) alerts: Vec<Alert>,
548 pub(crate) annotations: HashMap<String, String>,
549 pub(crate) duration: f64,
550 pub(crate) health: RuleHealth,
551 pub(crate) labels: HashMap<String, String>,
552 pub(crate) name: String,
553 pub(crate) query: String,
554 #[serde(alias = "evaluationTime")]
555 pub(crate) evaluation_time: f64,
556 #[serde(alias = "lastEvaluation", with = "time::serde::rfc3339")]
557 pub(crate) last_evaluation: OffsetDateTime,
558 #[serde(alias = "keepFiringFor")]
559 pub(crate) keep_firing_for: f64,
560}
561
562impl AlertingRule {
563 pub fn alerts(&self) -> &[Alert] {
565 &self.alerts
566 }
567
568 pub fn annotations(&self) -> &HashMap<String, String> {
570 &self.annotations
571 }
572
573 pub fn duration(&self) -> f64 {
575 self.duration
576 }
577
578 pub fn health(&self) -> RuleHealth {
580 self.health
581 }
582
583 pub fn labels(&self) -> &HashMap<String, String> {
585 &self.labels
586 }
587
588 pub fn name(&self) -> &str {
590 &self.name
591 }
592
593 pub fn query(&self) -> &str {
595 &self.query
596 }
597
598 pub fn last_evaluation(&self) -> &OffsetDateTime {
600 &self.last_evaluation
601 }
602
603 pub fn evaluation_time(&self) -> f64 {
605 self.evaluation_time
606 }
607
608 pub fn keep_firing_for(&self) -> f64 {
611 self.keep_firing_for
612 }
613}
614
615#[derive(Clone, Debug, Deserialize)]
617pub struct RecordingRule {
618 pub(crate) health: RuleHealth,
619 pub(crate) name: String,
620 pub(crate) query: String,
621 pub(crate) labels: Option<HashMap<String, String>>,
622 #[serde(alias = "evaluationTime")]
623 pub(crate) evaluation_time: f64,
624 #[serde(alias = "lastEvaluation", with = "time::serde::rfc3339")]
625 pub(crate) last_evaluation: OffsetDateTime,
626}
627
628impl RecordingRule {
629 pub fn health(&self) -> RuleHealth {
631 self.health
632 }
633
634 pub fn name(&self) -> &str {
636 &self.name
637 }
638
639 pub fn query(&self) -> &str {
641 &self.query
642 }
643
644 pub fn labels(&self) -> &Option<HashMap<String, String>> {
646 &self.labels
647 }
648
649 pub fn last_evaluation(&self) -> &OffsetDateTime {
651 &self.last_evaluation
652 }
653
654 pub fn evaluation_time(&self) -> f64 {
656 self.evaluation_time
657 }
658}
659
660#[derive(Debug, Deserialize)]
663pub(crate) struct Alerts {
664 pub alerts: Vec<Alert>,
665}
666
667#[derive(Clone, Debug, Deserialize)]
669pub struct Alert {
670 #[serde(alias = "activeAt")]
671 #[serde(with = "time::serde::rfc3339")]
672 pub(crate) active_at: OffsetDateTime,
673 pub(crate) annotations: HashMap<String, String>,
674 pub(crate) labels: HashMap<String, String>,
675 pub(crate) state: AlertState,
676 #[serde(deserialize_with = "de::deserialize_f64")]
677 pub(crate) value: f64,
678}
679
680impl Alert {
681 pub fn active_at(&self) -> &OffsetDateTime {
683 &self.active_at
684 }
685
686 pub fn annotations(&self) -> &HashMap<String, String> {
688 &self.annotations
689 }
690
691 pub fn labels(&self) -> &HashMap<String, String> {
693 &self.labels
694 }
695
696 pub fn state(&self) -> AlertState {
698 self.state
699 }
700
701 pub fn value(&self) -> f64 {
703 self.value
704 }
705}
706
707#[derive(Clone, Debug, Deserialize)]
709pub struct Alertmanagers {
710 #[serde(alias = "activeAlertmanagers")]
711 pub(crate) active: Vec<Alertmanager>,
712 #[serde(alias = "droppedAlertmanagers")]
713 pub(crate) dropped: Vec<Alertmanager>,
714}
715
716impl Alertmanagers {
717 pub fn active(&self) -> &[Alertmanager] {
719 &self.active
720 }
721
722 pub fn dropped(&self) -> &[Alertmanager] {
724 &self.dropped
725 }
726}
727
728#[derive(Clone, Debug, Deserialize)]
730pub struct Alertmanager {
731 url: Url,
732}
733
734impl Alertmanager {
735 pub fn url(&self) -> &Url {
737 &self.url
738 }
739}
740
741#[derive(Debug, Copy, Clone, Deserialize, Eq, PartialEq)]
743pub enum MetricType {
744 #[serde(alias = "counter")]
745 Counter,
746 #[serde(alias = "gauge")]
747 Gauge,
748 #[serde(alias = "histogram")]
749 Histogram,
750 #[serde(alias = "gaugehistogram")]
751 GaugeHistogram,
752 #[serde(alias = "summary")]
753 Summary,
754 #[serde(alias = "info")]
755 Info,
756 #[serde(alias = "stateset")]
757 Stateset,
758 #[serde(alias = "unknown")]
759 Unknown,
760}
761
762impl MetricType {
763 pub fn is_counter(&self) -> bool {
764 *self == Self::Counter
765 }
766
767 pub fn is_gauge(&self) -> bool {
768 *self == Self::Gauge
769 }
770
771 pub fn is_histogram(&self) -> bool {
772 *self == Self::Histogram
773 }
774
775 pub fn is_gauge_histogram(&self) -> bool {
776 *self == Self::GaugeHistogram
777 }
778
779 pub fn is_summary(&self) -> bool {
780 *self == Self::Summary
781 }
782
783 pub fn is_info(&self) -> bool {
784 *self == Self::Info
785 }
786
787 pub fn is_stateset(&self) -> bool {
788 *self == Self::Stateset
789 }
790
791 pub fn is_unknown(&self) -> bool {
792 *self == Self::Unknown
793 }
794}
795
796impl fmt::Display for MetricType {
797 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
798 match self {
799 MetricType::Counter => write!(f, "counter"),
800 MetricType::Gauge => write!(f, "gauge"),
801 MetricType::Histogram => write!(f, "histogram"),
802 MetricType::GaugeHistogram => write!(f, "gaugehistogram"),
803 MetricType::Summary => write!(f, "summary"),
804 MetricType::Info => write!(f, "info"),
805 MetricType::Stateset => write!(f, "stateset"),
806 MetricType::Unknown => write!(f, "unknown"),
807 }
808 }
809}
810
811#[derive(Clone, Debug, Deserialize)]
813pub struct TargetMetadata {
814 pub(crate) target: HashMap<String, String>,
815 #[serde(alias = "type")]
816 pub(crate) metric_type: MetricType,
817 pub(crate) metric: Option<String>,
818 pub(crate) help: String,
819 pub(crate) unit: String,
820}
821
822impl TargetMetadata {
823 pub fn target(&self) -> &HashMap<String, String> {
825 &self.target
826 }
827
828 pub fn metric_type(&self) -> MetricType {
830 self.metric_type
831 }
832
833 pub fn metric(&self) -> Option<&str> {
835 self.metric.as_deref()
836 }
837
838 pub fn help(&self) -> &str {
840 &self.help
841 }
842
843 pub fn unit(&self) -> &str {
845 &self.unit
846 }
847}
848
849#[derive(Clone, Debug, Deserialize)]
851pub struct MetricMetadata {
852 #[serde(alias = "type")]
853 pub(crate) metric_type: MetricType,
854 pub(crate) help: String,
855 pub(crate) unit: String,
856}
857
858impl MetricMetadata {
859 pub fn metric_type(&self) -> MetricType {
861 self.metric_type
862 }
863
864 pub fn help(&self) -> &str {
866 &self.help
867 }
868
869 pub fn unit(&self) -> &str {
871 &self.unit
872 }
873}
874
875#[derive(Clone, Debug, Deserialize)]
877pub struct BuildInformation {
878 pub(crate) version: String,
879 pub(crate) revision: String,
880 pub(crate) branch: String,
881 #[serde(alias = "buildUser")]
882 pub(crate) build_user: String,
883 #[serde(alias = "buildDate")]
884 #[serde(deserialize_with = "de::deserialize_build_info_date")]
885 pub(crate) build_date: PrimitiveDateTime,
886 #[serde(alias = "goVersion")]
887 pub(crate) go_version: String,
888}
889
890impl BuildInformation {
891 pub fn version(&self) -> &str {
893 &self.version
894 }
895
896 pub fn revision(&self) -> &str {
898 &self.revision
899 }
900
901 pub fn branch(&self) -> &str {
903 &self.branch
904 }
905
906 pub fn build_user(&self) -> &str {
908 &self.build_user
909 }
910
911 pub fn build_date(&self) -> &PrimitiveDateTime {
913 &self.build_date
914 }
915
916 pub fn go_version(&self) -> &str {
918 &self.go_version
919 }
920}
921
922#[derive(Clone, Debug, Deserialize)]
924pub struct RuntimeInformation {
925 #[serde(alias = "startTime")]
926 #[serde(with = "time::serde::rfc3339")]
927 pub(crate) start_time: OffsetDateTime,
928 #[serde(alias = "CWD")]
929 pub(crate) cwd: String,
930 #[serde(alias = "reloadConfigSuccess")]
931 pub(crate) reload_config_success: bool,
932 #[serde(alias = "lastConfigTime")]
933 #[serde(with = "time::serde::rfc3339")]
934 pub(crate) last_config_time: OffsetDateTime,
935 #[serde(alias = "corruptionCount")]
936 pub(crate) corruption_count: i64,
937 #[serde(alias = "goroutineCount")]
938 pub(crate) goroutine_count: usize,
939 #[serde(alias = "GOMAXPROCS")]
940 pub(crate) go_max_procs: usize,
941 #[serde(alias = "GOGC")]
942 pub(crate) go_gc: String,
943 #[serde(alias = "GODEBUG")]
944 pub(crate) go_debug: String,
945 #[serde(alias = "storageRetention")]
946 #[serde(deserialize_with = "de::deserialize_prometheus_duration")]
947 pub(crate) storage_retention: Duration,
948}
949
950impl RuntimeInformation {
951 pub fn start_time(&self) -> &OffsetDateTime {
953 &self.start_time
954 }
955
956 pub fn cwd(&self) -> &str {
958 &self.cwd
959 }
960
961 pub fn reload_config_success(&self) -> bool {
963 self.reload_config_success
964 }
965
966 pub fn last_config_time(&self) -> &OffsetDateTime {
968 &self.last_config_time
969 }
970
971 pub fn corruption_count(&self) -> i64 {
972 self.corruption_count
973 }
974
975 pub fn goroutine_count(&self) -> usize {
976 self.goroutine_count
977 }
978
979 pub fn go_max_procs(&self) -> usize {
980 self.go_max_procs
981 }
982
983 pub fn go_gc(&self) -> &str {
984 &self.go_gc
985 }
986
987 pub fn go_debug(&self) -> &str {
988 &self.go_debug
989 }
990
991 pub fn storage_retention(&self) -> &Duration {
992 &self.storage_retention
993 }
994}
995
996#[derive(Clone, Debug, Deserialize)]
998pub struct TsdbStatistics {
999 #[serde(alias = "headStats")]
1000 pub(crate) head_stats: HeadStatistics,
1001 #[serde(alias = "seriesCountByMetricName")]
1002 pub(crate) series_count_by_metric_name: Vec<TsdbItemCount>,
1003 #[serde(alias = "labelValueCountByLabelName")]
1004 pub(crate) label_value_count_by_label_name: Vec<TsdbItemCount>,
1005 #[serde(alias = "memoryInBytesByLabelName")]
1006 pub(crate) memory_in_bytes_by_label_name: Vec<TsdbItemCount>,
1007 #[serde(alias = "seriesCountByLabelValuePair")]
1008 pub(crate) series_count_by_label_value_pair: Vec<TsdbItemCount>,
1009}
1010
1011impl TsdbStatistics {
1012 pub fn head_stats(&self) -> HeadStatistics {
1014 self.head_stats
1015 }
1016
1017 pub fn series_count_by_metric_name(&self) -> &[TsdbItemCount] {
1019 &self.series_count_by_metric_name
1020 }
1021
1022 pub fn label_value_count_by_label_name(&self) -> &[TsdbItemCount] {
1024 &self.label_value_count_by_label_name
1025 }
1026
1027 pub fn memory_in_bytes_by_label_name(&self) -> &[TsdbItemCount] {
1029 &self.memory_in_bytes_by_label_name
1030 }
1031
1032 pub fn series_count_by_label_value_pair(&self) -> &[TsdbItemCount] {
1034 &self.series_count_by_label_value_pair
1035 }
1036}
1037
1038#[derive(Clone, Copy, Debug, Deserialize)]
1040pub struct HeadStatistics {
1041 #[serde(alias = "numSeries")]
1042 pub(crate) num_series: usize,
1043 #[serde(alias = "chunkCount")]
1044 pub(crate) chunk_count: usize,
1045 #[serde(alias = "minTime")]
1046 pub(crate) min_time: i64,
1047 #[serde(alias = "maxTime")]
1048 pub(crate) max_time: i64,
1049}
1050
1051impl HeadStatistics {
1052 pub fn num_series(&self) -> usize {
1054 self.num_series
1055 }
1056
1057 pub fn chunk_count(&self) -> usize {
1059 self.chunk_count
1060 }
1061
1062 pub fn min_time(&self) -> i64 {
1064 self.min_time
1065 }
1066
1067 pub fn max_time(&self) -> i64 {
1069 self.max_time
1070 }
1071}
1072
1073#[derive(Clone, Debug, Deserialize)]
1075pub struct TsdbItemCount {
1076 pub(crate) name: String,
1077 pub(crate) value: usize,
1078}
1079
1080impl TsdbItemCount {
1081 pub fn name(&self) -> &str {
1083 &self.name
1084 }
1085
1086 pub fn value(&self) -> usize {
1088 self.value
1089 }
1090}
1091
1092#[derive(Clone, Copy, Debug, Deserialize)]
1094pub struct WalReplayStatistics {
1095 pub(crate) min: usize,
1096 pub(crate) max: usize,
1097 pub(crate) current: usize,
1098 pub(crate) state: Option<WalReplayState>,
1099}
1100
1101impl WalReplayStatistics {
1102 pub fn min(&self) -> usize {
1103 self.min
1104 }
1105
1106 pub fn max(&self) -> usize {
1107 self.max
1108 }
1109
1110 pub fn current(&self) -> usize {
1111 self.current
1112 }
1113
1114 pub fn state(&self) -> Option<WalReplayState> {
1115 self.state
1116 }
1117}
1118
1119#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq)]
1120pub enum WalReplayState {
1121 #[serde(alias = "waiting")]
1122 Waiting,
1123 #[serde(alias = "in progress")]
1124 InProgress,
1125 #[serde(alias = "done")]
1126 Done,
1127}
1128
1129impl WalReplayState {
1130 pub fn is_waiting(&self) -> bool {
1131 *self == Self::Waiting
1132 }
1133
1134 pub fn is_in_progress(&self) -> bool {
1135 *self == Self::InProgress
1136 }
1137
1138 pub fn is_done(&self) -> bool {
1139 *self == Self::Done
1140 }
1141}
1142
1143#[cfg(test)]
1144mod tests {
1145 use super::*;
1149 use std::collections::HashMap;
1150 use time::macros::datetime;
1151
1152 #[test]
1153 fn test_api_error_deserialization() -> Result<(), anyhow::Error> {
1154 let data = r#"
1155{
1156 "status": "error",
1157 "data": null,
1158 "errorType": "bad_data",
1159 "error": "1:14: parse error: unexpected end of input in aggregation",
1160 "warnings": []
1161}
1162"#;
1163
1164 let result = serde_json::from_str::<ApiResponse<PromqlResult>>(data)?;
1165 assert!(
1166 matches!(result, ApiResponse::Error(err) if err.error_type == crate::error::PrometheusErrorType::BadData)
1167 );
1168
1169 Ok(())
1170 }
1171
1172 #[test]
1173 fn test_api_success_deserialization() -> Result<(), anyhow::Error> {
1174 let data = r#"
1175{
1176 "status": "success",
1177 "data": {
1178 "resultType": "scalar",
1179 "result": [ 0, "0.0" ]
1180 },
1181 "warnings": []
1182}
1183"#;
1184
1185 let result = serde_json::from_str::<ApiResponse<PromqlResult>>(data)?;
1186 assert!(matches!(result, ApiResponse::Success { data: _ }));
1187
1188 Ok(())
1189 }
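
    // A minimal sketch, not part of the upstream suite: how a caller might unwrap an
    // ApiResponse into its payload and drill into the flattened Data enum.
    #[test]
    fn test_api_response_unwrap_sketch() -> Result<(), anyhow::Error> {
        let data = r#"
{
    "status": "success",
    "data": {
        "resultType": "scalar",
        "result": [ 0, "1.5" ]
    }
}
"#;

        let payload = match serde_json::from_str::<ApiResponse<PromqlResult>>(data)? {
            ApiResponse::Success { data } => data,
            ApiResponse::Error(_) => panic!("expected a successful response"),
        };
        assert!(payload.data().as_scalar().is_some_and(|s| s.value() == 1.5));
        assert!(payload.stats().is_none());
        Ok(())
    }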
1190
1191 #[test]
1192 fn test_bad_combination_in_deserialization() -> Result<(), anyhow::Error> {
1193 let data = r#"
1194{
1195 "status": "error",
1196 "data": {
1197 "resultType": "scalar",
1198 "result": [ 0, "0.0" ]
1199 },
1200 "warnings": []
1201}
1202"#;
1203
1204 let result = serde_json::from_str::<ApiResponse<()>>(data);
1205 assert!(result.is_err());
1206
1207 Ok(())
1208 }
1209
1210 #[test]
1211 fn test_another_bad_combination_in_deserialization() -> Result<(), anyhow::Error> {
1212 let data = r#"
1213{
1214 "status": "success",
 "warnings": [],
 "errorType": "bad_data",
 "error": "1:14: parse error: unexpected end of input in aggregation"
1218}
1219"#;
1220
1221 let result = serde_json::from_str::<ApiResponse<()>>(data);
1222 assert!(result.is_err());
1223
1224 Ok(())
1225 }
1226
1227 #[test]
1228 fn test_query_result_deserialization() -> Result<(), anyhow::Error> {
1229 let data = r#"
1230{
1231 "resultType": "matrix",
1232 "result": [
1233 {
1234 "metric": {
1235 "__name__": "up",
1236 "instance": "localhost:9090",
1237 "job": "prometheus"
1238 },
1239 "values": [
1240 [
1241 1659268100,
1242 "1"
1243 ],
1244 [
1245 1659268160,
1246 "1"
1247 ],
1248 [
1249 1659268220,
1250 "1"
1251 ],
1252 [
1253 1659268280,
1254 "1"
1255 ]
1256 ]
1257 }
1258 ],
1259 "stats": {
1260 "timings": {
1261 "evalTotalTime": 0.000102139,
1262 "resultSortTime": 8.7e-07,
1263 "queryPreparationTime": 5.4169e-05,
1264 "innerEvalTime": 3.787e-05,
1265 "execQueueTime": 4.07e-05,
1266 "execTotalTime": 0.000151989
1267 },
1268 "samples": {
1269 "totalQueryableSamplesPerStep": [
1270 [
1271 1659268100,
1272 1
1273 ],
1274 [
1275 1659268160,
1276 1
1277 ],
1278 [
1279 1659268220,
1280 1
1281 ],
1282 [
1283 1659268280,
1284 1
1285 ]
1286 ],
1287 "totalQueryableSamples": 4,
1288 "peakSamples": 4
1289 }
1290 }
1291}
1292"#;
1293 let result = serde_json::from_str::<PromqlResult>(data)?;
1294 let data = &result.data;
1295 assert!(data.is_matrix());
1296 let matrix = data.as_matrix().unwrap();
1297 assert!(matrix.len() == 1);
1298 let range_vector = &matrix[0];
        let metric = range_vector.metric();
1300 assert!(metric.len() == 3);
1301 assert!(metric.get("__name__").is_some_and(|v| v == "up"));
1302 assert!(metric
1303 .get("instance")
1304 .is_some_and(|v| v == "localhost:9090"));
1305 assert!(metric.get("job").is_some_and(|v| v == "prometheus"));
1306 let samples = range_vector.samples();
1307 assert!(samples.len() == 4);
1308 assert!(samples[0].timestamp() == 1659268100.0);
1309 assert!(samples[0].value() == 1.0);
1310 assert!(samples[1].timestamp() == 1659268160.0);
1311 assert!(samples[1].value() == 1.0);
1312 assert!(samples[2].timestamp() == 1659268220.0);
1313 assert!(samples[2].value() == 1.0);
1314 assert!(samples[3].timestamp() == 1659268280.0);
1315 assert!(samples[3].value() == 1.0);
1316 assert!(result.stats().is_some());
1317 let stats = result.stats().unwrap();
1318 let timings = stats.timings();
1319 assert!(timings.eval_total_time() == 0.000102139);
1320 assert!(timings.result_sort_time() == 8.7e-07_f64);
1321 assert!(timings.query_preparation_time() == 5.4169e-05_f64);
1322 assert!(timings.inner_eval_time() == 3.787e-05_f64);
1323 assert!(timings.exec_queue_time() == 4.07e-05_f64);
1324 assert!(timings.exec_total_time() == 0.000151989);
1325 let samples = stats.samples();
1326 assert!(samples.peak_samples() == 4);
1327 assert!(samples.total_queryable_samples() == 4);
1328 assert!(samples.total_queryable_samples_per_step().is_some());
1329 let per_step = samples.total_queryable_samples_per_step().unwrap();
1330 assert!(per_step.len() == 4);
1331 assert!(per_step[0].timestamp() == 1659268100.0);
1332 assert!(per_step[0].value() == 1.0);
1333 assert!(per_step[1].timestamp() == 1659268160.0);
1334 assert!(per_step[1].value() == 1.0);
1335 assert!(per_step[2].timestamp() == 1659268220.0);
1336 assert!(per_step[2].value() == 1.0);
1337 assert!(per_step[3].timestamp() == 1659268280.0);
1338 assert!(per_step[3].value() == 1.0);
1339 Ok(())
1340 }
1341
1342 #[test]
1343 fn test_query_result_no_per_step_stats_deserialization() -> Result<(), anyhow::Error> {
1344 let data = r#"
1345{
1346 "resultType": "matrix",
1347 "result": [
1348 {
1349 "metric": {
1350 "__name__": "up",
1351 "instance": "localhost:9090",
1352 "job": "prometheus"
1353 },
1354 "values": [
1355 [
1356 1659268100,
1357 "1"
1358 ],
1359 [
1360 1659268160,
1361 "1"
1362 ],
1363 [
1364 1659268220,
1365 "1"
1366 ],
1367 [
1368 1659268280,
1369 "1"
1370 ]
1371 ]
1372 }
1373 ],
1374 "stats": {
1375 "timings": {
1376 "evalTotalTime": 0.000102139,
1377 "resultSortTime": 8.7e-07,
1378 "queryPreparationTime": 5.4169e-05,
1379 "innerEvalTime": 3.787e-05,
1380 "execQueueTime": 4.07e-05,
1381 "execTotalTime": 0.000151989
1382 },
1383 "samples": {
1384 "totalQueryableSamples": 4,
1385 "peakSamples": 4
1386 }
1387 }
1388}
1389"#;
1390 let result = serde_json::from_str::<PromqlResult>(data)?;
1391 assert!(result.stats().is_some());
1392 let stats = result.stats().unwrap();
1393 assert!(stats.samples().total_queryable_samples_per_step().is_none());
1394
1395 Ok(())
1396 }
1397
1398 #[test]
1399 fn test_query_result_no_stats_deserialization() -> Result<(), anyhow::Error> {
1400 let data = r#"
1401{
1402 "resultType": "matrix",
1403 "result": [
1404 {
1405 "metric": {
1406 "__name__": "up",
1407 "instance": "localhost:9090",
1408 "job": "prometheus"
1409 },
1410 "values": [
1411 [
1412 1659268100,
1413 "1"
1414 ],
1415 [
1416 1659268160,
1417 "1"
1418 ],
1419 [
1420 1659268220,
1421 "1"
1422 ],
1423 [
1424 1659268280,
1425 "1"
1426 ]
1427 ]
1428 }
1429 ]
1430}
1431"#;
1432 let result = serde_json::from_str::<PromqlResult>(data)?;
1433 assert!(result.stats().is_none());
1434
1435 Ok(())
1436 }
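
    // Sketch test, not from the upstream suite: Data::is_empty is true for empty vector
    // and matrix results and always false for scalars.
    #[test]
    fn test_data_is_empty_sketch() -> Result<(), anyhow::Error> {
        let empty: Data = serde_json::from_str(r#"{ "resultType": "vector", "result": [] }"#)?;
        assert!(empty.is_empty());
        let scalar: Data =
            serde_json::from_str(r#"{ "resultType": "scalar", "result": [ 0, "0.0" ] }"#)?;
        assert!(!scalar.is_empty());
        Ok(())
    }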
1437
1438 #[test]
1439 fn test_instant_vector_deserialization() -> Result<(), anyhow::Error> {
1440 let data = r#"
1441[
1442 {
1443 "metric": {
1444 "__name__": "up",
1445 "job": "prometheus",
1446 "instance": "localhost:9090"
1447 },
1448 "value": [
1449 1435781451.781,
1450 "1"
1451 ]
1452 },
1453 {
1454 "metric": {
1455 "__name__": "up",
1456 "job": "node",
1457 "instance": "localhost:9100"
1458 },
1459 "value": [
1460 1435781451.781,
1461 "0"
1462 ]
1463 }
1464]
1465"#;
1466 serde_json::from_str::<Vec<InstantVector>>(data)?;
1467 Ok(())
1468 }
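
    // Sketch test, not from the upstream suite: de::deserialize_f64 lets a Sample value
    // arrive either as a quoted string (the usual API encoding) or as a bare number.
    #[test]
    fn test_sample_value_string_or_number_sketch() -> Result<(), anyhow::Error> {
        let quoted: Sample = serde_json::from_str(r#"[ 1659268100, "1.5" ]"#)?;
        let bare: Sample = serde_json::from_str(r#"[ 1659268100, 1.5 ]"#)?;
        assert!(quoted.value() == 1.5 && bare.value() == 1.5);
        assert!(quoted.timestamp() == 1659268100.0);
        Ok(())
    }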
1469
1470 #[test]
1471 fn test_range_vector_deserialization() -> Result<(), anyhow::Error> {
1472 let data = r#"
1473[
1474 {
1475 "metric": {
1476 "__name__": "up",
1477 "job": "prometheus",
1478 "instance": "localhost:9090"
1479 },
1480 "values": [
1481 [
1482 1435781430.781,
1483 "1"
1484 ],
1485 [
1486 1435781445.781,
1487 "1"
1488 ],
1489 [
1490 1435781460.781,
1491 "1"
1492 ]
1493 ]
1494 },
1495 {
1496 "metric": {
1497 "__name__": "up",
1498 "job": "node",
1499 "instance": "localhost:9091"
1500 },
1501 "values": [
1502 [
1503 1435781430.781,
1504 "0"
1505 ],
1506 [
1507 1435781445.781,
1508 "0"
1509 ],
1510 [
1511 1435781460.781,
1512 "1"
1513 ]
1514 ]
1515 }
1516]
1517"#;
1518 serde_json::from_str::<Vec<RangeVector>>(data)?;
1519 Ok(())
1520 }
1521
1522 #[test]
1523 fn test_target_deserialization() -> Result<(), anyhow::Error> {
1524 let data = r#"
1525{
1526 "activeTargets": [
1527 {
1528 "discoveredLabels": {
1529 "__address__": "127.0.0.1:9090",
1530 "__metrics_path__": "/metrics",
1531 "__scheme__": "http",
1532 "job": "prometheus"
1533 },
1534 "labels": {
1535 "instance": "127.0.0.1:9090",
1536 "job": "prometheus"
1537 },
1538 "scrapePool": "prometheus",
1539 "scrapeUrl": "http://127.0.0.1:9090/metrics",
1540 "globalUrl": "http://example-prometheus:9090/metrics",
1541 "lastError": "",
1542 "lastScrape": "2017-01-17T15:07:44.723715405+01:00",
1543 "lastScrapeDuration": 0.050688943,
1544 "health": "up",
1545 "scrapeInterval": "1m",
1546 "scrapeTimeout": "10s"
1547 }
1548 ],
1549 "droppedTargets": [
1550 {
1551 "discoveredLabels": {
1552 "__address__": "127.0.0.1:9100",
1553 "__metrics_path__": "/metrics",
1554 "__scheme__": "http",
1555 "__scrape_interval__": "1m",
1556 "__scrape_timeout__": "10s",
1557 "job": "node"
1558 }
1559 }
1560 ]
1561}
1562"#;
1563 let targets = serde_json::from_str::<Targets>(data)?;
        let active = targets.active();
1565 assert!(active.len() == 1);
1566 let target = &active[0];
1567 assert!(target
1568 .discovered_labels()
1569 .get("__address__")
1570 .is_some_and(|v| v == "127.0.0.1:9090"));
1571 assert!(target
1572 .discovered_labels()
1573 .get("__metrics_path__")
1574 .is_some_and(|v| v == "/metrics"));
1575 assert!(target
1576 .discovered_labels()
1577 .get("__scheme__")
1578 .is_some_and(|v| v == "http"));
1579 assert!(target
1580 .discovered_labels()
1581 .get("job")
1582 .is_some_and(|v| v == "prometheus"));
1583 assert!(target
1584 .labels()
1585 .get("instance")
1586 .is_some_and(|v| v == "127.0.0.1:9090"));
1587 assert!(target
1588 .labels()
1589 .get("job")
1590 .is_some_and(|v| v == "prometheus"));
1591 assert!(target.scrape_pool() == "prometheus");
1592 assert!(target.scrape_url() == &Url::parse("http://127.0.0.1:9090/metrics")?);
1593 assert!(target.global_url() == &Url::parse("http://example-prometheus:9090/metrics")?);
1594 assert!(target.last_error().is_empty());
1595 assert!(target.last_scrape() == &datetime!(2017-01-17 15:07:44.723715405 +1));
1596 assert!(target.last_scrape_duration() == 0.050688943);
1597 assert!(target.health().is_up());
1598 assert!(target.scrape_interval() == &Duration::seconds(60));
1599 assert!(target.scrape_timeout() == &Duration::seconds(10));
        let dropped = targets.dropped();
1601 assert!(dropped.len() == 1);
1602 let target = &dropped[0];
1603 assert!(target
1604 .discovered_labels()
1605 .get("__address__")
1606 .is_some_and(|v| v == "127.0.0.1:9100"));
1607 assert!(target
1608 .discovered_labels()
1609 .get("__metrics_path__")
1610 .is_some_and(|v| v == "/metrics"));
1611 assert!(target
1612 .discovered_labels()
1613 .get("__scheme__")
1614 .is_some_and(|v| v == "http"));
1615 assert!(target
1616 .discovered_labels()
1617 .get("__scrape_interval__")
1618 .is_some_and(|v| v == "1m"));
1619 assert!(target
1620 .discovered_labels()
1621 .get("__scrape_timeout__")
1622 .is_some_and(|v| v == "10s"));
1623 assert!(target
1624 .discovered_labels()
1625 .get("job")
1626 .is_some_and(|v| v == "node"));
1627 Ok(())
1628 }
1629
1630 #[test]
1631 fn test_rule_group_deserialization() -> Result<(), anyhow::Error> {
1632 let data = r#"
1633{
1634 "groups": [
1635 {
1636 "rules": [
1637 {
1638 "alerts": [
1639 {
1640 "activeAt": "2018-07-04T20:27:12.60602144+02:00",
1641 "annotations": {
1642 "summary": "High request latency"
1643 },
1644 "labels": {
1645 "alertname": "HighRequestLatency",
1646 "severity": "page"
1647 },
1648 "state": "firing",
1649 "value": "1e+00"
1650 }
1651 ],
1652 "annotations": {
1653 "summary": "High request latency"
1654 },
1655 "duration": 600,
1656 "health": "ok",
1657 "labels": {
1658 "severity": "page"
1659 },
1660 "name": "HighRequestLatency",
1661 "query": "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5",
1662 "type": "alerting",
1663 "evaluationTime": 0.000312805,
1664 "lastEvaluation": "2023-10-05T19:51:25.462004334+02:00",
1665 "keepFiringFor": 60
1666 },
1667 {
1668 "health": "ok",
1669 "name": "job:http_inprogress_requests:sum",
1670 "query": "sum by (job) (http_inprogress_requests)",
1671 "type": "recording",
1672 "evaluationTime": 0.000256946,
1673 "lastEvaluation": "2023-10-05T19:51:25.052982522+02:00"
1674 }
1675 ],
1676 "file": "/rules.yaml",
1677 "interval": 60,
1678 "limit": 0,
1679 "name": "example",
1680 "evaluationTime": 0.000267716,
1681 "lastEvaluation": "2023-10-05T19:51:25.052974842+02:00"
1682 }
1683 ]
1684}
1685"#;
1686 let groups = serde_json::from_str::<RuleGroups>(data)?.groups;
1687 assert!(groups.len() == 1);
1688 let group = &groups[0];
1689 assert!(group.name() == "example");
1690 assert!(group.file() == "/rules.yaml");
1691 assert!(group.interval() == 60.0);
1692 assert!(group.limit() == 0);
1693 assert!(group.evaluation_time() == 0.000267716);
1694 assert!(group.last_evaluation() == &datetime!(2023-10-05 7:51:25.052974842 pm +2));
1695 assert!(group.rules().len() == 2);
        let alerting_rule = group.rules()[0].as_alerting().unwrap();
1697 assert!(alerting_rule.health() == RuleHealth::Good);
1698 assert!(alerting_rule.name() == "HighRequestLatency");
1699 assert!(alerting_rule.query() == "job:request_latency_seconds:mean5m{job=\"myjob\"} > 0.5");
1700 assert!(alerting_rule.evaluation_time() == 0.000312805);
1701 assert!(alerting_rule.last_evaluation() == &datetime!(2023-10-05 7:51:25.462004334 pm +2));
1702 assert!(alerting_rule.duration() == 600.0);
1703 assert!(alerting_rule.keep_firing_for() == 60.0);
1704 assert!(alerting_rule.alerts().len() == 1);
1705 assert!(alerting_rule
1706 .annotations()
1707 .get("summary")
1708 .is_some_and(|v| v == "High request latency"));
1709 let alert = &alerting_rule.alerts()[0];
1710 assert!(alert.value() == 1.0);
1711 assert!(alert.state().is_firing());
1712 assert!(alert.active_at() == &datetime!(2018-07-04 20:27:12.60602144 +2));
        let recording_rule = group.rules()[1].as_recording().unwrap();
1714 assert!(recording_rule.health() == RuleHealth::Good);
1715 assert!(recording_rule.name() == "job:http_inprogress_requests:sum");
1716 assert!(recording_rule.query() == "sum by (job) (http_inprogress_requests)");
1717 assert!(recording_rule.evaluation_time() == 0.000256946);
1718 assert!(recording_rule.last_evaluation() == &datetime!(2023-10-05 7:51:25.052982522 pm +2));
1719 Ok(())
1720 }
1721
1722 #[test]
1723 fn test_alert_deserialization() -> Result<(), anyhow::Error> {
1724 let data = r#"
1725{
1726 "alerts": [
1727 {
1728 "activeAt":"2018-07-04T20:27:12.60602144+02:00",
1729 "annotations":{
1730 },
1731 "labels":{
1732 "alertname":"my-alert"
1733 },
1734 "state":"firing",
1735 "value":"1e+00"
1736 }
1737 ]
1738}
1739"#;
1740 serde_json::from_str::<Alerts>(data)?;
1741 Ok(())
1742 }
1743
1744 #[test]
1745 fn test_target_metadata_deserialization_1() -> Result<(), anyhow::Error> {
1746 let data = r#"
1747[
1748 {
1749 "target": {
1750 "instance": "127.0.0.1:9090",
1751 "job": "prometheus"
1752 },
1753 "type": "gauge",
1754 "help": "Number of goroutines that currently exist.",
1755 "unit": ""
1756 },
1757 {
1758 "target": {
1759 "instance": "127.0.0.1:9091",
1760 "job": "prometheus"
1761 },
1762 "type": "gauge",
1763 "help": "Number of goroutines that currently exist.",
1764 "unit": ""
1765 },
1766 {
1767 "target": {
1768 "instance": "localhost:9090",
1769 "job": "prometheus"
1770 },
1771 "metric": "process_virtual_memory_bytes",
1772 "type": "gauge",
1773 "help": "Virtual memory size in bytes.",
1774 "unit": ""
1775 },
1776 {
1777 "target": {
1778 "instance": "localhost:9090",
1779 "job": "prometheus"
1780 },
1781 "metric": "prometheus_http_response_size_bytes",
1782 "type": "histogram",
1783 "help": "Histogram of response size for HTTP requests.",
1784 "unit": ""
1785 },
1786 {
1787 "target": {
1788 "instance": "localhost:9090",
1789 "job": "prometheus"
1790 },
1791 "metric": "prometheus_ready",
1792 "type": "gauge",
1793 "help": "Whether Prometheus startup was fully completed and the server is ready for normal operation.",
1794 "unit": ""
1795 },
1796 {
1797 "target": {
1798 "instance": "localhost:9090",
1799 "job": "prometheus"
1800 },
1801 "metric": "prometheus_rule_group_iterations_missed_total",
1802 "type": "counter",
1803 "help": "The total number of rule group evaluations missed due to slow rule group evaluation.",
1804 "unit": ""
1805 },
1806 {
1807 "target": {
1808 "instance": "localhost:9090",
1809 "job": "prometheus"
1810 },
1811 "metric": "prometheus_target_scrape_pool_reloads_failed_total",
1812 "type": "counter",
1813 "help": "Total number of failed scrape pool reloads.",
1814 "unit": ""
1815 },
1816 {
1817 "target": {
1818 "instance": "localhost:9090",
1819 "job": "prometheus"
1820 },
1821 "metric": "prometheus_target_scrape_pool_reloads_total",
1822 "type": "counter",
1823 "help": "Total number of scrape pool reloads.",
1824 "unit": ""
1825 }
1826]
1827"#;
1828 let metadata = serde_json::from_str::<Vec<TargetMetadata>>(data)?;
1829 assert!(metadata.len() == 8);
1830 let first = &metadata[0];
1831 assert!(first
1832 .target()
1833 .get("instance")
1834 .is_some_and(|v| v == "127.0.0.1:9090"));
1835 assert!(first.target().get("job").is_some_and(|v| v == "prometheus"));
1836 assert!(first.metric_type().is_gauge());
1837 assert!(first.help() == "Number of goroutines that currently exist.");
1838 assert!(first.unit().is_empty());
1839 assert!(first.metric().is_none());
1840 let third = &metadata[2];
1841 assert!(third
1842 .target()
1843 .get("instance")
1844 .is_some_and(|v| v == "localhost:9090"));
1845 assert!(third.target().get("job").is_some_and(|v| v == "prometheus"));
1846 assert!(third.metric_type().is_gauge());
1847 assert!(third.help() == "Virtual memory size in bytes.");
1848 assert!(third.unit().is_empty());
1849 assert!(third
1850 .metric()
1851 .is_some_and(|v| v == "process_virtual_memory_bytes"));
1852 let fourth = &metadata[3];
1853 assert!(fourth
1854 .target()
1855 .get("instance")
1856 .is_some_and(|v| v == "localhost:9090"));
1857 assert!(fourth
1858 .target()
1859 .get("job")
1860 .is_some_and(|v| v == "prometheus"));
1861 assert!(fourth.metric_type().is_histogram());
1862 assert!(fourth.help() == "Histogram of response size for HTTP requests.");
1863 assert!(fourth.unit().is_empty());
1864 assert!(fourth
1865 .metric()
1866 .is_some_and(|v| v == "prometheus_http_response_size_bytes"));
1867 Ok(())
1868 }
1869
1870 #[test]
1871 fn test_target_metadata_deserialization_2() -> Result<(), anyhow::Error> {
1872 let data = r#"
1873[
1874 {
1875 "target": {
1876 "instance": "127.0.0.1:9090",
1877 "job": "prometheus"
1878 },
1879 "metric": "prometheus_treecache_zookeeper_failures_total",
1880 "type": "counter",
1881 "help": "The total number of ZooKeeper failures.",
1882 "unit": ""
1883 },
1884 {
1885 "target": {
1886 "instance": "127.0.0.1:9090",
1887 "job": "prometheus"
1888 },
1889 "metric": "prometheus_tsdb_reloads_total",
1890 "type": "counter",
1891 "help": "Number of times the database reloaded block data from disk.",
1892 "unit": ""
1893 }
1894]
1895"#;
1896 serde_json::from_str::<Vec<TargetMetadata>>(data)?;
1897 Ok(())
1898 }
1899
1900 #[test]
1901 fn test_metric_metadata_deserialization() -> Result<(), anyhow::Error> {
1902 let data = r#"
1903{
1904 "cortex_ring_tokens": [
1905 {
1906 "type": "gauge",
1907 "help": "Number of tokens in the ring",
1908 "unit": ""
1909 }
1910 ],
1911 "http_requests_total": [
1912 {
1913 "type": "counter",
1914 "help": "Number of HTTP requests",
1915 "unit": ""
1916 },
1917 {
1918 "type": "counter",
1919 "help": "Amount of HTTP requests",
1920 "unit": ""
1921 }
1922 ]
1923}
1924"#;
1925 let metadata = serde_json::from_str::<HashMap<String, Vec<MetricMetadata>>>(data)?;
1926 assert!(metadata.len() == 2);
1927 assert!(metadata
1928 .get("cortex_ring_tokens")
1929 .is_some_and(|v| v[0].metric_type().is_gauge()
1930 && v[0].help() == "Number of tokens in the ring"
1931 && v[0].unit().is_empty()));
1932 assert!(metadata.get("http_requests_total").is_some_and(|v| v[0]
1933 .metric_type()
1934 .is_counter()
1935 && v[0].help() == "Number of HTTP requests"
1936 && v[0].unit().is_empty()));
1937 Ok(())
1938 }
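
    // Sketch test, not from the upstream suite: the Display impl mirrors the lowercase
    // serde aliases, so a formatted MetricType round-trips through serde_json.
    #[test]
    fn test_metric_type_display_roundtrip_sketch() -> Result<(), anyhow::Error> {
        let parsed: MetricType =
            serde_json::from_str(&format!("\"{}\"", MetricType::GaugeHistogram))?;
        assert!(parsed.is_gauge_histogram());
        Ok(())
    }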
1939
1940 #[test]
1941 fn test_alertmanagers_deserialization() -> Result<(), anyhow::Error> {
1942 let data = r#"
1943{
1944 "activeAlertmanagers": [
1945 {
1946 "url": "http://127.0.0.1:9090/api/v1/alerts"
1947 }
1948 ],
1949 "droppedAlertmanagers": [
1950 {
1951 "url": "http://127.0.0.1:9093/api/v1/alerts"
1952 }
1953 ]
1954}
1955"#;
1956 serde_json::from_str::<Alertmanagers>(data)?;
1957 Ok(())
1958 }
1959
1960 #[test]
1961 fn test_buildinformation_deserialization() -> Result<(), anyhow::Error> {
1962 let data = r#"
1963{
1964 "version": "2.13.1",
1965 "revision": "cb7cbad5f9a2823a622aaa668833ca04f50a0ea7",
1966 "branch": "master",
1967 "buildUser": "julius@desktop",
1968 "buildDate": "20191102-16:19:51",
1969 "goVersion": "go1.13.1"
1970}
1971"#;
1972 serde_json::from_str::<BuildInformation>(data)?;
1973 Ok(())
1974 }
1975
1976 #[test]
1977 fn test_runtimeinformation_deserialization() -> Result<(), anyhow::Error> {
1978 let data = r#"
1979{
1980 "startTime": "2019-11-02T17:23:59.301361365+01:00",
1981 "CWD": "/",
1982 "reloadConfigSuccess": true,
1983 "lastConfigTime": "2019-11-02T17:23:59+01:00",
1984 "timeSeriesCount": 873,
1985 "corruptionCount": 0,
1986 "goroutineCount": 48,
1987 "GOMAXPROCS": 4,
1988 "GOGC": "",
1989 "GODEBUG": "",
1990 "storageRetention": "15d"
1991}
1992"#;
1993 serde_json::from_str::<RuntimeInformation>(data)?;
1994 Ok(())
1995 }
1996
1997 #[test]
1998 fn test_tsdb_stats_deserialization() -> Result<(), anyhow::Error> {
1999 let data = r#"
2000{
2001 "headStats": {
2002 "numSeries": 508,
2003 "chunkCount": 937,
2004 "minTime": 1591516800000,
2005 "maxTime": 1598896800143
2006 },
2007 "seriesCountByMetricName": [
2008 {
2009 "name": "net_conntrack_dialer_conn_failed_total",
2010 "value": 20
2011 },
2012 {
2013 "name": "prometheus_http_request_duration_seconds_bucket",
2014 "value": 20
2015 }
2016 ],
2017 "labelValueCountByLabelName": [
2018 {
2019 "name": "__name__",
2020 "value": 211
2021 },
2022 {
2023 "name": "event",
2024 "value": 3
2025 }
2026 ],
2027 "memoryInBytesByLabelName": [
2028 {
2029 "name": "__name__",
2030 "value": 8266
2031 },
2032 {
2033 "name": "instance",
2034 "value": 28
2035 }
2036 ],
2037 "seriesCountByLabelValuePair": [
2038 {
2039 "name": "job=prometheus",
2040 "value": 425
2041 },
2042 {
2043 "name": "instance=localhost:9090",
2044 "value": 425
2045 }
2046 ]
2047}
2048"#;
2049 serde_json::from_str::<TsdbStatistics>(data)?;
2050 Ok(())
2051 }
2052
2053 #[test]
2054 fn test_wal_replay_deserialization() -> Result<(), anyhow::Error> {
2055 let data = r#"
2056{
2057 "min": 2,
2058 "max": 5,
2059 "current": 40,
2060 "state": "waiting"
2061}
2062"#;
2063 let result: Result<WalReplayStatistics, serde_json::Error> = serde_json::from_str(data);
2064 assert!(result.is_ok());
2065
2066 let data = r#"
2067{
2068 "min": 2,
2069 "max": 5,
2070 "current": 40,
2071 "state": "in progress"
2072}
2073"#;
2074 let result: Result<WalReplayStatistics, serde_json::Error> = serde_json::from_str(data);
2075 assert!(result.is_ok());
2076
2077 let data = r#"
2078{
2079 "min": 2,
2080 "max": 5,
2081 "current": 40,
2082 "state": "done"
2083}
2084"#;
2085 let result: Result<WalReplayStatistics, serde_json::Error> = serde_json::from_str(data);
2086 assert!(result.is_ok());
2087
2088 let data = r#"
2089{
2090 "min": 2,
2091 "max": 5,
2092 "current": 40
2093}
2094"#;
2095 serde_json::from_str::<WalReplayStatistics>(data)?;
2096 Ok(())
2097 }
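
    // A minimal sketch, not part of the upstream suite: the Prometheus duration parser is
    // only reachable via `deserialize_with`, so a throwaway wrapper type is used here.
    #[test]
    fn test_prometheus_duration_deserialization_sketch() -> Result<(), anyhow::Error> {
        #[derive(serde::Deserialize)]
        struct DurationWrapper(
            #[serde(deserialize_with = "super::de::deserialize_prometheus_duration")] Duration,
        );

        // "1h30m" is ninety minutes and "1500ms" is one and a half seconds.
        let long: DurationWrapper = serde_json::from_str(r#""1h30m""#)?;
        assert!(long.0 == Duration::minutes(90));
        let short: DurationWrapper = serde_json::from_str(r#""1500ms""#)?;
        assert!(short.0 == Duration::milliseconds(1500));
        Ok(())
    }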
2098}