1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
use std::collections::BTreeMap;

use command::{
    filtered_metrics::Inner, AggregatedMetrics, BackendMetrics, Bucket, FilteredHistogram,
    FilteredMetrics,
};
use prost::UnknownEnumValue;

/// Contains all types received by and sent from Sōzu
pub mod command;

/// Implementation of fmt::Display for the protobuf types, used in the CLI
pub mod display;

/// Errors that can occur while rendering protobuf responses for display
/// (used by the CLI formatting code in the `display` module).
#[derive(thiserror::Error, Debug)]
pub enum DisplayError {
    /// A response content could not be rendered; carries a description of the content
    #[error("Could not display content")]
    DisplayContent(String),
    /// Serialization of a response to JSON failed
    #[error("Error while parsing response to JSON")]
    Json(serde_json::Error),
    /// The response carried a content type other than the one expected by the caller
    #[error("got the wrong response content type: {0}")]
    WrongResponseType(String),
    /// A timestamp could not be formatted as ISO 8601
    #[error("Could not format the datetime to ISO 8601")]
    DateTime,
    /// A protobuf enum field held a numeric value with no known variant
    #[error("unrecognized protobuf variant: {0}")]
    DecodeError(UnknownEnumValue),
}

// Simple helper to build ResponseContent from ContentType
/// Convenience conversion: wrap a `ContentType` into a `ResponseContent`
/// so call sites can simply write `content_type.into()`.
impl From<command::response_content::ContentType> for command::ResponseContent {
    fn from(content_type: command::response_content::ContentType) -> Self {
        command::ResponseContent {
            content_type: Some(content_type),
        }
    }
}

// Simple helper to build Request from RequestType
/// Convenience conversion: wrap a `RequestType` into a `Request`
/// so call sites can simply write `request_type.into()`.
impl From<command::request::RequestType> for command::Request {
    fn from(request_type: command::request::RequestType) -> Self {
        command::Request {
            request_type: Some(request_type),
        }
    }
}

impl AggregatedMetrics {
    /// Merge metrics that were received from several workers
    ///
    /// Each worker gathers the same kind of metrics,
    /// for its own proxying logic, and for the same clusters with their backends.
    /// This means we have to reduce each metric from N instances to 1.
    pub fn merge_metrics(&mut self) {
        // avoid copying the worker metrics, by taking them
        let workers = std::mem::take(&mut self.workers);

        for (_worker_id, worker) in workers {
            // proxy-level metrics, merged into self.proxying
            for (metric_name, new_value) in worker.proxy {
                if new_value.is_mergeable() {
                    self.proxying
                        .entry(metric_name)
                        .and_modify(|old_value| old_value.merge(&new_value))
                        .or_insert(new_value);
                }
            }

            for (cluster_id, cluster_metrics) in worker.clusters {
                // cluster-level metrics
                for (metric_name, new_value) in cluster_metrics.cluster {
                    if new_value.is_mergeable() {
                        // the cluster entry is created lazily, so clusters with no
                        // mergeable metric do not appear in the aggregated result
                        let cluster = self.clusters.entry(cluster_id.to_owned()).or_default();

                        cluster
                            .cluster
                            .entry(metric_name)
                            .and_modify(|old_value| old_value.merge(&new_value))
                            .or_insert(new_value);
                    }
                }

                // backend-level metrics: `cluster_metrics` is owned here, so the
                // backends vector can be consumed directly (no need for drain + mut)
                for backend in cluster_metrics.backends {
                    for (metric_name, new_value) in backend.metrics {
                        if new_value.is_mergeable() {
                            let cluster = self.clusters.entry(cluster_id.to_owned()).or_default();

                            // linear search is acceptable: a cluster has few backends
                            let found_backend = cluster
                                .backends
                                .iter_mut()
                                .find(|present| present.backend_id == backend.backend_id);

                            if let Some(existing_backend) = found_backend {
                                existing_backend
                                    .metrics
                                    .entry(metric_name)
                                    .and_modify(|old_value| old_value.merge(&new_value))
                                    .or_insert(new_value);
                            } else {
                                cluster.backends.push(BackendMetrics {
                                    backend_id: backend.backend_id.clone(),
                                    metrics: BTreeMap::from([(metric_name, new_value)]),
                                });
                            }
                        }
                    }
                }
            }
        }
    }
}

impl FilteredMetrics {
    /// Merge another metric of the same variant into `self`:
    ///
    /// - gauges are summed
    /// - counts are summed
    /// - histograms are merged bucket by bucket (buckets line up because
    ///   their less-or-equal limits are normalized: 0, 1, 3, 7, 15, ...)
    ///
    /// Mismatched or non-mergeable variants leave `self` unchanged.
    pub fn merge(&mut self, right: &Self) {
        match (&self.inner, &right.inner) {
            (Some(Inner::Gauge(a)), Some(Inner::Gauge(b))) => {
                *self = Self {
                    inner: Some(Inner::Gauge(a + b)),
                };
            }
            (Some(Inner::Count(a)), Some(Inner::Count(b))) => {
                *self = Self {
                    inner: Some(Inner::Count(a + b)),
                };
            }
            (Some(Inner::Histogram(a)), Some(Inner::Histogram(b))) => {
                let longest_len = a.buckets.len().max(b.buckets.len());

                let buckets = (0..longest_len)
                    .map(|i| Bucket {
                        le: (1 << i) - 1, // the bucket less-or-equal limits are normalized: 0, 1, 3, 7, 15, ...
                        // a histogram missing bucket i contributes a count of zero
                        count: a.buckets.get(i).map_or(0, |buck| buck.count)
                            + b.buckets.get(i).map_or(0, |buck| buck.count),
                    })
                    .collect();

                *self = Self {
                    inner: Some(Inner::Histogram(FilteredHistogram {
                        count: a.count + b.count,
                        sum: a.sum + b.sum,
                        buckets,
                    })),
                };
            }
            _ => {}
        }
    }

    /// Whether this metric variant can be merged across workers
    fn is_mergeable(&self) -> bool {
        // Inner::Time and Inner::TimeSerie are never used in Sōzu
        matches!(
            &self.inner,
            Some(Inner::Gauge(_)) | Some(Inner::Count(_)) | Some(Inner::Histogram(_))
        )
    }
}