use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use super::bucket::GetDocCount;
use super::metric::{
ExtendedStats, PercentilesMetricResult, SingleMetricResult, Stats, TopHitsMetricResult,
};
use super::{AggregationError, Key};
use crate::LucivyError;
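/// The final aggregation result, mapping each aggregation name from the
/// request to its result.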
#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct AggregationResults(pub FxHashMap<String, AggregationResult>);
impl AggregationResults {
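    /// Returns the total number of buckets in these results, including the
    /// buckets of all nested sub-aggregations.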
pub(crate) fn get_bucket_count(&self) -> u64 {
self.0
.values()
.map(|agg| agg.get_bucket_count())
.sum::<u64>()
}
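    /// Returns the value of the given property on the named sub-aggregation,
    /// e.g. when ordering buckets by a metric of a sub-aggregation.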
pub(crate) fn get_value_from_aggregation(
&self,
name: &str,
agg_property: &str,
) -> crate::Result<Option<f64>> {
if let Some(agg) = self.0.get(name) {
agg.get_value_from_aggregation(name, agg_property)
} else {
Err(LucivyError::InternalError(format!(
"Can't find aggregation {name:?} in sub-aggregations"
)))
}
}
}
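/// A single aggregation result: either a bucket result or a metric result.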
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum AggregationResult {
BucketResult(BucketResult),
MetricResult(MetricResult),
}
impl AggregationResult {
pub(crate) fn get_bucket_count(&self) -> u64 {
match self {
AggregationResult::BucketResult(bucket) => bucket.get_bucket_count(),
AggregationResult::MetricResult(_) => 0,
}
}
pub(crate) fn get_value_from_aggregation(
&self,
_name: &str,
agg_property: &str,
) -> crate::Result<Option<f64>> {
match self {
AggregationResult::BucketResult(_bucket) => Err(LucivyError::InternalError(
"Tried to retrieve value from bucket aggregation. This is not supported and \
should not happen during collection phase, but should be caught during validation"
.to_string(),
)),
AggregationResult::MetricResult(metric) => metric.get_value(agg_property),
}
}
}
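/// The result of a metric aggregation.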
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum MetricResult {
Average(SingleMetricResult),
Count(SingleMetricResult),
Max(SingleMetricResult),
Min(SingleMetricResult),
Stats(Stats),
ExtendedStats(Box<ExtendedStats>),
Sum(SingleMetricResult),
Percentiles(PercentilesMetricResult),
TopHits(TopHitsMetricResult),
Cardinality(SingleMetricResult),
}
impl MetricResult {
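    /// Returns the numeric value of this metric. For stats metrics the value is
    /// selected by `agg_property`; percentiles and top_hits cannot be reduced to
    /// a single value for ordering and return an error.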
fn get_value(&self, agg_property: &str) -> crate::Result<Option<f64>> {
match self {
MetricResult::Average(avg) => Ok(avg.value),
MetricResult::Count(count) => Ok(count.value),
MetricResult::Max(max) => Ok(max.value),
MetricResult::Min(min) => Ok(min.value),
MetricResult::Stats(stats) => stats.get_value(agg_property),
MetricResult::ExtendedStats(extended_stats) => extended_stats.get_value(agg_property),
MetricResult::Sum(sum) => Ok(sum.value),
MetricResult::Percentiles(_) => Err(LucivyError::AggregationError(
AggregationError::InvalidRequest("percentiles can't be used to order".to_string()),
)),
MetricResult::TopHits(_) => Err(LucivyError::AggregationError(
AggregationError::InvalidRequest("top_hits can't be used to order".to_string()),
)),
MetricResult::Cardinality(card) => Ok(card.value),
}
}
}
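/// The result of a bucket aggregation (range, histogram, terms or filter).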
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum BucketResult {
Range {
buckets: BucketEntries<RangeBucketEntry>,
},
Histogram {
buckets: BucketEntries<BucketEntry>,
},
Terms {
buckets: Vec<BucketEntry>,
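        /// The sum of the document counts of all buckets that are not part of
        /// the returned buckets.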
sum_other_doc_count: u64,
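        /// Upper bound of the error on the document counts of the returned buckets.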
#[serde(skip_serializing_if = "Option::is_none")]
doc_count_error_upper_bound: Option<u64>,
},
Filter(FilterBucketResult),
}
impl BucketResult {
pub(crate) fn get_bucket_count(&self) -> u64 {
match self {
BucketResult::Range { buckets } => {
buckets.iter().map(|bucket| bucket.get_bucket_count()).sum()
}
BucketResult::Histogram { buckets } => {
buckets.iter().map(|bucket| bucket.get_bucket_count()).sum()
}
BucketResult::Terms {
buckets,
sum_other_doc_count: _,
doc_count_error_upper_bound: _,
} => buckets.iter().map(|bucket| bucket.get_bucket_count()).sum(),
BucketResult::Filter(filter_result) => {
filter_result.sub_aggregations.get_bucket_count()
}
}
}
}
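/// The entries of a bucket aggregation, returned either as a list or as a map
/// keyed by the bucket's key.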
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum BucketEntries<T> {
Vec(Vec<T>),
HashMap(FxHashMap<String, T>),
}
impl<T> BucketEntries<T> {
fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = &'a T> + 'a> {
match self {
BucketEntries::Vec(vec) => Box::new(vec.iter()),
BucketEntries::HashMap(map) => Box::new(map.values()),
}
}
}
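/// A single bucket entry: the bucket key, the number of documents in the
/// bucket, and the results of its sub-aggregations.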
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BucketEntry {
#[serde(skip_serializing_if = "Option::is_none")]
pub key_as_string: Option<String>,
pub key: Key,
pub doc_count: u64,
#[serde(flatten)]
pub sub_aggregation: AggregationResults,
}
impl BucketEntry {
pub(crate) fn get_bucket_count(&self) -> u64 {
1 + self.sub_aggregation.get_bucket_count()
}
}
impl GetDocCount for &BucketEntry {
fn doc_count(&self) -> u64 {
self.doc_count
}
}
impl GetDocCount for BucketEntry {
fn doc_count(&self) -> u64 {
self.doc_count
}
}
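/// A single bucket entry of a range aggregation, carrying the `from`/`to`
/// bounds of the range in addition to the key, document count and
/// sub-aggregations.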
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RangeBucketEntry {
pub key: Key,
pub doc_count: u64,
#[serde(flatten)]
pub sub_aggregation: AggregationResults,
#[serde(skip_serializing_if = "Option::is_none")]
pub from: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub to: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub from_as_string: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub to_as_string: Option<String>,
}
impl RangeBucketEntry {
pub(crate) fn get_bucket_count(&self) -> u64 {
1 + self.sub_aggregation.get_bucket_count()
}
}
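/// The result of a filter aggregation: the number of matching documents and
/// the results of its sub-aggregations.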
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FilterBucketResult {
pub doc_count: u64,
#[serde(flatten)]
pub sub_aggregations: AggregationResults,
}