1use serde_json::Value;
48
49use crate::{
50 compiler::{
51 aggregate_types::{AggregateFunction, HavingOperator, TemporalBucket},
52 aggregation::{
53 AggregateSelection, AggregationRequest, GroupBySelection, HavingCondition,
54 OrderByClause, OrderDirection,
55 },
56 fact_table::FactTableMetadata,
57 },
58 db::where_clause::{WhereClause, WhereOperator},
59 error::{FraiseQLError, Result},
60};
61
/// Stateless parser that turns JSON aggregate-query documents into
/// [`AggregationRequest`] values, validating every referenced field against
/// the fact table's [`FactTableMetadata`].
///
/// All functionality lives in associated functions; the type carries no data.
pub struct AggregateQueryParser;
64
65impl AggregateQueryParser {
66 pub fn parse(query_json: &Value, metadata: &FactTableMetadata) -> Result<AggregationRequest> {
94 let table_name = query_json
96 .get("table")
97 .and_then(|v| v.as_str())
98 .ok_or_else(|| FraiseQLError::Validation {
99 message: "Missing 'table' field in aggregate query".to_string(),
100 path: None,
101 })?
102 .to_string();
103
104 let where_clause = if let Some(where_obj) = query_json.get("where") {
106 Some(Self::parse_where_clause(where_obj)?)
107 } else {
108 None
109 };
110
111 let group_by = if let Some(group_by_obj) = query_json.get("groupBy") {
113 Self::parse_group_by(group_by_obj, metadata)?
114 } else {
115 vec![]
116 };
117
118 let aggregates = if let Some(agg_array) = query_json.get("aggregates") {
120 Self::parse_aggregates(agg_array, metadata)?
121 } else {
122 vec![]
123 };
124
125 let having = if let Some(having_obj) = query_json.get("having") {
127 Self::parse_having(having_obj, &aggregates, metadata)?
128 } else {
129 vec![]
130 };
131
132 let order_by = if let Some(order_obj) = query_json.get("orderBy") {
134 Self::parse_order_by(order_obj)?
135 } else {
136 vec![]
137 };
138
139 let limit = query_json.get("limit").and_then(|v| v.as_u64()).map(|n| n as u32);
141
142 let offset = query_json.get("offset").and_then(|v| v.as_u64()).map(|n| n as u32);
144
145 Ok(AggregationRequest {
146 table_name,
147 where_clause,
148 group_by,
149 aggregates,
150 having,
151 order_by,
152 limit,
153 offset,
154 })
155 }
156
157 fn parse_where_clause(where_obj: &Value) -> Result<WhereClause> {
163 let Some(obj) = where_obj.as_object() else {
164 return Ok(WhereClause::And(vec![]));
165 };
166
167 let mut conditions = Vec::new();
168
169 for (key, value) in obj {
170 if let Some((field, operator_str)) = Self::parse_where_field_and_operator(key)? {
173 let operator = WhereOperator::from_str(operator_str)?;
174
175 conditions.push(WhereClause::Field {
176 path: vec![field.to_string()],
177 operator,
178 value: value.clone(),
179 });
180 }
181 }
182
183 Ok(WhereClause::And(conditions))
184 }
185
186 fn parse_where_field_and_operator(key: &str) -> Result<Option<(&str, &str)>> {
188 if let Some(last_underscore) = key.rfind('_') {
190 let field = &key[..last_underscore];
191 let operator = &key[last_underscore + 1..];
192
193 match WhereOperator::from_str(operator) {
195 Ok(_) => Ok(Some((field, operator))),
196 Err(_) => {
197 Ok(None)
200 },
201 }
202 } else {
203 Ok(None)
205 }
206 }
207
208 fn parse_group_by(
215 group_by_obj: &Value,
216 metadata: &FactTableMetadata,
217 ) -> Result<Vec<GroupBySelection>> {
218 let mut selections = Vec::new();
219
220 if let Some(obj) = group_by_obj.as_object() {
221 for (key, value) in obj {
222 if value.as_bool() == Some(true) {
223 if let Some(calendar_sel) = Self::try_parse_calendar_bucket(key, metadata)? {
226 selections.push(calendar_sel);
227 } else if let Some(bucket_sel) = Self::parse_temporal_bucket(key, metadata)? {
228 selections.push(bucket_sel);
230 } else {
231 selections.push(GroupBySelection::Dimension {
233 path: key.clone(),
234 alias: key.clone(),
235 });
236 }
237 } else if let Some(bucket_str) = value.as_str() {
238 let bucket = TemporalBucket::from_str(bucket_str)?;
240
241 if let Some(calendar_sel) =
243 Self::try_find_calendar_bucket(key, bucket, metadata)
244 {
245 selections.push(calendar_sel);
246 } else {
247 let column_exists =
250 metadata.denormalized_filters.iter().any(|f| f.name == *key);
251
252 if !column_exists {
253 return Err(FraiseQLError::Validation {
254 message: format!(
255 "Temporal bucketing column '{}' not found in denormalized filters",
256 key
257 ),
258 path: None,
259 });
260 }
261
262 selections.push(GroupBySelection::TemporalBucket {
263 column: key.clone(),
264 bucket,
265 alias: key.clone(),
266 });
267 }
268 }
269 }
270 }
271
272 Ok(selections)
273 }
274
275 fn parse_temporal_bucket(
277 key: &str,
278 metadata: &FactTableMetadata,
279 ) -> Result<Option<GroupBySelection>> {
280 for filter_col in &metadata.denormalized_filters {
282 for bucket in &[
283 ("_second", TemporalBucket::Second),
284 ("_minute", TemporalBucket::Minute),
285 ("_hour", TemporalBucket::Hour),
286 ("_day", TemporalBucket::Day),
287 ("_week", TemporalBucket::Week),
288 ("_month", TemporalBucket::Month),
289 ("_quarter", TemporalBucket::Quarter),
290 ("_year", TemporalBucket::Year),
291 ] {
292 let expected_key = format!("{}{}", filter_col.name, bucket.0);
293 if key == expected_key {
294 return Ok(Some(GroupBySelection::TemporalBucket {
295 column: filter_col.name.clone(),
296 bucket: bucket.1,
297 alias: key.to_string(),
298 }));
299 }
300 }
301 }
302
303 Ok(None)
304 }
305
306 fn try_parse_calendar_bucket(
311 key: &str,
312 metadata: &FactTableMetadata,
313 ) -> Result<Option<GroupBySelection>> {
314 for calendar_dim in &metadata.calendar_dimensions {
315 for (suffix, bucket_type) in &[
317 ("_second", TemporalBucket::Second),
318 ("_minute", TemporalBucket::Minute),
319 ("_hour", TemporalBucket::Hour),
320 ("_day", TemporalBucket::Day),
321 ("_week", TemporalBucket::Week),
322 ("_month", TemporalBucket::Month),
323 ("_quarter", TemporalBucket::Quarter),
324 ("_year", TemporalBucket::Year),
325 ] {
326 let expected_key = format!("{}{}", calendar_dim.source_column, suffix);
327 if key == expected_key {
328 if let Some((gran, bucket)) =
330 Self::find_calendar_bucket(calendar_dim, *bucket_type)
331 {
332 return Ok(Some(GroupBySelection::CalendarDimension {
333 source_column: calendar_dim.source_column.clone(),
334 calendar_column: gran.column_name.clone(),
335 json_key: bucket.json_key.clone(),
336 bucket: bucket.bucket_type,
337 alias: key.to_string(),
338 }));
339 }
340 }
341 }
342 }
343 Ok(None)
344 }
345
346 fn try_find_calendar_bucket(
350 column: &str,
351 bucket: TemporalBucket,
352 metadata: &FactTableMetadata,
353 ) -> Option<GroupBySelection> {
354 for calendar_dim in &metadata.calendar_dimensions {
355 if calendar_dim.source_column == column {
356 if let Some((gran, cal_bucket)) = Self::find_calendar_bucket(calendar_dim, bucket) {
357 return Some(GroupBySelection::CalendarDimension {
358 source_column: calendar_dim.source_column.clone(),
359 calendar_column: gran.column_name.clone(),
360 json_key: cal_bucket.json_key.clone(),
361 bucket: cal_bucket.bucket_type,
362 alias: column.to_string(),
363 });
364 }
365 }
366 }
367 None
368 }
369
370 fn find_calendar_bucket(
375 calendar_dim: &crate::compiler::fact_table::CalendarDimension,
376 bucket: TemporalBucket,
377 ) -> Option<(
378 &crate::compiler::fact_table::CalendarGranularity,
379 &crate::compiler::fact_table::CalendarBucket,
380 )> {
381 for granularity in &calendar_dim.granularities {
382 for cal_bucket in &granularity.buckets {
383 if cal_bucket.bucket_type == bucket {
384 return Some((granularity, cal_bucket));
385 }
386 }
387 }
388 None
389 }
390
391 fn parse_aggregates(
393 agg_array: &Value,
394 metadata: &FactTableMetadata,
395 ) -> Result<Vec<AggregateSelection>> {
396 let mut aggregates = Vec::new();
397
398 if let Some(arr) = agg_array.as_array() {
399 for item in arr {
400 if let Some(obj) = item.as_object() {
401 for (agg_name, _value) in obj {
403 aggregates.push(Self::parse_aggregate_selection(agg_name, metadata)?);
404 }
405 }
406 }
407 }
408
409 Ok(aggregates)
410 }
411
412 fn parse_aggregate_selection(
414 agg_name: &str,
415 metadata: &FactTableMetadata,
416 ) -> Result<AggregateSelection> {
417 if agg_name == "count" {
419 return Ok(AggregateSelection::Count {
420 alias: "count".to_string(),
421 });
422 }
423
424 if agg_name == "count_distinct" {
427 let default_field = Self::extract_dimension_paths(metadata)
429 .first()
430 .cloned()
431 .unwrap_or_else(|| "id".to_string());
432 return Ok(AggregateSelection::CountDistinct {
433 field: default_field,
434 alias: "count_distinct".to_string(),
435 });
436 }
437
438 if let Some(stripped) = agg_name.strip_suffix("_count_distinct") {
440 let dimension_paths = Self::extract_dimension_paths(metadata);
442 if dimension_paths.iter().any(|p| p == stripped) {
443 return Ok(AggregateSelection::CountDistinct {
444 field: stripped.to_string(),
445 alias: agg_name.to_string(),
446 });
447 }
448 if metadata.measures.iter().any(|m| m.name == stripped) {
450 return Ok(AggregateSelection::CountDistinct {
451 field: stripped.to_string(),
452 alias: agg_name.to_string(),
453 });
454 }
455 return Err(FraiseQLError::Validation {
457 message: format!(
458 "COUNT DISTINCT field '{}' not found in dimensions or measures. Available: {:?}",
459 stripped, dimension_paths
460 ),
461 path: None,
462 });
463 }
464
465 for dimension_path in Self::extract_dimension_paths(metadata) {
468 if let Some(stripped) = agg_name.strip_suffix("_bool_and") {
469 if stripped == dimension_path {
470 return Ok(AggregateSelection::BoolAggregate {
471 field: dimension_path.clone(),
472 function: crate::compiler::aggregate_types::BoolAggregateFunction::And,
473 alias: agg_name.to_string(),
474 });
475 }
476 }
477 if let Some(stripped) = agg_name.strip_suffix("_bool_or") {
478 if stripped == dimension_path {
479 return Ok(AggregateSelection::BoolAggregate {
480 field: dimension_path.clone(),
481 function: crate::compiler::aggregate_types::BoolAggregateFunction::Or,
482 alias: agg_name.to_string(),
483 });
484 }
485 }
486 }
487
488 for measure in &metadata.measures {
490 for func in &[
491 ("_sum", AggregateFunction::Sum),
492 ("_avg", AggregateFunction::Avg),
493 ("_min", AggregateFunction::Min),
494 ("_max", AggregateFunction::Max),
495 ("_stddev", AggregateFunction::Stddev),
496 ("_variance", AggregateFunction::Variance),
497 ("_array_agg", AggregateFunction::ArrayAgg),
499 ("_json_agg", AggregateFunction::JsonAgg),
500 ("_jsonb_agg", AggregateFunction::JsonbAgg),
501 ("_string_agg", AggregateFunction::StringAgg),
502 ] {
503 let expected_name = format!("{}{}", measure.name, func.0);
504 if agg_name == expected_name {
505 return Ok(AggregateSelection::MeasureAggregate {
506 measure: measure.name.clone(),
507 function: func.1,
508 alias: agg_name.to_string(),
509 });
510 }
511 }
512 }
513
514 for dimension_path in Self::extract_dimension_paths(metadata) {
517 for func in &[
518 ("_array_agg", AggregateFunction::ArrayAgg),
519 ("_json_agg", AggregateFunction::JsonAgg),
520 ("_jsonb_agg", AggregateFunction::JsonbAgg),
521 ("_string_agg", AggregateFunction::StringAgg),
522 ] {
523 let expected_name = format!("{}{}", dimension_path, func.0);
524 if agg_name == expected_name {
525 return Ok(AggregateSelection::MeasureAggregate {
527 measure: dimension_path.clone(),
528 function: func.1,
529 alias: agg_name.to_string(),
530 });
531 }
532 }
533 }
534
535 Err(FraiseQLError::Validation {
536 message: format!("Unknown aggregate selection: {agg_name}"),
537 path: None,
538 })
539 }
540
541 fn extract_dimension_paths(metadata: &FactTableMetadata) -> Vec<String> {
543 let mut paths = Vec::new();
544
545 for dim_path in &metadata.dimensions.paths {
547 paths.push(dim_path.name.clone());
548 }
549
550 for filter in &metadata.denormalized_filters {
552 paths.push(filter.name.clone());
553 }
554
555 paths
556 }
557
558 fn parse_having(
560 having_obj: &Value,
561 aggregates: &[AggregateSelection],
562 _metadata: &FactTableMetadata,
563 ) -> Result<Vec<HavingCondition>> {
564 let mut conditions = Vec::new();
565
566 if let Some(obj) = having_obj.as_object() {
567 for (key, value) in obj {
568 if let Some((agg_name, operator)) = Self::parse_having_key(key) {
570 let aggregate = aggregates
572 .iter()
573 .find(|a| a.alias() == agg_name)
574 .ok_or_else(|| FraiseQLError::Validation {
575 message: format!(
576 "HAVING condition references non-selected aggregate: {agg_name}"
577 ),
578 path: None,
579 })?
580 .clone();
581
582 conditions.push(HavingCondition {
583 aggregate,
584 operator,
585 value: value.clone(),
586 });
587 }
588 }
589 }
590
591 Ok(conditions)
592 }
593
594 fn parse_having_key(key: &str) -> Option<(&str, HavingOperator)> {
596 for (suffix, op) in &[
597 ("_gt", HavingOperator::Gt),
598 ("_gte", HavingOperator::Gte),
599 ("_lt", HavingOperator::Lt),
600 ("_lte", HavingOperator::Lte),
601 ("_eq", HavingOperator::Eq),
602 ("_neq", HavingOperator::Neq),
603 ] {
604 if let Some(agg_name) = key.strip_suffix(suffix) {
605 return Some((agg_name, *op));
606 }
607 }
608 None
609 }
610
611 fn parse_order_by(order_obj: &Value) -> Result<Vec<OrderByClause>> {
613 let mut clauses = Vec::new();
614
615 if let Some(obj) = order_obj.as_object() {
616 for (field, value) in obj {
617 let direction = match value.as_str() {
618 Some("ASC" | "asc") => OrderDirection::Asc,
619 Some("DESC" | "desc") => OrderDirection::Desc,
620 _ => OrderDirection::Asc, };
622
623 clauses.push(OrderByClause {
624 field: field.clone(),
625 direction,
626 });
627 }
628 }
629
630 Ok(clauses)
631 }
632}
633
#[cfg(test)]
mod tests {
    use serde_json::json;

    use super::*;
    use crate::compiler::fact_table::{DimensionColumn, FilterColumn, MeasureColumn, SqlType};

    /// Builds a minimal sales fact table used by every test below:
    /// measures `revenue` (decimal) and `quantity` (int), JSONB dimension
    /// paths `category` and `product`, one denormalized timestamp filter
    /// column `occurred_at`, and no calendar dimensions.
    fn create_test_metadata() -> FactTableMetadata {
        use crate::compiler::fact_table::DimensionPath;

        FactTableMetadata {
            table_name: "tf_sales".to_string(),
            measures: vec![
                MeasureColumn {
                    name: "revenue".to_string(),
                    sql_type: SqlType::Decimal,
                    nullable: false,
                },
                MeasureColumn {
                    name: "quantity".to_string(),
                    sql_type: SqlType::Int,
                    nullable: false,
                },
            ],
            dimensions: DimensionColumn {
                name: "dimensions".to_string(),
                paths: vec![
                    DimensionPath {
                        name: "category".to_string(),
                        json_path: "data->>'category'".to_string(),
                        data_type: "text".to_string(),
                    },
                    DimensionPath {
                        name: "product".to_string(),
                        json_path: "data->>'product'".to_string(),
                        data_type: "text".to_string(),
                    },
                ],
            },
            denormalized_filters: vec![FilterColumn {
                name: "occurred_at".to_string(),
                sql_type: SqlType::Timestamp,
                indexed: true,
            }],
            calendar_dimensions: vec![],
        }
    }

    // A bare `count` aggregate resolves with the fixed alias "count".
    #[test]
    fn test_parse_simple_count() {
        let metadata = create_test_metadata();
        let query = json!({
            "table": "tf_sales",
            "aggregates": [
                {"count": {}}
            ]
        });

        let request = AggregateQueryParser::parse(&query, &metadata).unwrap();

        assert_eq!(request.table_name, "tf_sales");
        assert_eq!(request.aggregates.len(), 1);
        assert_eq!(request.aggregates[0].alias(), "count");
    }

    // `"category": true` groups by a plain JSONB dimension path, with the
    // path itself reused as the alias.
    #[test]
    fn test_parse_group_by_dimension() {
        let metadata = create_test_metadata();
        let query = json!({
            "table": "tf_sales",
            "groupBy": {
                "category": true
            },
            "aggregates": [
                {"count": {}}
            ]
        });

        let request = AggregateQueryParser::parse(&query, &metadata).unwrap();

        assert_eq!(request.group_by.len(), 1);
        match &request.group_by[0] {
            GroupBySelection::Dimension { path, alias } => {
                assert_eq!(path, "category");
                assert_eq!(alias, "category");
            },
            _ => panic!("Expected Dimension selection"),
        }
    }

    // `"occurred_at_day": true` is split into filter column `occurred_at`
    // plus the `_day` suffix, yielding a Day temporal bucket.
    #[test]
    fn test_parse_temporal_bucket() {
        let metadata = create_test_metadata();
        let query = json!({
            "table": "tf_sales",
            "groupBy": {
                "occurred_at_day": true
            },
            "aggregates": [
                {"count": {}}
            ]
        });

        let request = AggregateQueryParser::parse(&query, &metadata).unwrap();

        assert_eq!(request.group_by.len(), 1);
        match &request.group_by[0] {
            GroupBySelection::TemporalBucket {
                column,
                bucket,
                alias,
            } => {
                assert_eq!(column, "occurred_at");
                assert_eq!(*bucket, TemporalBucket::Day);
                assert_eq!(alias, "occurred_at_day");
            },
            _ => panic!("Expected TemporalBucket selection"),
        }
    }

    // Aggregates keep the order of the input array; each element here has a
    // single key, so the aliases come back in array order.
    #[test]
    fn test_parse_multiple_aggregates() {
        let metadata = create_test_metadata();
        let query = json!({
            "table": "tf_sales",
            "aggregates": [
                {"count": {}},
                {"revenue_sum": {}},
                {"revenue_avg": {}},
                {"quantity_max": {}}
            ]
        });

        let request = AggregateQueryParser::parse(&query, &metadata).unwrap();

        assert_eq!(request.aggregates.len(), 4);
        assert_eq!(request.aggregates[0].alias(), "count");
        assert_eq!(request.aggregates[1].alias(), "revenue_sum");
        assert_eq!(request.aggregates[2].alias(), "revenue_avg");
        assert_eq!(request.aggregates[3].alias(), "quantity_max");
    }

    // A HAVING key `revenue_sum_gt` is split into the selected aggregate
    // alias `revenue_sum` and the Gt operator.
    #[test]
    fn test_parse_having_condition() {
        let metadata = create_test_metadata();
        let query = json!({
            "table": "tf_sales",
            "aggregates": [
                {"revenue_sum": {}}
            ],
            "having": {
                "revenue_sum_gt": 1000
            }
        });

        let request = AggregateQueryParser::parse(&query, &metadata).unwrap();

        assert_eq!(request.having.len(), 1);
        assert_eq!(request.having[0].operator, HavingOperator::Gt);
        assert_eq!(request.having[0].value, json!(1000));
    }

    // orderBy maps the field name through unchanged and parses "DESC".
    #[test]
    fn test_parse_order_by() {
        let metadata = create_test_metadata();
        let query = json!({
            "table": "tf_sales",
            "aggregates": [
                {"revenue_sum": {}}
            ],
            "orderBy": {
                "revenue_sum": "DESC"
            }
        });

        let request = AggregateQueryParser::parse(&query, &metadata).unwrap();

        assert_eq!(request.order_by.len(), 1);
        assert_eq!(request.order_by[0].field, "revenue_sum");
        assert_eq!(request.order_by[0].direction, OrderDirection::Desc);
    }

    // limit/offset are optional u64 JSON numbers surfaced as Option<u32>.
    #[test]
    fn test_parse_limit_offset() {
        let metadata = create_test_metadata();
        let query = json!({
            "table": "tf_sales",
            "aggregates": [
                {"count": {}}
            ],
            "limit": 10,
            "offset": 5
        });

        let request = AggregateQueryParser::parse(&query, &metadata).unwrap();

        assert_eq!(request.limit, Some(10));
        assert_eq!(request.offset, Some(5));
    }

    // End-to-end smoke test combining groupBy, aggregates, having, orderBy
    // and limit in a single document; only the clause counts are asserted.
    #[test]
    fn test_parse_complex_query() {
        let metadata = create_test_metadata();
        let query = json!({
            "table": "tf_sales",
            "groupBy": {
                "category": true,
                "occurred_at_month": true
            },
            "aggregates": [
                {"count": {}},
                {"revenue_sum": {}},
                {"revenue_avg": {}},
                {"quantity_sum": {}}
            ],
            "having": {
                "revenue_sum_gt": 1000,
                "count_gte": 5
            },
            "orderBy": {
                "revenue_sum": "DESC",
                "count": "ASC"
            },
            "limit": 20
        });

        let request = AggregateQueryParser::parse(&query, &metadata).unwrap();

        assert_eq!(request.table_name, "tf_sales");
        assert_eq!(request.group_by.len(), 2);
        assert_eq!(request.aggregates.len(), 4);
        assert_eq!(request.having.len(), 2);
        assert_eq!(request.order_by.len(), 2);
        assert_eq!(request.limit, Some(20));
    }

    // Bare `count_distinct` defaults its field to the first dimension path
    // ("category" in the test metadata).
    #[test]
    fn test_parse_count_distinct_default() {
        let metadata = create_test_metadata();
        let query = json!({
            "table": "tf_sales",
            "aggregates": [
                {"count_distinct": {}}
            ]
        });

        let request = AggregateQueryParser::parse(&query, &metadata).unwrap();

        assert_eq!(request.aggregates.len(), 1);
        match &request.aggregates[0] {
            AggregateSelection::CountDistinct { field, alias } => {
                assert_eq!(field, "category");
                assert_eq!(alias, "count_distinct");
            },
            _ => panic!("Expected CountDistinct selection"),
        }
    }

    // `<dimension>_count_distinct` resolves against dimension paths.
    #[test]
    fn test_parse_count_distinct_with_field() {
        let metadata = create_test_metadata();
        let query = json!({
            "table": "tf_sales",
            "aggregates": [
                {"product_count_distinct": {}}
            ]
        });

        let request = AggregateQueryParser::parse(&query, &metadata).unwrap();

        assert_eq!(request.aggregates.len(), 1);
        match &request.aggregates[0] {
            AggregateSelection::CountDistinct { field, alias } => {
                assert_eq!(field, "product");
                assert_eq!(alias, "product_count_distinct");
            },
            _ => panic!("Expected CountDistinct selection"),
        }
    }

    // `<measure>_count_distinct` is also accepted (measures are valid
    // COUNT DISTINCT targets, not just dimensions).
    #[test]
    fn test_parse_count_distinct_on_measure() {
        let metadata = create_test_metadata();
        let query = json!({
            "table": "tf_sales",
            "aggregates": [
                {"revenue_count_distinct": {}}
            ]
        });

        let request = AggregateQueryParser::parse(&query, &metadata).unwrap();

        assert_eq!(request.aggregates.len(), 1);
        match &request.aggregates[0] {
            AggregateSelection::CountDistinct { field, alias } => {
                assert_eq!(field, "revenue");
                assert_eq!(alias, "revenue_count_distinct");
            },
            _ => panic!("Expected CountDistinct selection"),
        }
    }

    // An unknown `_count_distinct` target is a validation error naming the
    // missing field.
    #[test]
    fn test_parse_count_distinct_invalid_field() {
        let metadata = create_test_metadata();
        let query = json!({
            "table": "tf_sales",
            "aggregates": [
                {"nonexistent_count_distinct": {}}
            ]
        });

        let result = AggregateQueryParser::parse(&query, &metadata);

        assert!(result.is_err());
        let err = result.unwrap_err();
        match err {
            FraiseQLError::Validation { message, .. } => {
                assert!(message.contains("COUNT DISTINCT field 'nonexistent' not found"));
            },
            _ => panic!("Expected Validation error"),
        }
    }

    // Several COUNT DISTINCT selections can coexist with other aggregates;
    // aliases again follow the input array order.
    #[test]
    fn test_parse_multiple_count_distinct() {
        let metadata = create_test_metadata();
        let query = json!({
            "table": "tf_sales",
            "aggregates": [
                {"count": {}},
                {"category_count_distinct": {}},
                {"product_count_distinct": {}},
                {"revenue_sum": {}}
            ]
        });

        let request = AggregateQueryParser::parse(&query, &metadata).unwrap();

        assert_eq!(request.aggregates.len(), 4);
        assert_eq!(request.aggregates[0].alias(), "count");
        assert_eq!(request.aggregates[1].alias(), "category_count_distinct");
        assert_eq!(request.aggregates[2].alias(), "product_count_distinct");
        assert_eq!(request.aggregates[3].alias(), "revenue_sum");
    }
}