1use serde_json::Value;
2
3use crate::{Error, apply_stats_operation, print_data_info, string_ops, value_to_string};
4
5pub fn apply_simple_filter(data: Vec<Value>, filter: &str) -> Result<Vec<Value>, Error> {
6 if filter.starts_with("select(") && filter.ends_with(")") {
7 let condition = &filter[7..filter.len() - 1];
9
10 if condition.contains(" | ") {
12 apply_filter_with_string_operations(data, condition)
13 } else {
14 apply_existing_simple_filter(data, condition)
15 }
16 } else {
17 Err(Error::InvalidQuery(format!(
18 "Unsupported filter: {}",
19 filter
20 )))
21 }
22}
23
24fn apply_filter_with_string_operations(
26 data: Vec<Value>,
27 condition: &str,
28) -> Result<Vec<Value>, Error> {
29 let (condition, is_negated) = parse_not_condition_with_parentheses(condition)?;
31
32 let parts: Vec<&str> = condition.split(" | ").map(|s| s.trim()).collect();
33
34 if parts.len() < 2 {
35 return Err(Error::InvalidQuery("Invalid filter condition".to_string()));
36 }
37
38 let field_access = parts[0];
39 let string_operations: Vec<&str> = parts[1..].to_vec();
40
41 let last_operation = string_operations
43 .last()
44 .ok_or_else(|| Error::InvalidQuery("Missing comparison operation".to_string()))?;
45
46 if !is_comparison_operation(last_operation) {
47 return Err(Error::InvalidQuery(
48 "Last operation must be a comparison".to_string(),
49 ));
50 }
51
52 let mut results = Vec::new();
53
54 for item in data {
55 let field_value = extract_field_value(&item, field_access)?;
57
58 let final_value = string_ops::apply_string_pipeline(&field_value, &string_operations)?;
60
61 let condition_result = matches!(final_value, Value::Bool(true));
63 let final_result = if is_negated {
64 !condition_result
65 } else {
66 condition_result
67 };
68
69 if final_result {
70 results.push(item);
71 }
72 }
73
74 Ok(results)
75}
76
/// Reports whether a pipeline segment is a boolean-producing comparison:
/// `contains(...)`, `starts_with(...)`, `ends_with(...)`, or an equality
/// operator (`==` / `!=`, bare or followed by an operand).
fn is_comparison_operation(operation: &str) -> bool {
    let op = operation.trim();
    const COMPARISON_PREFIXES: [&str; 5] =
        ["contains(", "starts_with(", "ends_with(", "== ", "!= "];

    op == "==" || op == "!=" || COMPARISON_PREFIXES.iter().any(|p| op.starts_with(p))
}
89
90fn apply_existing_simple_filter(data: Vec<Value>, condition: &str) -> Result<Vec<Value>, Error> {
92 let (condition, is_negated) = parse_not_condition_with_parentheses(condition)?;
94
95 let (field_path, operator, value) = parse_condition(&condition)?;
97
98 let filtered: Vec<Value> = data
100 .into_iter()
101 .filter(|item| {
102 let result = evaluate_condition(item, &field_path, &operator, &value);
103 if is_negated { !result } else { result }
104 })
105 .collect();
106
107 Ok(filtered)
108}
109
110fn parse_not_condition_with_parentheses(condition: &str) -> Result<(String, bool), Error> {
111 let trimmed = condition.trim();
112
113 if trimmed.starts_with("not ") {
114 let rest = trimmed[4..].trim();
115
116 if rest.starts_with('(') && rest.ends_with(')') {
118 let inner_condition = rest[1..rest.len() - 1].trim().to_string();
119 Ok((inner_condition, true))
120 } else {
121 Err(Error::InvalidQuery(
122 "not operator requires parentheses around condition: not (.condition)".to_string(),
123 ))
124 }
125 } else {
126 Ok((trimmed.to_string(), false))
127 }
128}
129
/// Splits an aggregation field spec (e.g. ".a", ".a,.b", ".") into bare
/// field names. "." is kept verbatim and means "the item itself"; any other
/// entry is trimmed and stripped of its leading dot(s).
///
/// Fix: the single-field path previously skipped `trim()`, unlike the
/// comma-separated path, so a spec with surrounding whitespace (e.g.
/// `sum( .price )`) produced a field name with embedded spaces that never
/// matched any field. Both paths now go through the same trimming logic.
fn parse_multi_field_spec(field_spec: &str) -> Vec<String> {
    field_spec
        .split(',')
        .map(|part| {
            let part = part.trim();
            if part == "." {
                ".".to_string()
            } else {
                part.trim_start_matches('.').to_string()
            }
        })
        .collect()
}
154
155fn apply_multi_field_aggregation(
157 data: Vec<Value>,
158 operation: &str,
159 field_spec: &str,
160) -> Result<Vec<Value>, Error> {
161 let field_names = parse_multi_field_spec(field_spec);
162
163 if field_names.len() == 1 {
164 apply_single_field_aggregation(data, operation, &field_names[0])
166 } else {
167 let mut result_obj = serde_json::Map::new();
169
170 for field_name in field_names {
171 let field_result = apply_single_field_aggregation(data.clone(), operation, &field_name)?;
172 let field_key = if field_name == "." {
173 format!("{}_root", operation)
174 } else {
175 format!("{}_{}", operation, field_name)
176 };
177
178 if let Some(value) = field_result.first() {
179 result_obj.insert(field_key, value.clone());
180 }
181 }
182
183 Ok(vec![Value::Object(result_obj)])
184 }
185}
186
187fn apply_single_field_aggregation(
189 data: Vec<Value>,
190 operation: &str,
191 field_name: &str,
192) -> Result<Vec<Value>, Error> {
193 match operation {
194 "sum" => {
195 let sum: f64 = if field_name == "." {
196 data.iter().filter_map(|val| val.as_f64()).sum()
197 } else {
198 data.iter()
199 .filter_map(|item| item.get(field_name))
200 .filter_map(|val| val.as_f64())
201 .sum()
202 };
203
204 let round_sum = if sum.fract() == 0.0 { sum } else { (sum * 10.0).round() / 10.0 };
205 Ok(vec![Value::Number(serde_json::Number::from_f64(round_sum).unwrap())])
206 }
207 "avg" => {
208 let values: Vec<f64> = if field_name == "." {
209 data.iter().filter_map(|val| val.as_f64()).collect()
210 } else {
211 data.iter()
212 .filter_map(|item| item.get(field_name))
213 .filter_map(|val| val.as_f64())
214 .collect()
215 };
216
217 if values.is_empty() {
218 Ok(vec![Value::Null])
219 } else {
220 let avg = values.iter().sum::<f64>() / values.len() as f64;
221 let round_avg = (avg * 10.0).round() / 10.0;
222 Ok(vec![Value::Number(serde_json::Number::from_f64(round_avg).unwrap())])
223 }
224 }
225 "min" => {
226 let min_val = if field_name == "." {
227 data.iter()
228 .filter_map(|val| val.as_f64())
229 .fold(f64::INFINITY, f64::min)
230 } else {
231 data.iter()
232 .filter_map(|item| item.get(field_name))
233 .filter_map(|val| val.as_f64())
234 .fold(f64::INFINITY, f64::min)
235 };
236
237 if min_val == f64::INFINITY {
238 Ok(vec![Value::Null])
239 } else {
240 Ok(vec![Value::Number(serde_json::Number::from_f64(min_val).unwrap())])
241 }
242 }
243 "max" => {
244 let max_val = if field_name == "." {
245 data.iter()
246 .filter_map(|val| val.as_f64())
247 .fold(f64::NEG_INFINITY, f64::max)
248 } else {
249 data.iter()
250 .filter_map(|item| item.get(field_name))
251 .filter_map(|val| val.as_f64())
252 .fold(f64::NEG_INFINITY, f64::max)
253 };
254
255 if max_val == f64::NEG_INFINITY {
256 Ok(vec![Value::Null])
257 } else {
258 Ok(vec![Value::Number(serde_json::Number::from_f64(max_val).unwrap())])
259 }
260 }
261 _ => Err(Error::InvalidQuery(format!("Unsupported operation: {}", operation))),
262 }
263}
264
265fn apply_multi_field_aggregation_to_groups(
267 data: Vec<Value>,
268 operation: &str,
269 field_spec: &str,
270) -> Result<Vec<Value>, Error> {
271 let field_names = parse_multi_field_spec(field_spec);
272 let mut results = Vec::new();
273
274 for group_data in data {
275 if let Value::Object(group_obj) = group_data {
276 let group_name = group_obj.get("group").unwrap();
277 let items = group_obj.get("items").and_then(|v| v.as_array()).unwrap();
278
279 if field_names.len() == 1 {
280 let aggregated_value = apply_single_field_aggregation_to_group(items, operation, &field_names[0])?;
282
283 let mut result_obj = serde_json::Map::new();
284 result_obj.insert("group".to_string(), group_name.clone());
285 result_obj.insert(operation.to_string(), aggregated_value);
286 results.push(Value::Object(result_obj));
287 } else {
288 let mut result_obj = serde_json::Map::new();
290 result_obj.insert("group".to_string(), group_name.clone());
291
292 for field_name in &field_names {
293 let aggregated_value = apply_single_field_aggregation_to_group(items, operation, field_name)?;
294 let field_key = if field_name == "." {
295 format!("{}_root", operation)
296 } else {
297 format!("{}_{}", operation, field_name)
298 };
299 result_obj.insert(field_key, aggregated_value);
300 }
301
302 results.push(Value::Object(result_obj));
303 }
304 }
305 }
306
307 Ok(results)
308}
309
310fn apply_single_field_aggregation_to_group(
312 items: &[Value],
313 operation: &str,
314 field_name: &str,
315) -> Result<Value, Error> {
316 match operation {
317 "sum" => calculate_sum(items, field_name),
318 "avg" => calculate_avg(items, field_name),
319 "min" => calculate_min(items, field_name),
320 "max" => calculate_max(items, field_name),
321 _ => Ok(Value::Null),
322 }
323}
324
325pub fn apply_pipeline_operation(data: Vec<Value>, operation: &str) -> Result<Vec<Value>, Error> {
326 let trimmed_op = operation.trim();
327
328 if operation.starts_with(".[") && operation.ends_with("]") {
329 return apply_universal_slice_operation(data, operation);
330 }
331
332 if trimmed_op.starts_with("select(") && trimmed_op.ends_with(")") {
333 apply_simple_filter(data, trimmed_op)
335 } else if trimmed_op == "count" {
336 if is_grouped_data(&data) {
338 apply_aggregation_to_groups(data, "count", "")
339 } else {
340 let count = data.len();
341 let count_value = Value::Number(serde_json::Number::from(count));
342 Ok(vec![count_value])
343 }
344 } else if trimmed_op.starts_with("map(") && trimmed_op.ends_with(")") {
345 apply_map_operation(data, trimmed_op)
346 } else if trimmed_op.starts_with("select_fields(") && trimmed_op.ends_with(")") {
347 let fields_str = &trimmed_op[14..trimmed_op.len() - 1]; let field_list: Vec<String> = fields_str
350 .split(',')
351 .map(|s| s.trim().to_string())
352 .collect();
353
354 apply_field_selection(data, field_list)
355 } else if trimmed_op == "info" {
356 print_data_info(&data);
358 Ok(vec![]) } else if trimmed_op.starts_with("sum(") && trimmed_op.ends_with(")") {
360 let field_spec = &trimmed_op[4..trimmed_op.len() - 1];
361
362 if is_grouped_data(&data) {
363 apply_multi_field_aggregation_to_groups(data, "sum", field_spec)
364 } else {
365 apply_multi_field_aggregation(data, "sum", field_spec)
366 }
367 } else if trimmed_op.starts_with("avg(") && trimmed_op.ends_with(")") {
368 let field_spec = &trimmed_op[4..trimmed_op.len() - 1];
369
370 if is_grouped_data(&data) {
371 apply_multi_field_aggregation_to_groups(data, "avg", field_spec)
372 } else {
373 apply_multi_field_aggregation(data, "avg", field_spec)
374 }
375 } else if trimmed_op.starts_with("min(") && trimmed_op.ends_with(")") {
376 let field_spec = &trimmed_op[4..trimmed_op.len() - 1];
377
378 if is_grouped_data(&data) {
379 apply_multi_field_aggregation_to_groups(data, "min", field_spec)
380 } else {
381 apply_multi_field_aggregation(data, "min", field_spec)
382 }
383 } else if trimmed_op.starts_with("max(") && trimmed_op.ends_with(")") {
384 let field_spec = &trimmed_op[4..trimmed_op.len() - 1];
385
386 if is_grouped_data(&data) {
387 apply_multi_field_aggregation_to_groups(data, "max", field_spec)
388 } else {
389 apply_multi_field_aggregation(data, "max", field_spec)
390 }
391 } else if trimmed_op.starts_with("group_by(") && trimmed_op.ends_with(")") {
392 let field = &trimmed_op[9..trimmed_op.len() - 1];
394 let field_name = if field == "." {
395 "." } else {
397 field.trim_start_matches('.') };
399
400 let grouped = group_data_by_field(data, field_name)?;
401 Ok(grouped)
402 } else if trimmed_op == "unique" {
403 let result = apply_stats_operation(&data, "unique", None)?;
405 if let Value::Array(arr) = result {
406 Ok(arr)
407 } else {
408 Ok(vec![result])
409 }
410 } else if trimmed_op == "sort" {
411 let result = apply_stats_operation(&data, "sort", None)?;
413 if let Value::Array(arr) = result {
414 Ok(arr)
415 } else {
416 Ok(vec![result])
417 }
418 } else if trimmed_op == "length" {
419 let result = apply_stats_operation(&data, "length", None)?;
421 Ok(vec![result])
422 } else if trimmed_op == "median" {
423 let result = apply_stats_operation(&data, "median", None)?;
425 Ok(vec![result])
426 } else if trimmed_op == "stddev" {
427 let result = apply_stats_operation(&data, "stddev", None)?;
429 Ok(vec![result])
430 } else if trimmed_op.starts_with("unique(") && trimmed_op.ends_with(")") {
431 let field = &trimmed_op[7..trimmed_op.len() - 1];
433 let field_name = field.trim_start_matches('.');
434 let result = apply_stats_operation(&data, "unique", Some(field_name))?;
435 if let Value::Array(arr) = result {
436 Ok(arr)
437 } else {
438 Ok(vec![result])
439 }
440 } else if trimmed_op.starts_with("sort(") && trimmed_op.ends_with(")") {
441 let content = &trimmed_op[5..trimmed_op.len() - 1];
443 apply_sort_operation(data, content)
444 } else if trimmed_op.starts_with("median(") && trimmed_op.ends_with(")") {
445 let field = &trimmed_op[7..trimmed_op.len() - 1];
447 let field_name = field.trim_start_matches('.');
448 let result = apply_stats_operation(&data, "median", Some(field_name))?;
449 Ok(vec![result])
450 } else if trimmed_op.starts_with("stddev(") && trimmed_op.ends_with(")") {
451 let field = &trimmed_op[7..trimmed_op.len() - 1];
453 let field_name = field.trim_start_matches('.');
454 let result = apply_stats_operation(&data, "stddev", Some(field_name))?;
455 Ok(vec![result])
456 } else {
457 Err(Error::InvalidQuery(format!(
459 "Unsupported operation: '{}' (length: {}, starts with 'map(': {}, ends with ')': {})",
460 trimmed_op,
461 trimmed_op.len(),
462 trimmed_op.starts_with("map("),
463 trimmed_op.ends_with(")")
464 )))
465 }
466}
467
468fn apply_sort_operation(data: Vec<Value>, sort_spec: &str) -> Result<Vec<Value>, Error> {
719 if is_grouped_data(&data) {
721 apply_sort_to_grouped_data(data, sort_spec)
722 } else {
723 apply_sort_to_regular_data(data, sort_spec)
724 }
725}
726
727fn apply_sort_to_regular_data(data: Vec<Value>, sort_spec: &str) -> Result<Vec<Value>, Error> {
729 let (field_name, is_descending) = parse_sort_spec(sort_spec)?;
730
731 if field_name.is_empty() {
732 let mut sorted_data = data;
734 sorted_data.sort_by(|a, b| {
735 let comparison = compare_sort_values(a, b);
736 if is_descending {
737 comparison.reverse()
738 } else {
739 comparison
740 }
741 });
742 Ok(sorted_data)
743 } else {
744 let mut sorted_data = data;
746 sorted_data.sort_by(|a, b| {
747 let value_a = extract_sort_key(a, &format!(".{}", field_name));
748 let value_b = extract_sort_key(b, &format!(".{}", field_name));
749 let comparison = compare_sort_values(&value_a, &value_b);
750 if is_descending {
751 comparison.reverse()
752 } else {
753 comparison
754 }
755 });
756 Ok(sorted_data)
757 }
758}
759
760fn apply_sort_to_grouped_data(data: Vec<Value>, sort_spec: &str) -> Result<Vec<Value>, Error> {
762 let (field_name, is_descending) = parse_sort_spec(sort_spec)?;
763
764 let mut sorted_data = data;
765
766 if field_name.is_empty() {
767 sorted_data.sort_by(|a, b| {
769 let group_a = a.get("group").unwrap_or(&Value::Null);
770 let group_b = b.get("group").unwrap_or(&Value::Null);
771 let comparison = compare_sort_values(group_a, group_b);
772 if is_descending {
773 comparison.reverse()
774 } else {
775 comparison
776 }
777 });
778 } else {
779 sorted_data.sort_by(|a, b| {
781 let value_a = extract_aggregation_value(a, &field_name);
782 let value_b = extract_aggregation_value(b, &field_name);
783 let comparison = compare_sort_values(&value_a, &value_b);
784 if is_descending {
785 comparison.reverse()
786 } else {
787 comparison
788 }
789 });
790 }
791
792 Ok(sorted_data)
793}
794
795fn parse_sort_spec(sort_spec: &str) -> Result<(String, bool), Error> {
797 if sort_spec.is_empty() {
798 return Ok((String::new(), false)); }
800
801 let parts: Vec<&str> = sort_spec.split(',').map(|s| s.trim()).collect();
802
803 if parts.len() == 1 {
804 let field_name = parts[0].trim_start_matches('.').to_string();
806 Ok((field_name, false))
807 } else if parts.len() == 2 {
808 let field_name = parts[0].trim_start_matches('.').to_string();
810 let direction = parts[1].trim_matches('"').trim_matches('\'');
811 let is_descending = match direction.to_lowercase().as_str() {
812 "desc" | "descending" | "down" => true,
813 "asc" | "ascending" | "up" => false,
814 _ => return Err(Error::InvalidQuery(format!(
815 "Invalid sort direction: '{}'. Use 'asc' or 'desc'", direction
816 ))),
817 };
818 Ok((field_name, is_descending))
819 } else {
820 Err(Error::InvalidQuery(
821 "Invalid sort specification. Use: sort(.field) or sort(.field, \"desc\")".to_string()
822 ))
823 }
824}
825
826fn extract_aggregation_value(group_obj: &Value, field_name: &str) -> Value {
828 if let Value::Object(obj) = group_obj {
829 let possible_keys = vec![
831 field_name.to_string(),
832 format!("sum_{}", field_name),
833 format!("avg_{}", field_name),
834 format!("min_{}", field_name),
835 format!("max_{}", field_name),
836 format!("count_{}", field_name),
837 "sum".to_string(),
838 "avg".to_string(),
839 "min".to_string(),
840 "max".to_string(),
841 "count".to_string(),
842 ];
843
844 for key in possible_keys {
845 if let Some(value) = obj.get(&key) {
846 return value.clone();
847 }
848 }
849
850 obj.get("group").cloned().unwrap_or(Value::Null)
852 } else {
853 Value::Null
854 }
855}
856
857
858fn apply_map_operation(data: Vec<Value>, operation: &str) -> Result<Vec<Value>, Error> {
860 let content = &operation[4..operation.len() - 1]; if content.contains(',') && content.contains('|') {
865 apply_multi_field_map_operation(data, content)
867 } else {
868 apply_single_field_map_operation(data, content)
870 }
871}
872
873fn apply_single_field_map_operation(data: Vec<Value>, content: &str) -> Result<Vec<Value>, Error> {
875 let (field_access, string_operations) = parse_map_content(content)?;
876
877 let mut results = Vec::new();
878
879 for item in data {
880 let field_value = extract_field_value(&item, &field_access)?;
882
883 let transformed_value = apply_string_operations(&field_value, &string_operations)?;
885
886 let result = update_or_create_value(&item, &field_access, transformed_value)?;
888 results.push(result);
889 }
890
891 Ok(results)
892}
893
894fn apply_multi_field_map_operation(data: Vec<Value>, content: &str) -> Result<Vec<Value>, Error> {
896 let parts: Vec<&str> = content.split('|').map(|s| s.trim()).collect();
898
899 if parts.len() != 2 {
900 return Err(Error::InvalidQuery(
901 "Multi-field map must have format: (.field1, .field2 | operation)".to_string(),
902 ));
903 }
904
905 let fields_part = parts[0].trim();
906 let operation = parts[1].trim();
907
908 let field_paths: Vec<&str> = fields_part.split(',').map(|s| s.trim()).collect();
910
911 for field_path in &field_paths {
913 if !field_path.starts_with('.') {
914 return Err(Error::InvalidQuery(format!(
915 "Field path must start with '.': {}",
916 field_path
917 )));
918 }
919 }
920
921 let mut results = Vec::new();
922
923 for item in data {
924 let transformed_item =
926 crate::string_ops::apply_operation_to_multiple_fields(&item, &field_paths, operation)?;
927 results.push(transformed_item);
928 }
929
930 Ok(results)
931}
932
933fn parse_map_content(content: &str) -> Result<(String, Vec<String>), Error> {
935 let parts: Vec<&str> = content.split('|').map(|s| s.trim()).collect();
936
937 if parts.is_empty() {
938 return Err(Error::InvalidQuery("Empty map operation".to_string()));
939 }
940
941 let field_access = parts[0].to_string();
943
944 let string_operations: Vec<String> = parts[1..].iter().map(|s| s.to_string()).collect();
946
947 Ok((field_access, string_operations))
948}
949
950fn extract_field_value(item: &Value, field_access: &str) -> Result<Value, Error> {
952 if field_access == "." {
953 return Ok(item.clone());
955 }
956
957 if field_access.starts_with('.') {
958 let field_name = &field_access[1..]; if let Some(value) = item.get(field_name) {
961 Ok(value.clone())
962 } else {
963 Err(Error::InvalidQuery(format!(
964 "Field '{}' not found",
965 field_name
966 )))
967 }
968 } else {
969 Err(Error::InvalidQuery(format!(
970 "Invalid field access: {}",
971 field_access
972 )))
973 }
974}
975
976fn apply_string_operations(value: &Value, operations: &[String]) -> Result<Value, Error> {
978 if operations.is_empty() {
979 return Ok(value.clone());
980 }
981
982 let operations_str: Vec<&str> = operations.iter().map(|s| s.as_str()).collect();
983 string_ops::apply_string_pipeline(value, &operations_str)
984}
985
986fn update_or_create_value(
988 original: &Value,
989 field_access: &str,
990 new_value: Value,
991) -> Result<Value, Error> {
992 if field_access == "." {
993 Ok(new_value)
995 } else if field_access.starts_with('.') {
996 let field_name = &field_access[1..];
997
998 if let Value::Object(mut obj) = original.clone() {
1000 obj.insert(field_name.to_string(), new_value);
1001 Ok(Value::Object(obj))
1002 } else {
1003 let mut new_obj = serde_json::Map::new();
1005 new_obj.insert(field_name.to_string(), new_value);
1006 Ok(Value::Object(new_obj))
1007 }
1008 } else {
1009 Err(Error::InvalidQuery(format!(
1010 "Invalid field access: {}",
1011 field_access
1012 )))
1013 }
1014}
1015
1016fn apply_field_selection(data: Vec<Value>, field_list: Vec<String>) -> Result<Vec<Value>, Error> {
1017 let mut results = Vec::new();
1018
1019 for item in data {
1020 if let Value::Object(obj) = item {
1021 let mut selected_obj = serde_json::Map::new();
1022
1023 for field_name in &field_list {
1025 if let Some(value) = obj.get(field_name) {
1026 selected_obj.insert(field_name.clone(), value.clone());
1027 }
1028 }
1029
1030 results.push(Value::Object(selected_obj));
1031 } else {
1032 return Err(Error::InvalidQuery(
1034 "select_fields can only be applied to objects".into(),
1035 ));
1036 }
1037 }
1038
1039 Ok(results)
1040}
1041
1042fn group_data_by_field(data: Vec<Value>, field_name: &str) -> Result<Vec<Value>, Error> {
1043 use std::collections::HashMap;
1044
1045 let mut groups: HashMap<String, Vec<Value>> = HashMap::new();
1046
1047 for item in data {
1048 let key = if field_name == "." {
1049 value_to_string(&item)
1051 } else {
1052 if let Some(field_value) = item.get(field_name) {
1054 value_to_string(field_value)
1055 } else {
1056 "null".to_string()
1057 }
1058 };
1059
1060 groups.entry(key).or_default().push(item);
1061 }
1062
1063 let result: Vec<Value> = groups
1065 .into_iter()
1066 .map(|(group_name, group_items)| {
1067 let mut group_obj = serde_json::Map::new();
1068 group_obj.insert("group".to_string(), Value::String(group_name));
1069 group_obj.insert("items".to_string(), Value::Array(group_items));
1070 Value::Object(group_obj)
1071 })
1072 .collect();
1073
1074 Ok(result)
1075}
1076
1077fn parse_condition(condition: &str) -> Result<(String, String, String), Error> {
1078 let condition = condition.trim();
1079
1080 if let Some(pos) = condition.find(" >= ") {
1082 let field = condition[..pos].trim().to_string();
1083 let value = condition[pos + 4..].trim().to_string();
1084 return Ok((field, ">=".to_string(), value));
1085 }
1086
1087 if let Some(pos) = condition.find(" <= ") {
1088 let field = condition[..pos].trim().to_string();
1089 let value = condition[pos + 4..].trim().to_string();
1090 return Ok((field, "<=".to_string(), value));
1091 }
1092
1093 if let Some(pos) = condition.find(" > ") {
1094 let field = condition[..pos].trim().to_string();
1095 let value = condition[pos + 3..].trim().to_string();
1096 return Ok((field, ">".to_string(), value));
1097 }
1098
1099 if let Some(pos) = condition.find(" < ") {
1100 let field = condition[..pos].trim().to_string();
1101 let value = condition[pos + 3..].trim().to_string();
1102 return Ok((field, "<".to_string(), value));
1103 }
1104
1105 if let Some(pos) = condition.find(" == ") {
1106 let field = condition[..pos].trim().to_string();
1107 let value = condition[pos + 4..].trim().to_string();
1108 return Ok((field, "==".to_string(), value));
1109 }
1110
1111 if let Some(pos) = condition.find(" != ") {
1112 let field = condition[..pos].trim().to_string();
1113 let value = condition[pos + 4..].trim().to_string();
1114 return Ok((field, "!=".to_string(), value));
1115 }
1116
1117 Err(Error::InvalidQuery("Invalid condition format".into()))
1118}
1119
1120fn evaluate_condition(item: &Value, field_path: &str, operator: &str, value: &str) -> bool {
1121 let field_name = if field_path.starts_with('.') {
1123 &field_path[1..]
1124 } else {
1125 field_path
1126 };
1127
1128 let field_value = match item.get(field_name) {
1129 Some(val) => val,
1130 None => return false,
1131 };
1132
1133 match operator {
1134 ">" => compare_greater(field_value, value),
1135 "<" => compare_less(field_value, value),
1136 ">=" => compare_greater_equal(field_value, value),
1137 "<=" => compare_less_equal(field_value, value),
1138 "==" => compare_equal(field_value, value),
1139 "!=" => !compare_equal(field_value, value),
1140 _ => false,
1141 }
1142}
1143
1144fn compare_greater(field_value: &Value, target: &str) -> bool {
1145 match field_value {
1146 Value::Number(n) => {
1147 if let Ok(target_num) = target.parse::<f64>() {
1148 n.as_f64().unwrap_or(0.0) > target_num
1149 } else {
1150 false
1151 }
1152 }
1153 _ => false,
1154 }
1155}
1156
1157fn compare_less(field_value: &Value, target: &str) -> bool {
1158 match field_value {
1159 Value::Number(n) => {
1160 if let Ok(target_num) = target.parse::<f64>() {
1161 n.as_f64().unwrap_or(0.0) < target_num
1162 } else {
1163 false
1164 }
1165 }
1166 _ => false,
1167 }
1168}
1169
1170fn compare_equal(field_value: &Value, target: &str) -> bool {
1171 match field_value {
1172 Value::String(s) => {
1173 let target_clean = target.trim_matches('"');
1175 s == target_clean
1176 }
1177 Value::Number(n) => {
1178 if let Ok(target_num) = target.parse::<f64>() {
1179 n.as_f64().unwrap_or(0.0) == target_num
1180 } else {
1181 false
1182 }
1183 }
1184 Value::Bool(b) => match target {
1185 "true" => *b,
1186 "false" => !*b,
1187 _ => false,
1188 },
1189 _ => false,
1190 }
1191}
1192
1193fn compare_greater_equal(field_value: &Value, target: &str) -> bool {
1194 match field_value {
1195 Value::Number(n) => {
1196 if let Ok(target_num) = target.parse::<f64>() {
1197 n.as_f64().unwrap_or(0.0) >= target_num
1198 } else {
1199 false
1200 }
1201 }
1202 _ => false,
1203 }
1204}
1205
1206fn compare_less_equal(field_value: &Value, target: &str) -> bool {
1207 match field_value {
1208 Value::Number(n) => {
1209 if let Ok(target_num) = target.parse::<f64>() {
1210 n.as_f64().unwrap_or(0.0) <= target_num
1211 } else {
1212 false
1213 }
1214 }
1215 _ => false,
1216 }
1217}
1218
1219fn is_grouped_data(data: &[Value]) -> bool {
1220 data.iter().all(|item| {
1221 if let Value::Object(obj) = item {
1222 obj.contains_key("group") && obj.contains_key("items")
1223 } else {
1224 false
1225 }
1226 })
1227}
1228
1229fn apply_aggregation_to_groups(
1230 data: Vec<Value>,
1231 operation: &str,
1232 field_name: &str,
1233) -> Result<Vec<Value>, Error> {
1234 let mut results = Vec::new();
1235
1236 for group_data in data {
1237 if let Value::Object(group_obj) = group_data {
1238 let group_name = group_obj.get("group").unwrap();
1239 let items = group_obj.get("items").and_then(|v| v.as_array()).unwrap();
1240
1241 let aggregated_value = match operation {
1243 "avg" => calculate_avg(items, field_name)?,
1244 "sum" => calculate_sum(items, field_name)?,
1245 "count" => Value::Number(serde_json::Number::from(items.len())),
1246 "min" => calculate_min(items, field_name)?,
1247 "max" => calculate_max(items, field_name)?,
1248 _ => Value::Null,
1249 };
1250
1251 let mut result_obj = serde_json::Map::new();
1253 result_obj.insert("group".to_string(), group_name.clone());
1254 result_obj.insert(operation.to_string(), aggregated_value);
1255 results.push(Value::Object(result_obj));
1256 }
1257 }
1258
1259 Ok(results)
1260}
1261
1262fn calculate_avg(items: &[Value], field_name: &str) -> Result<Value, Error> {
1263 let values: Vec<f64> = items
1264 .iter()
1265 .filter_map(|item| item.get(field_name))
1266 .filter_map(|val| val.as_f64())
1267 .collect();
1268
1269 if values.is_empty() {
1270 Ok(Value::Null)
1271 } else {
1272 let avg = values.iter().sum::<f64>() / values.len() as f64;
1273 let rounded_avg = (avg * 10.0).round() / 10.0;
1274 Ok(Value::Number(
1275 serde_json::Number::from_f64(rounded_avg).unwrap(),
1276 ))
1277 }
1278}
1279
1280fn calculate_sum(items: &[Value], field_name: &str) -> Result<Value, Error> {
1281 let sum: f64 = items
1282 .iter()
1283 .filter_map(|item| item.get(field_name))
1284 .filter_map(|val| val.as_f64())
1285 .sum();
1286
1287 let rounded_sum = if sum.fract() == 0.0 {
1288 sum
1289 } else {
1290 (sum * 10.0).round() / 10.0
1291 };
1292
1293 Ok(Value::Number(
1294 serde_json::Number::from_f64(rounded_sum).unwrap(),
1295 ))
1296}
1297
1298fn calculate_min(items: &[Value], field_name: &str) -> Result<Value, Error> {
1299 let min_val = items
1300 .iter()
1301 .filter_map(|item| item.get(field_name))
1302 .filter_map(|val| val.as_f64())
1303 .fold(f64::INFINITY, f64::min);
1304
1305 if min_val == f64::INFINITY {
1306 Ok(Value::Null)
1307 } else {
1308 Ok(Value::Number(
1309 serde_json::Number::from_f64(min_val).unwrap(),
1310 ))
1311 }
1312}
1313
1314fn calculate_max(items: &[Value], field_name: &str) -> Result<Value, Error> {
1315 let max_val = items
1316 .iter()
1317 .filter_map(|item| item.get(field_name))
1318 .filter_map(|val| val.as_f64())
1319 .fold(f64::NEG_INFINITY, f64::max);
1320
1321 if max_val == f64::NEG_INFINITY {
1322 Ok(Value::Null)
1323 } else {
1324 Ok(Value::Number(
1325 serde_json::Number::from_f64(max_val).unwrap(),
1326 ))
1327 }
1328}
1329
1330pub fn apply_array_slice(array: &[Value], start: Option<usize>, end: Option<usize>) -> Vec<Value> {
1332 let len = array.len();
1333
1334 let start_idx = start.unwrap_or(0);
1335 let end_idx = end.unwrap_or(len);
1336
1337 let start_idx = start_idx.min(len);
1339 let end_idx = end_idx.min(len);
1340
1341 if start_idx >= end_idx {
1342 return Vec::new(); }
1344
1345 array[start_idx..end_idx].to_vec()
1346}
1347
1348pub fn apply_slice_to_grouped_data(
1350 data: Vec<Value>,
1351 start: Option<usize>,
1352 end: Option<usize>,
1353) -> Result<Vec<Value>, Error> {
1354 let mut result = Vec::new();
1355
1356 for group in data {
1357 if let Value::Array(group_items) = group {
1358 let sliced_group = apply_array_slice(&group_items, start, end);
1360
1361 result.extend(sliced_group);
1363 } else {
1364 result.push(group);
1366 }
1367 }
1368
1369 Ok(result)
1370}
1371
1372pub fn parse_slice_notation(
1374 bracket_content: &str,
1375) -> Result<(Option<usize>, Option<usize>), Error> {
1376 if !bracket_content.contains(':') {
1377 return Err(Error::InvalidQuery("Not a slice notation".to_string()));
1378 }
1379
1380 let parts: Vec<&str> = bracket_content.split(':').collect();
1381 if parts.len() != 2 {
1382 return Err(Error::InvalidQuery(
1383 "Invalid slice format, expected start:end".to_string(),
1384 ));
1385 }
1386
1387 let start = if parts[0].is_empty() {
1388 None } else {
1390 Some(
1391 parts[0]
1392 .parse::<usize>()
1393 .map_err(|_| Error::InvalidQuery(format!("Invalid start index: {}", parts[0])))?,
1394 )
1395 };
1396
1397 let end = if parts[1].is_empty() {
1398 None } else {
1400 Some(
1401 parts[1]
1402 .parse::<usize>()
1403 .map_err(|_| Error::InvalidQuery(format!("Invalid end index: {}", parts[1])))?,
1404 )
1405 };
1406
1407 Ok((start, end))
1408}
1409
1410pub fn parse_slice_notation_with_negative(
1412 bracket_content: &str,
1413 data_len: usize,
1414) -> Result<(Option<usize>, Option<usize>), Error> {
1415 let parts: Vec<&str> = bracket_content.split(':').collect();
1416 if parts.len() != 2 {
1417 return Err(Error::InvalidQuery(
1418 "Invalid slice format, expected start:end".to_string(),
1419 ));
1420 }
1421
1422 let start = if parts[0].is_empty() {
1423 None
1424 } else {
1425 Some(parse_index_with_negative(parts[0], data_len)?)
1426 };
1427
1428 let end = if parts[1].is_empty() {
1429 None
1430 } else {
1431 Some(parse_index_with_negative(parts[1], data_len)?)
1432 };
1433
1434 Ok((start, end))
1435}
1436
1437pub fn parse_index_with_negative(index_str: &str, data_len: usize) -> Result<usize, Error> {
1439 if index_str.starts_with('-') {
1440 let negative_index = index_str[1..]
1441 .parse::<usize>()
1442 .map_err(|_| Error::InvalidQuery(format!("Invalid negative index: {}", index_str)))?;
1443
1444 if negative_index > data_len {
1445 Ok(0) } else {
1447 Ok(data_len - negative_index)
1448 }
1449 } else {
1450 index_str
1451 .parse::<usize>()
1452 .map_err(|_| Error::InvalidQuery(format!("Invalid index: {}", index_str)))
1453 }
1454}
1455
/// Classification of the top-level shape of a result set, used to decide
/// how a slice operation should be applied.
#[derive(Debug, PartialEq)]
pub enum DataStructure {
    /// Arrays of objects that look like the output of a group-by operation.
    GroupedData,
    /// A flat array of objects or primitives.
    RegularArray,
    /// Arrays whose contents do not look like grouped records.
    NestedArrays,
    /// A heterogeneous mix of element kinds.
    Mixed,
}
1464
1465pub fn detect_data_structure(data: &[Value]) -> DataStructure {
1466 if data.is_empty() {
1467 return DataStructure::RegularArray;
1468 }
1469
1470 let all_arrays = data.iter().all(|item| item.is_array());
1471 let all_objects = data.iter().all(|item| item.is_object());
1472 let all_primitives = data.iter().all(|item| {
1473 matches!(
1474 item,
1475 Value::String(_) | Value::Number(_) | Value::Bool(_) | Value::Null
1476 )
1477 });
1478
1479 if all_arrays {
1480 if is_likely_grouped_data(data) {
1482 DataStructure::GroupedData
1483 } else {
1484 DataStructure::NestedArrays
1485 }
1486 } else if all_objects || all_primitives {
1487 DataStructure::RegularArray
1488 } else {
1489 DataStructure::Mixed
1490 }
1491}
1492
1493pub fn is_likely_grouped_data(data: &[Value]) -> bool {
1495 if data.len() < 2 {
1501 return false; }
1503
1504 for item in data {
1505 if let Value::Array(arr) = item {
1506 if arr.is_empty() {
1507 return false;
1508 }
1509 if !arr[0].is_object() {
1510 return false;
1511 }
1512 } else {
1513 return false;
1514 }
1515 }
1516
1517 true
1518}
1519
1520pub fn apply_universal_slice_operation(
1522 data: Vec<Value>,
1523 operation: &str,
1524) -> Result<Vec<Value>, Error> {
1525 let bracket_content = &operation[2..operation.len() - 1]; if bracket_content.starts_with('-') && !bracket_content.contains(':') {
1529 return apply_negative_index_slice(data, bracket_content);
1530 }
1531
1532 if !bracket_content.contains(':') {
1534 let index = bracket_content
1536 .parse::<usize>()
1537 .map_err(|_| Error::InvalidQuery(format!("Invalid index: {}", bracket_content)))?;
1538
1539 if let Some(item) = data.get(index) {
1540 return Ok(vec![item.clone()]);
1541 } else {
1542 return Ok(vec![]); }
1544 }
1545
1546 let (start, end) = parse_slice_notation_with_negative(bracket_content, data.len())?;
1548
1549 match detect_data_structure(&data) {
1551 DataStructure::GroupedData => apply_slice_to_grouped_data(data, start, end),
1552 DataStructure::RegularArray => apply_slice_to_regular_array(data, start, end),
1553 DataStructure::NestedArrays => apply_slice_to_nested_arrays(data, start, end),
1554 DataStructure::Mixed => apply_slice_to_regular_array(data, start, end), }
1556}
1557
1558pub fn apply_negative_index_slice(data: Vec<Value>, index_str: &str) -> Result<Vec<Value>, Error> {
1560 let data_len = data.len();
1561 let negative_index = index_str[1..]
1562 .parse::<usize>()
1563 .map_err(|_| Error::InvalidQuery(format!("Invalid negative index: {}", index_str)))?;
1564
1565 if negative_index > data_len || negative_index == 0 {
1566 return Ok(vec![]); }
1568
1569 let actual_index = data_len - negative_index;
1570 if let Some(item) = data.get(actual_index) {
1571 Ok(vec![item.clone()])
1572 } else {
1573 Ok(vec![])
1574 }
1575}
1576
1577pub fn apply_slice_to_regular_array(
1579 data: Vec<Value>,
1580 start: Option<usize>,
1581 end: Option<usize>,
1582) -> Result<Vec<Value>, Error> {
1583 let sliced = apply_array_slice(&data, start, end);
1584 Ok(sliced)
1585}
1586
1587pub fn apply_slice_to_nested_arrays(
1589 data: Vec<Value>,
1590 start: Option<usize>,
1591 end: Option<usize>,
1592) -> Result<Vec<Value>, Error> {
1593 let sliced = apply_array_slice(&data, start, end);
1595 Ok(sliced)
1596}
1597
1598pub fn apply_sort_with_field_operation(
1600 data: Vec<Value>,
1601 operation: &str,
1602) -> Result<Vec<Value>, Error> {
1603 let field_path = &operation[5..operation.len() - 1]; let mut sorted_data = data;
1606 sorted_data.sort_by(|a, b| {
1607 let value_a = extract_sort_key(a, field_path);
1608 let value_b = extract_sort_key(b, field_path);
1609
1610 compare_sort_values(&value_a, &value_b)
1611 });
1612
1613 Ok(sorted_data)
1614}
1615
1616fn extract_sort_key(item: &Value, field_path: &str) -> Value {
1627 if field_path == "." || field_path.is_empty() {
1628 item.clone()
1629 } else if field_path.starts_with('.') {
1630 let field_name = &field_path[1..];
1631 item.get(field_name).cloned().unwrap_or(Value::Null)
1632 } else {
1633 item.get(field_path).cloned().unwrap_or(Value::Null)
1634 }
1635}
1636
1637fn compare_sort_values(a: &Value, b: &Value) -> std::cmp::Ordering {
1657 use std::cmp::Ordering;
1658
1659 match (a, b) {
1660 (Value::Number(n1), Value::Number(n2)) => {
1661 let f1 = n1.as_f64().unwrap_or(0.0);
1662 let f2 = n2.as_f64().unwrap_or(0.0);
1663 f1.partial_cmp(&f2).unwrap_or(Ordering::Equal)
1664 }
1665 (Value::String(s1), Value::String(s2)) => s1.cmp(s2),
1666 (Value::Bool(b1), Value::Bool(b2)) => b1.cmp(b2),
1667 (Value::Null, Value::Null) => Ordering::Equal,
1668 (Value::Null, _) => Ordering::Less,
1669 (_, Value::Null) => Ordering::Greater,
1670 _ => {
1672 let s1 = value_to_string(a);
1673 let s2 = value_to_string(b);
1674 s1.cmp(&s2)
1675 }
1676 }
1677}
1678
1679
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    #[test]
    fn test_apply_array_slice_basic() {
        let array = vec![json!("a"), json!("b"), json!("c"), json!("d"), json!("e")];

        // Basic range from the start.
        let result = apply_array_slice(&array, Some(0), Some(3));
        assert_eq!(result.len(), 3);
        assert_eq!(result[0], json!("a"));
        assert_eq!(result[2], json!("c"));

        // Range starting mid-array.
        let result = apply_array_slice(&array, Some(1), Some(4));
        assert_eq!(result.len(), 3);
        assert_eq!(result[0], json!("b"));

        // Open start bound.
        let result = apply_array_slice(&array, None, Some(3));
        assert_eq!(result.len(), 3);
        assert_eq!(result[0], json!("a"));

        // Open end bound.
        let result = apply_array_slice(&array, Some(2), None);
        assert_eq!(result.len(), 3);
        assert_eq!(result[0], json!("c"));

        // Entirely out-of-range bounds yield an empty result.
        let result = apply_array_slice(&array, Some(10), Some(20));
        assert_eq!(result.len(), 0);

        // Inverted bounds (start > end) yield an empty result.
        let result = apply_array_slice(&array, Some(3), Some(1));
        assert_eq!(result.len(), 0);
    }

    #[test]
    fn test_apply_slice_to_grouped_data() {
        let grouped_data = vec![
            json!([
                {"category": "Electronics", "name": "Laptop", "price": 1200},
                {"category": "Electronics", "name": "Phone", "price": 800},
                {"category": "Electronics", "name": "Tablet", "price": 600},
                {"category": "Electronics", "name": "Mouse", "price": 25}
            ]),
            json!([
                {"category": "Books", "name": "Fiction", "price": 20},
                {"category": "Books", "name": "Science", "price": 30},
                {"category": "Books", "name": "History", "price": 25},
                {"category": "Books", "name": "Biography", "price": 35}
            ]),
            json!([
                {"category": "Clothing", "name": "Shirt", "price": 40},
                {"category": "Clothing", "name": "Pants", "price": 60},
                {"category": "Clothing", "name": "Shoes", "price": 80}
            ]),
        ];

        // [0:2] is applied within each group, then results are flattened.
        let result = apply_slice_to_grouped_data(grouped_data.clone(), Some(0), Some(2)).unwrap();

        assert_eq!(result.len(), 6);

        // First two Electronics items...
        assert_eq!(result[0].get("name").unwrap(), &json!("Laptop"));
        assert_eq!(result[1].get("name").unwrap(), &json!("Phone"));

        // ...then first two Books...
        assert_eq!(result[2].get("name").unwrap(), &json!("Fiction"));
        assert_eq!(result[3].get("name").unwrap(), &json!("Science"));

        // ...then first two Clothing items.
        assert_eq!(result[4].get("name").unwrap(), &json!("Shirt"));
        assert_eq!(result[5].get("name").unwrap(), &json!("Pants"));
    }

    #[test]
    fn test_apply_slice_to_grouped_data_different_ranges() {
        let grouped_data = vec![
            json!([
                {"id": 1, "group": "A"},
                {"id": 2, "group": "A"},
                {"id": 3, "group": "A"},
                {"id": 4, "group": "A"},
                {"id": 5, "group": "A"}
            ]),
            json!([
                {"id": 6, "group": "B"},
                {"id": 7, "group": "B"},
                {"id": 8, "group": "B"},
                {"id": 9, "group": "B"}
            ]),
        ];

        // [1:4] within each group: 3 items from A, 3 items from B.
        let result = apply_slice_to_grouped_data(grouped_data.clone(), Some(1), Some(4)).unwrap();

        assert_eq!(result.len(), 6);

        assert_eq!(result[0].get("id").unwrap(), &json!(2));
        assert_eq!(result[1].get("id").unwrap(), &json!(3));
        assert_eq!(result[2].get("id").unwrap(), &json!(4));

        assert_eq!(result[3].get("id").unwrap(), &json!(7));
        assert_eq!(result[4].get("id").unwrap(), &json!(8));
        assert_eq!(result[5].get("id").unwrap(), &json!(9));
    }

    #[test]
    fn test_parse_slice_notation() {
        let (start, end) = parse_slice_notation("0:5").unwrap();
        assert_eq!(start, Some(0));
        assert_eq!(end, Some(5));

        // Empty sides become open (None) bounds.
        let (start, end) = parse_slice_notation(":5").unwrap();
        assert_eq!(start, None);
        assert_eq!(end, Some(5));

        let (start, end) = parse_slice_notation("2:").unwrap();
        assert_eq!(start, Some(2));
        assert_eq!(end, None);

        let (start, end) = parse_slice_notation(":").unwrap();
        assert_eq!(start, None);
        assert_eq!(end, None);

        // Non-numeric sides and extra colons are rejected.
        assert!(parse_slice_notation("abc:def").is_err());
        assert!(parse_slice_notation("0:5:10").is_err());
    }

    #[test]
    fn test_parse_index_with_negative() {
        assert_eq!(parse_index_with_negative("5", 10).unwrap(), 5);

        // Negative indices count from the end.
        assert_eq!(parse_index_with_negative("-1", 10).unwrap(), 9);
        assert_eq!(parse_index_with_negative("-3", 10).unwrap(), 7);

        // Out-of-range negative indices clamp to the front.
        assert_eq!(parse_index_with_negative("-15", 10).unwrap(), 0);

        assert!(parse_index_with_negative("abc", 10).is_err());
        assert!(parse_index_with_negative("-abc", 10).is_err());
    }

    #[test]
    fn test_detect_data_structure() {
        let regular = vec![json!({"id": 1}), json!({"id": 2})];
        // Fixed: the original read `detect_data_structure(®ular)` — the
        // `&reg` in `&regular` had been mangled into the `®` character.
        assert_eq!(detect_data_structure(&regular), DataStructure::RegularArray);

        let grouped = vec![
            json!([{"cat": "A", "val": 1}, {"cat": "A", "val": 2}]),
            json!([{"cat": "B", "val": 3}, {"cat": "B", "val": 4}]),
        ];
        assert_eq!(detect_data_structure(&grouped), DataStructure::GroupedData);

        // Primitive arrays count as regular arrays.
        let primitives = vec![json!(1), json!(2), json!(3)];
        assert_eq!(
            detect_data_structure(&primitives),
            DataStructure::RegularArray
        );

        // So does empty input.
        let empty: Vec<Value> = vec![];
        assert_eq!(detect_data_structure(&empty), DataStructure::RegularArray);
    }

    #[test]
    fn test_apply_sort_with_field_operation() {
        let data = vec![
            json!({"name": "Alice", "score": 85}),
            json!({"name": "Bob", "score": 92}),
            json!({"name": "Carol", "score": 78}),
        ];

        let result = apply_sort_with_field_operation(data, "sort(.score)").unwrap();

        // Ascending by score.
        assert_eq!(result[0].get("score").unwrap(), &json!(78));
        assert_eq!(result[1].get("score").unwrap(), &json!(85));
        assert_eq!(result[2].get("score").unwrap(), &json!(92));
    }

    #[test]
    fn test_apply_negative_index_slice() {
        let data = vec![json!("a"), json!("b"), json!("c"), json!("d"), json!("e")];

        let result = apply_negative_index_slice(data.clone(), "-1").unwrap();
        assert_eq!(result.len(), 1);
        assert_eq!(result[0], json!("e"));

        let result = apply_negative_index_slice(data.clone(), "-3").unwrap();
        assert_eq!(result.len(), 1);
        assert_eq!(result[0], json!("c"));

        // A magnitude past the front of the data yields nothing.
        let result = apply_negative_index_slice(data.clone(), "-10").unwrap();
        assert_eq!(result.len(), 0);
    }
}