//! kura_cli/commands/read.rs
//!
//! CLI surface for `kura read`: readable-source catalog listing and the
//! declarative `read_query` request builder/executor.

1use clap::{Args, Subcommand, ValueEnum};
2use serde_json::{Map, Value, json};
3
4use crate::util::{
5    RawApiResponse, dry_run_enabled, emit_dry_run_request, exit_error, print_json_stderr,
6    print_json_stdout, raw_api_request_json, raw_api_request_with_query,
7};
8
// system_config section that publishes the machine-readable catalog of
// readable sources consumed by `kura read sources`.
const READ_SOURCE_CATALOG_SECTION: &str =
    "system_config.conventions::public_agent_read_source_catalog_v1";
// Endpoint that serves one system-config section by name.
const READ_SOURCE_CATALOG_ENDPOINT: &str = "/v1/system/config/section";
// Universal public read surface that accepts one declarative read_query body.
const READ_QUERY_ENDPOINT: &str = "/v4/agent/read-query";
// Schema version stamped into every read_query request body.
const READ_QUERY_SCHEMA_VERSION: &str = "read_query_request.v1";
// Examples and routing rules appended to the generated `--help` output.
const READ_QUERY_HELP: &str = r#"Examples:
  kura read_query --source-id activity_session_history --read-kind list --subject session_history --filters-json '{"date":"2026-04-02"}' --output-shape collection
  kura read_query --source-id performance_tests --read-kind timeline --subject test_timeline --filters-json '{"test_type":"cmj","measured_property":"jump_height_cm","date_from":"2026-01-01"}' --output-shape series
  kura read_query --source-id activity_session_history --read-kind aggregate --subject activity_summary --filters-json '{"date_from":"2026-04-01"}' --output-shape aggregate
  kura read_query --source-id authoritative_training_memory --read-kind aggregate --query-json '{"group_by":"exercise_id","aggregate":"top_load_max_kg","sort":{"field":"top_load_max_kg","direction":"desc"}}' --limit 20

Rules:
  Use read_query for facts and bounded summaries. Use analyze for trend, stagnation, driver, influence, or training-decision questions.
  Use source_id=performance_tests for benchmark, jump-test, and performance-test history questions; common aliases such as benchmarks are normalized to performance_tests.
  Use activity_session_history comparison_stream for exact progress; exercise_identity is only the movement-family view when variants are mixed.
  Continue with the returned cursor only when continuation.has_more=true.
"#;
26
/// Machine-readable contract describing how agents should construct
/// `kura read_query` calls (principles plus worked example payloads),
/// returned as a JSON value for publication by callers elsewhere.
pub fn public_read_query_payload_contract() -> Value {
    json!({
        "schema_version": "public_read_query_payload_contract.v1",
        "entrypoint": "kura read_query",
        "truth_namespace": "canonical_read_query",
        "principles": [
            "Use exactly one canonical source_id per call.",
            "Use cursor only for the continuation returned by a previous read_query response.",
            "Keep bounded training query fields on source_id=authoritative_training_memory only.",
            "For activity_session_history, use comparison_stream as the exact progress grouping; exercise_identity is only the movement-family view when variants are mixed."
        ],
        "examples": [
            {
                "label": "Latest activity sessions for one day",
                "payload": {
                    "source_id": "activity_session_history",
                    "read_kind": "list",
                    "subject": "session_history",
                    "filters": {
                        "date": "2026-04-02"
                    },
                    "output_shape": "collection"
                }
            },
            {
                "label": "Performance-test timeline",
                "payload": {
                    "source_id": "performance_tests",
                    "read_kind": "timeline",
                    "subject": "test_timeline",
                    "filters": {
                        "test_type": "cmj",
                        "measured_property": "jump_height_cm",
                        "date_from": "2026-01-01"
                    },
                    "output_shape": "series"
                }
            }
        ]
    })
}
68
// Subcommands under `kura read`. NOTE: `//` comments are used here (and on the
// other clap items below) because `///` doc comments would change the
// generated CLI help text.
#[derive(Subcommand)]
pub enum ReadCommands {
    /// List the machine-readable readable-source catalog or one source capability card
    Sources(ReadSourcesArgs),
    /// Run one declarative read_query request against the universal public read surface
    Query(ReadQueryArgs),
}
76
// Flags for `kura read sources`.
#[derive(Args)]
pub struct ReadSourcesArgs {
    /// Optional source id to return only one source capability card
    #[arg(long, alias = "source")]
    source_id: Option<String>,
}
83
// Flags for one `kura read query` call. Convenience flags (--date, --group-by,
// --sort-field, ...) are merged into the `filters`/`query` JSON objects by
// build_read_query_body; passing the same field both as a flag and inside
// --filters-json / --query-json with different values is an error.
#[derive(Args)]
#[command(after_help = READ_QUERY_HELP)]
pub struct ReadQueryArgs {
    /// Readable source id for one canonical read_query call
    #[arg(long)]
    source_id: String,
    /// Retrieval shape for this read
    #[arg(long, value_enum)]
    read_kind: ReadKindArg,
    /// Optional source-specific subject
    #[arg(long)]
    subject: Option<String>,
    /// Exact lookup key for projection-style sources
    #[arg(long)]
    lookup_key: Option<String>,
    /// Training exercise id or label filter
    #[arg(long)]
    exercise_id_or_label: Option<String>,
    /// Training session state filter
    #[arg(long)]
    session_state: Option<String>,
    /// Absolute local date in YYYY-MM-DD
    #[arg(long)]
    date: Option<String>,
    /// Absolute local start date in YYYY-MM-DD
    #[arg(long)]
    date_from: Option<String>,
    /// Absolute local end date in YYYY-MM-DD
    #[arg(long)]
    date_to: Option<String>,
    /// Full source-specific filters JSON object
    #[arg(long = "filters-json", alias = "filters")]
    filters_json: Option<String>,
    /// Optional projected field list (comma-separated)
    #[arg(long, value_delimiter = ',')]
    fields: Option<Vec<String>>,
    /// Optional bounded group_by field
    #[arg(long)]
    group_by: Option<String>,
    /// Optional bounded aggregate name
    #[arg(long)]
    aggregate: Option<String>,
    /// Optional sort field
    #[arg(long)]
    sort_field: Option<String>,
    /// Optional sort direction
    #[arg(long, value_enum)]
    sort_direction: Option<ReadSortDirectionArg>,
    /// Full bounded query JSON object
    #[arg(long = "query-json", alias = "query")]
    query_json: Option<String>,
    /// Optional exact page size
    #[arg(long)]
    limit: Option<u32>,
    /// Optional opaque continuation cursor
    #[arg(long)]
    cursor: Option<String>,
    /// Optional server-owned result handle
    #[arg(long)]
    result_handle: Option<String>,
    /// Optional response shape override
    #[arg(long, value_enum)]
    output_shape: Option<ReadOutputShapeArg>,
    /// Optional analysis escalation mode
    #[arg(long, value_enum)]
    analysis_mode: Option<ReadAnalysisModeArg>,
}
151
// Retrieval shape accepted by --read-kind.
#[derive(Copy, Clone, Debug, Eq, PartialEq, ValueEnum)]
pub enum ReadKindArg {
    Lookup,
    List,
    Timeline,
    Aggregate,
}

impl ReadKindArg {
    /// Wire value placed in the read_query request body.
    fn as_contract_str(self) -> &'static str {
        match self {
            Self::Lookup => "lookup",
            Self::List => "list",
            Self::Timeline => "timeline",
            Self::Aggregate => "aggregate",
        }
    }
}
170
// Response shape accepted by --output-shape; multi-word variants carry an
// explicit #[value(name)] so the CLI spelling matches the contract string.
#[derive(Copy, Clone, Debug, Eq, PartialEq, ValueEnum)]
pub enum ReadOutputShapeArg {
    Item,
    Collection,
    Series,
    Aggregate,
    #[value(name = "analysis_handoff")]
    AnalysisHandoff,
}

impl ReadOutputShapeArg {
    /// Wire value placed in the read_query request body.
    fn as_contract_str(self) -> &'static str {
        match self {
            Self::Item => "item",
            Self::Collection => "collection",
            Self::Series => "series",
            Self::Aggregate => "aggregate",
            Self::AnalysisHandoff => "analysis_handoff",
        }
    }
}
192
// Analysis escalation mode accepted by --analysis-mode.
#[derive(Copy, Clone, Debug, Eq, PartialEq, ValueEnum)]
pub enum ReadAnalysisModeArg {
    None,
    #[value(name = "allow_hybrid")]
    AllowHybrid,
    #[value(name = "require_analysis")]
    RequireAnalysis,
}

impl ReadAnalysisModeArg {
    /// Wire value placed in the read_query request body.
    fn as_contract_str(self) -> &'static str {
        match self {
            Self::None => "none",
            Self::AllowHybrid => "allow_hybrid",
            Self::RequireAnalysis => "require_analysis",
        }
    }
}
211
// Sort direction accepted by --sort-direction.
#[derive(Copy, Clone, Debug, Eq, PartialEq, ValueEnum)]
pub enum ReadSortDirectionArg {
    Asc,
    Desc,
}

impl ReadSortDirectionArg {
    /// Wire value placed in the read_query sort object.
    fn as_contract_str(self) -> &'static str {
        match self {
            Self::Asc => "asc",
            Self::Desc => "desc",
        }
    }
}
226
/// Dispatch a `kura read` subcommand and return its process exit code.
pub async fn run(api_url: &str, token: Option<&str>, command: ReadCommands) -> i32 {
    match command {
        ReadCommands::Sources(args) => read_sources(api_url, token, args).await,
        ReadCommands::Query(args) => read_query(api_url, token, args).await,
    }
}
233
/// Execute one read_query request directly; public thin wrapper over the
/// internal handler for callers that bypass the `read` subcommand tree.
pub async fn run_query(api_url: &str, token: Option<&str>, args: ReadQueryArgs) -> i32 {
    read_query(api_url, token, args).await
}
237
/// Handle `kura read sources`: fetch the readable-source catalog section and
/// print either the full catalog or one source capability card.
///
/// Returns the process exit code: 0 on 2xx, 1 on 4xx, 2 otherwise.
/// Transport failures and catalog-shaping failures terminate via `exit_error`.
async fn read_sources(api_url: &str, token: Option<&str>, args: ReadSourcesArgs) -> i32 {
    let query = vec![(
        "section".to_string(),
        READ_SOURCE_CATALOG_SECTION.to_string(),
    )];

    // Dry-run mode: describe the request instead of sending it.
    if dry_run_enabled() {
        return emit_dry_run_request(
            &reqwest::Method::GET,
            api_url,
            READ_SOURCE_CATALOG_ENDPOINT,
            token.is_some(),
            None,
            &query,
            &[],
            false,
            Some("Fetch the machine-readable read source catalog for agent routing."),
        );
    }

    let response = raw_api_request_with_query(
        api_url,
        reqwest::Method::GET,
        READ_SOURCE_CATALOG_ENDPOINT,
        token,
        &query,
    )
    .await
    .unwrap_or_else(|error| {
        exit_error(
            &format!("Failed to reach {READ_SOURCE_CATALOG_ENDPOINT}: {error}"),
            Some("Check API availability/auth and retry."),
        )
    });

    let (status, body) = response;
    if (200..=299).contains(&status) {
        // Re-shape the raw section payload before printing; shaping failures
        // exit with the carried (message, docs_hint) pair.
        let output = build_read_sources_output(body, args.source_id.as_deref())
            .unwrap_or_else(|(message, docs_hint)| exit_error(&message, docs_hint.as_deref()));
        print_json_stdout(&output);
        0
    } else {
        print_json_stderr(&body);
        if (400..=499).contains(&status) { 1 } else { 2 }
    }
}
284
/// Handle `kura read query`: build the canonical request body from flags and
/// POST it to the read_query endpoint. Returns the process exit code.
async fn read_query(api_url: &str, token: Option<&str>, args: ReadQueryArgs) -> i32 {
    let body = build_read_query_body(&args);
    execute_json_read(api_url, token, READ_QUERY_ENDPOINT, body).await
}
289
/// POST `body` to `path` and print the JSON response, honoring dry-run mode.
///
/// Returns the process exit code (see `emit_json_response`); transport
/// failures terminate via `exit_error`.
async fn execute_json_read(api_url: &str, token: Option<&str>, path: &str, body: Value) -> i32 {
    // Dry-run mode: describe the request instead of sending it.
    if dry_run_enabled() {
        return emit_dry_run_request(
            &reqwest::Method::POST,
            api_url,
            path,
            token.is_some(),
            Some(&body),
            &[],
            &[],
            false,
            None,
        );
    }

    let response = raw_api_request_json(
        api_url,
        reqwest::Method::POST,
        path,
        token,
        Some(body),
        &[],
        &[],
    )
    .await
    .unwrap_or_else(|error| {
        exit_error(
            &format!("Failed to reach {path}: {error}"),
            Some("Check API availability/auth and retry."),
        )
    });

    emit_json_response(response)
}
324
325fn emit_json_response(response: RawApiResponse) -> i32 {
326    if (200..=299).contains(&response.status) {
327        print_json_stdout(&response.body);
328        0
329    } else {
330        print_json_stderr(&response.body);
331        if (400..=499).contains(&response.status) {
332            1
333        } else {
334            2
335        }
336    }
337}
338
/// Assemble the canonical read_query request body from CLI flags.
///
/// Convenience flags are folded into the `filters` and `query` JSON objects
/// alongside any --filters-json / --query-json input; a flag that duplicates a
/// JSON field with a different value aborts via `exit_error` (see
/// `insert_json_field_checked`). Empty optional sections are omitted entirely.
fn build_read_query_body(args: &ReadQueryArgs) -> Value {
    let mut body = Map::new();
    body.insert(
        "schema_version".to_string(),
        json!(READ_QUERY_SCHEMA_VERSION),
    );
    // source_id is required, must be non-empty, and known aliases are
    // normalized to their canonical id.
    body.insert(
        "source_id".to_string(),
        json!(canonical_read_source_id(&required_non_empty_string(
            &args.source_id,
            "--source-id",
        ))),
    );
    body.insert(
        "read_kind".to_string(),
        json!(args.read_kind.as_contract_str()),
    );

    if let Some(subject) = optional_non_empty_string(args.subject.as_deref()) {
        body.insert("subject".to_string(), json!(subject));
    }

    // Merge per-field filter flags into the --filters-json object.
    let mut filters =
        parse_optional_json_object_arg(args.filters_json.as_deref(), "--filters-json");
    insert_optional_string_checked(
        &mut filters,
        "lookup_key",
        args.lookup_key.as_deref(),
        "--lookup-key",
    );
    insert_optional_string_checked(
        &mut filters,
        "exercise_id_or_label",
        args.exercise_id_or_label.as_deref(),
        "--exercise-id-or-label",
    );
    insert_optional_string_checked(
        &mut filters,
        "session_state",
        args.session_state.as_deref(),
        "--session-state",
    );
    insert_optional_string_checked(&mut filters, "date", args.date.as_deref(), "--date");
    insert_optional_string_checked(
        &mut filters,
        "date_from",
        args.date_from.as_deref(),
        "--date-from",
    );
    insert_optional_string_checked(
        &mut filters,
        "date_to",
        args.date_to.as_deref(),
        "--date-to",
    );
    if !filters.is_empty() {
        body.insert("filters".to_string(), Value::Object(filters));
    }

    // Merge bounded-query flags into the --query-json object.
    let mut query = parse_optional_json_object_arg(args.query_json.as_deref(), "--query-json");
    if let Some(fields) = normalize_non_empty_string_list(args.fields.as_ref(), "--fields") {
        insert_json_field_checked(&mut query, "fields", json!(fields), "--fields");
    }
    insert_optional_string_checked(
        &mut query,
        "group_by",
        args.group_by.as_deref(),
        "--group-by",
    );
    insert_optional_string_checked(
        &mut query,
        "aggregate",
        args.aggregate.as_deref(),
        "--aggregate",
    );

    // --sort-direction is only meaningful alongside --sort-field.
    match (
        optional_non_empty_string(args.sort_field.as_deref()),
        args.sort_direction,
    ) {
        (Some(field), direction) => {
            let mut sort = Map::new();
            sort.insert("field".to_string(), json!(field));
            if let Some(direction) = direction {
                sort.insert("direction".to_string(), json!(direction.as_contract_str()));
            }
            insert_json_field_checked(&mut query, "sort", Value::Object(sort), "--sort-field");
        }
        (None, Some(_)) => exit_error(
            "`kura read_query` requires --sort-field when --sort-direction is present.",
            Some("Provide --sort-field together with --sort-direction, or omit both."),
        ),
        (None, None) => {}
    }

    if !query.is_empty() {
        body.insert("query".to_string(), Value::Object(query));
    }

    if let Some(limit) = args.limit {
        body.insert("limit".to_string(), json!(limit));
    }
    insert_optional_string(&mut body, "cursor", args.cursor.as_deref());
    insert_optional_string(&mut body, "result_handle", args.result_handle.as_deref());
    if let Some(output_shape) = args.output_shape {
        body.insert(
            "output_shape".to_string(),
            json!(output_shape.as_contract_str()),
        );
    }
    if let Some(analysis_mode) = args.analysis_mode {
        body.insert(
            "analysis_mode".to_string(),
            json!(analysis_mode.as_contract_str()),
        );
    }

    Value::Object(body)
}
458
459fn build_read_sources_output(
460    section_body: Value,
461    selected_source_id: Option<&str>,
462) -> Result<Value, (String, Option<String>)> {
463    let catalog = section_body
464        .pointer("/value/contract")
465        .and_then(Value::as_object)
466        .cloned()
467        .ok_or_else(|| {
468            (
469                "Read source catalog section did not expose a contract payload.".to_string(),
470                Some(
471                    "Check system_config.conventions::public_agent_read_source_catalog_v1 and retry."
472                        .to_string(),
473                ),
474            )
475        })?;
476    let root = section_body.as_object().ok_or_else(|| {
477        (
478            "Read source catalog response must be a JSON object.".to_string(),
479            Some("Retry once the server returns the readable source catalog section.".to_string()),
480        )
481    })?;
482
483    let catalog_ref = READ_SOURCE_CATALOG_SECTION
484        .split("::")
485        .last()
486        .unwrap_or("public_agent_read_source_catalog_v1");
487
488    let section = root
489        .get("section")
490        .and_then(Value::as_str)
491        .unwrap_or(READ_SOURCE_CATALOG_SECTION);
492    let handle = root.get("handle").cloned().unwrap_or(Value::Null);
493    let version = root.get("version").cloned().unwrap_or(Value::Null);
494    let updated_at = root.get("updated_at").cloned().unwrap_or(Value::Null);
495
496    if let Some(source_id) =
497        selected_source_id.and_then(|value| optional_non_empty_string(Some(value)))
498    {
499        let sources = catalog
500            .get("sources")
501            .and_then(Value::as_object)
502            .ok_or_else(|| {
503                (
504                    "Read source catalog contract did not expose a sources map.".to_string(),
505                    Some("Check the source catalog contract and retry.".to_string()),
506                )
507            })?;
508        let source = sources.get(source_id.as_str()).cloned().ok_or_else(|| {
509            let available = sources.keys().cloned().collect::<Vec<_>>();
510            (
511                format!("Unknown read source_id '{source_id}'."),
512                Some(format!(
513                    "Use one declared source_id from the read_query schema, for example: {}.",
514                    available.join(", ")
515                )),
516            )
517        })?;
518        return Ok(json!({
519            "catalog_ref": catalog_ref,
520            "catalog_section": section,
521            "catalog_handle": handle,
522            "catalog_version": version,
523            "catalog_updated_at": updated_at,
524            "source_id": source_id,
525            "source": source,
526        }));
527    }
528
529    let mut output = catalog;
530    output.insert("catalog_ref".to_string(), json!(catalog_ref));
531    output.insert("catalog_section".to_string(), json!(section));
532    output.insert("catalog_handle".to_string(), handle);
533    output.insert("catalog_version".to_string(), version);
534    output.insert("catalog_updated_at".to_string(), updated_at);
535    Ok(Value::Object(output))
536}
537
/// Trim `raw` and return it as an owned String, or None when the input is
/// absent or whitespace-only.
fn optional_non_empty_string(raw: Option<&str>) -> Option<String> {
    let trimmed = raw?.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
543
544fn required_non_empty_string(raw: &str, flag_name: &str) -> String {
545    optional_non_empty_string(Some(raw)).unwrap_or_else(|| {
546        exit_error(
547            &format!("`kura read` requires a non-empty {flag_name} value."),
548            Some("Provide the exact contract value instead of an empty string."),
549        )
550    })
551}
552
/// Normalize `source_id` onto its canonical catalog id.
///
/// Known performance-test aliases (case-, hyphen-, and space-insensitive)
/// collapse to "performance_tests"; anything else passes through trimmed but
/// otherwise unchanged.
fn canonical_read_source_id(source_id: &str) -> String {
    const PERFORMANCE_TEST_ALIASES: [&str; 6] = [
        "benchmark",
        "benchmarks",
        "benchmark_results",
        "performance_test",
        "performance_test_truth",
        "jump_tests",
    ];

    let trimmed = source_id.trim();
    let normalized = trimmed
        .to_ascii_lowercase()
        .replace('-', "_")
        .replace(' ', "_");
    if PERFORMANCE_TEST_ALIASES.contains(&normalized.as_str()) {
        "performance_tests".to_string()
    } else {
        trimmed.to_string()
    }
}
570
571fn normalize_non_empty_string_list(
572    values: Option<&Vec<String>>,
573    flag_name: &str,
574) -> Option<Vec<String>> {
575    let values = values?;
576    let normalized = values
577        .iter()
578        .filter_map(|value| optional_non_empty_string(Some(value.as_str())))
579        .collect::<Vec<_>>();
580    if normalized.is_empty() {
581        exit_error(
582            &format!("`kura read_query` requires at least one non-empty value for {flag_name}."),
583            Some("Provide one or more comma-separated contract field names."),
584        );
585    }
586    Some(normalized)
587}
588
589fn parse_optional_json_object_arg(raw: Option<&str>, flag_name: &str) -> Map<String, Value> {
590    let Some(raw) = optional_non_empty_string(raw) else {
591        return Map::new();
592    };
593    let parsed: Value = serde_json::from_str(raw.as_str()).unwrap_or_else(|error| {
594        exit_error(
595            &format!("`kura read_query` could not parse {flag_name} as JSON: {error}"),
596            Some("Pass a JSON object, for example --filters-json '{\"date\":\"2026-04-02\"}'."),
597        )
598    });
599    parsed.as_object().cloned().unwrap_or_else(|| {
600        exit_error(
601            &format!("`kura read_query` requires {flag_name} to be a JSON object."),
602            Some("Use an object like '{\"date_from\":\"2026-04-01\"}', not an array or string."),
603        )
604    })
605}
606
607fn insert_json_field_checked(
608    target: &mut Map<String, Value>,
609    field: &str,
610    value: Value,
611    flag_name: &str,
612) {
613    if let Some(existing) = target.get(field) {
614        if existing == &value {
615            return;
616        }
617        exit_error(
618            &format!("`kura read_query` received {field} twice with different values."),
619            Some(&format!(
620                "Use either {flag_name} or the same field inside the JSON object, not both."
621            )),
622        );
623    }
624    target.insert(field.to_string(), value);
625}
626
627fn insert_optional_string_checked(
628    target: &mut Map<String, Value>,
629    field: &str,
630    raw: Option<&str>,
631    flag_name: &str,
632) {
633    if let Some(value) = optional_non_empty_string(raw) {
634        insert_json_field_checked(target, field, json!(value), flag_name);
635    }
636}
637
638fn insert_optional_string(target: &mut Map<String, Value>, field: &str, raw: Option<&str>) {
639    if let Some(value) = optional_non_empty_string(raw) {
640        target.insert(field.to_string(), json!(value));
641    }
642}
643
#[cfg(test)]
mod tests {
    // Unit tests for request-body construction, alias normalization, and
    // catalog output shaping. Network paths are not exercised here.
    use super::*;

    // Every flag populated: filters, bounded query, sort, paging, and mode
    // fields must all land in the serialized body.
    #[test]
    fn build_read_query_body_serializes_filters_and_bounded_program() {
        let body = build_read_query_body(&ReadQueryArgs {
            source_id: "authoritative_training_memory".to_string(),
            read_kind: ReadKindArg::List,
            subject: Some("exercise_history".to_string()),
            lookup_key: None,
            exercise_id_or_label: Some("Back Squat".to_string()),
            session_state: Some("closed".to_string()),
            date: None,
            date_from: Some("2026-03-01".to_string()),
            date_to: Some("2026-04-01".to_string()),
            filters_json: None,
            fields: Some(vec![
                "training_date".to_string(),
                "exercise_label".to_string(),
                "top_load_kg".to_string(),
            ]),
            group_by: Some("exercise_label".to_string()),
            aggregate: Some("top_load_max_kg".to_string()),
            sort_field: Some("top_load_max_kg".to_string()),
            sort_direction: Some(ReadSortDirectionArg::Desc),
            query_json: None,
            limit: Some(20),
            cursor: Some("rq1:opaque".to_string()),
            result_handle: None,
            output_shape: Some(ReadOutputShapeArg::Aggregate),
            analysis_mode: Some(ReadAnalysisModeArg::AllowHybrid),
        });

        assert_eq!(body["schema_version"], json!(READ_QUERY_SCHEMA_VERSION));
        assert_eq!(body["source_id"], json!("authoritative_training_memory"));
        assert_eq!(body["read_kind"], json!("list"));
        assert_eq!(body["subject"], json!("exercise_history"));
        assert_eq!(body["filters"]["exercise_id_or_label"], json!("Back Squat"));
        assert_eq!(body["filters"]["session_state"], json!("closed"));
        assert_eq!(body["filters"]["date_from"], json!("2026-03-01"));
        assert_eq!(body["filters"]["date_to"], json!("2026-04-01"));
        assert_eq!(
            body["query"]["fields"],
            json!(["training_date", "exercise_label", "top_load_kg"])
        );
        assert_eq!(body["query"]["group_by"], json!("exercise_label"));
        assert_eq!(body["query"]["aggregate"], json!("top_load_max_kg"));
        assert_eq!(body["query"]["sort"]["field"], json!("top_load_max_kg"));
        assert_eq!(body["query"]["sort"]["direction"], json!("desc"));
        assert_eq!(body["limit"], json!(20));
        assert_eq!(body["cursor"], json!("rq1:opaque"));
        assert_eq!(body["output_shape"], json!("aggregate"));
        assert_eq!(body["analysis_mode"], json!("allow_hybrid"));
    }

    // Whitespace-only and absent optional inputs must not produce empty
    // `filters`/`query` objects in the body.
    #[test]
    fn build_read_query_body_omits_empty_optional_sections() {
        let body = build_read_query_body(&ReadQueryArgs {
            source_id: "recovery".to_string(),
            read_kind: ReadKindArg::Timeline,
            subject: None,
            lookup_key: Some("  ".to_string()),
            exercise_id_or_label: None,
            session_state: None,
            date: None,
            date_from: None,
            date_to: None,
            filters_json: None,
            fields: None,
            group_by: None,
            aggregate: None,
            sort_field: None,
            sort_direction: None,
            query_json: None,
            limit: None,
            cursor: None,
            result_handle: None,
            output_shape: Some(ReadOutputShapeArg::Series),
            analysis_mode: None,
        });

        assert!(body.get("filters").is_none());
        assert!(body.get("query").is_none());
        assert_eq!(body["source_id"], json!("recovery"));
        assert_eq!(body["read_kind"], json!("timeline"));
        assert_eq!(body["output_shape"], json!("series"));
    }

    // Raw --filters-json / --query-json objects pass through into the body.
    #[test]
    fn build_read_query_body_accepts_json_filters_and_query() {
        let body = build_read_query_body(&ReadQueryArgs {
            source_id: "authoritative_training_memory".to_string(),
            read_kind: ReadKindArg::Aggregate,
            subject: None,
            lookup_key: None,
            exercise_id_or_label: None,
            session_state: None,
            date: None,
            date_from: None,
            date_to: None,
            filters_json: Some(r#"{"date_from":"2026-03-01","date_to":"2026-04-01"}"#.to_string()),
            fields: None,
            group_by: None,
            aggregate: None,
            sort_field: None,
            sort_direction: None,
            query_json: Some(
                r#"{"group_by":"exercise_id","aggregate":"top_load_max_kg","sort":{"field":"top_load_max_kg","direction":"desc"}}"#.to_string(),
            ),
            limit: Some(10),
            cursor: None,
            result_handle: None,
            output_shape: Some(ReadOutputShapeArg::Aggregate),
            analysis_mode: Some(ReadAnalysisModeArg::AllowHybrid),
        });

        assert_eq!(body["filters"]["date_from"], json!("2026-03-01"));
        assert_eq!(body["filters"]["date_to"], json!("2026-04-01"));
        assert_eq!(body["query"]["group_by"], json!("exercise_id"));
        assert_eq!(body["query"]["aggregate"], json!("top_load_max_kg"));
        assert_eq!(body["query"]["sort"]["direction"], json!("desc"));
    }

    // Alias source ids (e.g. "benchmarks") are canonicalized before the body
    // is sent.
    #[test]
    fn build_read_query_body_normalizes_benchmark_source_aliases() {
        let body = build_read_query_body(&ReadQueryArgs {
            source_id: "benchmarks".to_string(),
            read_kind: ReadKindArg::Timeline,
            subject: Some("test_timeline".to_string()),
            lookup_key: None,
            exercise_id_or_label: None,
            session_state: None,
            date: None,
            date_from: None,
            date_to: None,
            filters_json: Some(r#"{"test_type":"cmj"}"#.to_string()),
            fields: None,
            group_by: None,
            aggregate: None,
            sort_field: None,
            sort_direction: None,
            query_json: None,
            limit: None,
            cursor: None,
            result_handle: None,
            output_shape: Some(ReadOutputShapeArg::Series),
            analysis_mode: None,
        });

        assert_eq!(body["source_id"], json!("performance_tests"));
        assert_eq!(body["filters"]["test_type"], json!("cmj"));
    }

    // Selecting one source id returns only that capability card plus catalog
    // metadata derived from the section envelope.
    #[test]
    fn build_read_sources_output_returns_single_source_card_when_requested() {
        let output = build_read_sources_output(
            json!({
                "section": READ_SOURCE_CATALOG_SECTION,
                "handle": "system_config/global@v9",
                "version": 9,
                "updated_at": "2026-04-02T10:00:00Z",
                "value": {
                    "rules": ["x"],
                    "contract": {
                        "schema_version": "public_agent_read_source_catalog.v1",
                        "sources": {
                            "recovery": {
                                "source_id": "recovery",
                                "supported_read_kinds": ["lookup", "list", "timeline", "aggregate"]
                            }
                        }
                    }
                }
            }),
            Some("recovery"),
        )
        .expect("single-source output should build");

        assert_eq!(
            output["catalog_ref"],
            json!("public_agent_read_source_catalog_v1")
        );
        assert_eq!(output["source_id"], json!("recovery"));
        assert_eq!(output["source"]["source_id"], json!("recovery"));
        assert_eq!(output["catalog_handle"], json!("system_config/global@v9"));
    }
}