use std::io::Read;
use chrono::{DateTime, Utc};
use clap::{Args, Subcommand, ValueEnum};
use serde_json::{Value, json};
use uuid::Uuid;
use crate::util::{
api_request, exit_error, print_json_stderr, print_json_stdout, raw_api_request,
raw_api_request_with_query, read_json_from_file,
};
// Subcommands of `kura agent`; dispatched by `run` below.
// (Plain `//` comments are used so clap does not pick them up as help text.)
#[derive(Subcommand)]
pub enum AgentCommands {
    // GET /v1/agent/capabilities.
    Capabilities,
    // POST a structured routine-training payload (see `log_training`).
    LogTraining(LogTrainingArgs),
    // POST a raw conversational turn as evidence (see `log_turn`).
    LogTurn(LogTurnArgs),
    // Print the logging bootstrap contract from the capabilities manifest,
    // optionally narrowed to a single intent recipe.
    LoggingBootstrap {
        #[arg(long)]
        intent: Option<String>,
    },
    // GET /v1/agent/context with optional shaping parameters.
    Context {
        #[arg(long)]
        exercise_limit: Option<u32>,
        #[arg(long)]
        strength_limit: Option<u32>,
        #[arg(long)]
        custom_limit: Option<u32>,
        #[arg(long)]
        task_intent: Option<String>,
        #[arg(long)]
        include_system: Option<bool>,
        #[arg(long)]
        budget_tokens: Option<u32>,
    },
    // GET /v1/agent/context/section-index; same shaping flags as Context.
    SectionIndex {
        #[arg(long)]
        exercise_limit: Option<u32>,
        #[arg(long)]
        strength_limit: Option<u32>,
        #[arg(long)]
        custom_limit: Option<u32>,
        #[arg(long)]
        task_intent: Option<String>,
        #[arg(long)]
        include_system: Option<bool>,
        #[arg(long)]
        budget_tokens: Option<u32>,
    },
    // GET /v1/agent/context/section-fetch for one section page.
    SectionFetch {
        #[arg(long)]
        section: String,
        #[arg(long)]
        limit: Option<u32>,
        #[arg(long)]
        cursor: Option<String>,
        #[arg(long)]
        fields: Option<String>,
        #[arg(long)]
        task_intent: Option<String>,
    },
    // POST /v1/agent/answer-admissibility with an intent + draft answer.
    AnswerAdmissibility {
        #[arg(long)]
        task_intent: String,
        #[arg(long)]
        draft_answer: String,
    },
    // Structured write with read-after-write verification; the old
    // `write-with-proof` name is kept as an alias for compatibility.
    #[command(name = "write-structured", alias = "write-with-proof")]
    WriteWithProof(WriteWithProofArgs),
    // Evidence lookups (currently only `event`).
    Evidence {
        #[command(subcommand)]
        command: AgentEvidenceCommands,
    },
    // Persist the save-confirmation preference as a /v1/events event.
    SetSaveConfirmationMode {
        #[arg(value_enum)]
        mode: SaveConfirmationMode,
    },
    // POST /v1/agent/visualization/resolve.
    ResolveVisualization(ResolveVisualizationArgs),
    // Escape hatch for arbitrary /v1/agent or /v2/agent requests.
    Request(AgentRequestArgs),
}
// User preference controlling save confirmations; serialized via `as_str`
// ("auto" / "always" / "never") into a preference.set event.
#[derive(Copy, Clone, Debug, Eq, PartialEq, ValueEnum)]
pub enum SaveConfirmationMode {
    Auto,
    Always,
    Never,
}
// Session completion flag attached to structured writes; serialized via
// `as_str` ("ongoing" / "completed_in_batch").
#[derive(Copy, Clone, Debug, Eq, PartialEq, ValueEnum)]
pub enum SessionCompletionStatus {
    Ongoing,
    CompletedInBatch,
}
impl SessionCompletionStatus {
fn as_str(self) -> &'static str {
match self {
SessionCompletionStatus::Ongoing => "ongoing",
SessionCompletionStatus::CompletedInBatch => "completed_in_batch",
}
}
}
// Draft handling for conversation writes; serialized via `as_str`
// ("append" / "finalize"). Exposed as a hidden flag on WriteWithProofArgs.
#[derive(Copy, Clone, Debug, Eq, PartialEq, ValueEnum)]
pub enum ConversationDraftMode {
    Append,
    Finalize,
}
impl ConversationDraftMode {
fn as_str(self) -> &'static str {
match self {
ConversationDraftMode::Append => "append",
ConversationDraftMode::Finalize => "finalize",
}
}
}
impl SaveConfirmationMode {
fn as_str(self) -> &'static str {
match self {
SaveConfirmationMode::Auto => "auto",
SaveConfirmationMode::Always => "always",
SaveConfirmationMode::Never => "never",
}
}
}
// Evidence subcommands under `kura agent evidence`.
#[derive(Subcommand)]
pub enum AgentEvidenceCommands {
    // GET /v1/agent/evidence/event/{event_id}.
    Event {
        #[arg(long)]
        event_id: Uuid,
    },
}
// Arguments for `kura agent request`: a constrained HTTP escape hatch
// limited to /v1/agent and /v2/agent paths (enforced by normalize_agent_path).
#[derive(Args)]
pub struct AgentRequestArgs {
    // HTTP method name, case-insensitive (validated by parse_method).
    pub method: String,
    // Endpoint path; relative ("context") or absolute ("/v1/agent/context").
    pub path: String,
    // Inline JSON body.
    #[arg(long, short = 'd')]
    pub data: Option<String>,
    // JSON body read from a file ('-' for stdin); mutually exclusive with --data.
    #[arg(long, short = 'f', conflicts_with = "data")]
    pub data_file: Option<String>,
    // Repeatable key=value query parameters.
    #[arg(long, short = 'q')]
    pub query: Vec<String>,
    // Repeatable Key:Value headers.
    #[arg(long, short = 'H')]
    pub header: Vec<String>,
    // Raw-output flag, forwarded to api_request.
    // NOTE(review): exact raw/include semantics live in util::api_request — confirm.
    #[arg(long)]
    pub raw: bool,
    // Include-response-metadata flag, forwarded to api_request.
    #[arg(long, short = 'i')]
    pub include: bool,
}
// Arguments for `kura agent log-training`: exactly one of inline JSON
// (--data) or a JSON file (--request-file), enforced by clap.
#[derive(Args)]
pub struct LogTrainingArgs {
    // Inline JSON object with the routine-training payload.
    #[arg(
        long,
        short = 'd',
        required_unless_present = "request_file",
        conflicts_with = "request_file"
    )]
    pub data: Option<String>,
    // Path to a JSON payload file ('-' supported via read_json_from_file).
    #[arg(
        long,
        short = 'f',
        required_unless_present = "data",
        conflicts_with = "data"
    )]
    pub request_file: Option<String>,
}
// Arguments for `kura agent log-turn`: the raw user turn plus optional
// session/timestamp metadata (see build_log_turn_request).
#[derive(Args)]
pub struct LogTurnArgs {
    // Positional message text; required unless --message-file is given.
    #[arg(value_name = "MESSAGE", required_unless_present = "message_file")]
    pub message: Option<String>,
    // File (or '-' for stdin) holding the raw message text.
    #[arg(long, conflicts_with = "message")]
    pub message_file: Option<String>,
    // Optional session hint forwarded as session_hint.session_id.
    #[arg(long)]
    pub session_id: Option<String>,
    // Evidence modality; defaults to "chat_message" when omitted.
    #[arg(long)]
    pub modality: Option<String>,
    // RFC3339 timestamps; validated and normalized to UTC.
    #[arg(long)]
    pub recorded_at: Option<String>,
    #[arg(long)]
    pub observed_at: Option<String>,
    // Caller-supplied idempotency key for safe retries.
    #[arg(long)]
    pub idempotency_key: Option<String>,
}
// Arguments for `kura agent write-structured`.
//
// Two modes: (a) --request-file supplies the full request body, with only
// clarification-resolution flags still honored; (b) --events-file plus
// repeated --target build the request from parts. The conflicts_with_all
// attributes keep the two modes mutually exclusive.
#[derive(Args)]
pub struct WriteWithProofArgs {
    // Events payload (JSON array, or object with an events array).
    #[arg(
        long,
        required_unless_present = "request_file",
        conflicts_with = "request_file"
    )]
    pub events_file: Option<String>,
    // Repeatable read-after-write targets, "projection_type:key".
    #[arg(
        long,
        required_unless_present = "request_file",
        conflicts_with = "request_file"
    )]
    pub target: Vec<String>,
    // Verification timeout forwarded in the request body.
    #[arg(long)]
    pub verify_timeout_ms: Option<u64>,
    // Non-trivial write confirmation: token or pre-built JSON file.
    #[arg(long, conflicts_with_all = ["request_file", "non_trivial_confirmation_file"])]
    pub non_trivial_confirmation_token: Option<String>,
    #[arg(long, conflicts_with_all = ["request_file", "non_trivial_confirmation_token"])]
    pub non_trivial_confirmation_file: Option<String>,
    // Intent handshake: a plain goal string or a pre-built JSON file.
    #[arg(long, conflicts_with_all = ["request_file", "intent_handshake_file"])]
    pub intent_goal: Option<String>,
    #[arg(long, conflicts_with_all = ["request_file", "intent_goal"])]
    pub intent_handshake_file: Option<String>,
    // High-impact write confirmation: token or pre-built JSON file.
    #[arg(long, conflicts_with_all = ["request_file", "high_impact_confirmation_file"])]
    pub high_impact_confirmation_token: Option<String>,
    #[arg(long, conflicts_with_all = ["request_file", "high_impact_confirmation_token"])]
    pub high_impact_confirmation_file: Option<String>,
    // Repeatable clarification resolution files.
    #[arg(long, conflicts_with = "request_file")]
    pub clarification_resolution_file: Vec<String>,
    // Blocked-response JSON used to resume a clarification flow.
    #[arg(long)]
    pub resume_file: Option<String>,
    // Which prompt in --resume-file to answer (needed when several exist).
    #[arg(long)]
    pub clarification_prompt_id: Option<Uuid>,
    // Extra clarification metadata forwarded to the resolution builder.
    #[arg(long)]
    pub clarification_route_family: Option<String>,
    #[arg(long)]
    pub clarification_protocol_variant: Option<String>,
    #[arg(long)]
    pub clarification_note: Option<String>,
    // Session completion status attached to the write.
    #[arg(long, value_enum, conflicts_with = "request_file")]
    pub session_status: Option<SessionCompletionStatus>,
    // Hidden: conversation draft handling (append exempts --target).
    #[arg(long, value_enum, conflicts_with = "request_file", hide = true)]
    pub conversation_draft_mode: Option<ConversationDraftMode>,
    // Full request body; excludes all of the piecewise flags above.
    #[arg(long, conflicts_with_all = ["events_file", "target", "verify_timeout_ms", "non_trivial_confirmation_token", "non_trivial_confirmation_file", "intent_goal", "intent_handshake_file", "high_impact_confirmation_token", "high_impact_confirmation_file", "clarification_resolution_file", "session_status", "conversation_draft_mode"])]
    pub request_file: Option<String>,
}
// Arguments for `kura agent resolve-visualization`
// (POST /v1/agent/visualization/resolve).
#[derive(Args)]
pub struct ResolveVisualizationArgs {
    // Full request body file; replaces all other flags when present.
    #[arg(long, conflicts_with = "task_intent")]
    pub request_file: Option<String>,
    // Required in flag mode; must be non-empty after trimming.
    #[arg(long, required_unless_present = "request_file")]
    pub task_intent: Option<String>,
    #[arg(long)]
    pub user_preference_override: Option<String>,
    #[arg(long)]
    pub complexity_hint: Option<String>,
    // NOTE(review): a bool flag with default_value_t = true appears to have
    // no way to be set false from the CLI — confirm this is intentional.
    #[arg(long, default_value_t = true)]
    pub allow_rich_rendering: bool,
    // Optional visualization_spec JSON file merged into the body.
    #[arg(long)]
    pub spec_file: Option<String>,
    #[arg(long)]
    pub telemetry_session_id: Option<String>,
}
/// Dispatch a parsed `kura agent` subcommand to its handler.
///
/// Returns the process exit code produced by the handler (handlers
/// forward the code from `api_request` or their own error mapping).
pub async fn run(api_url: &str, token: Option<&str>, command: AgentCommands) -> i32 {
    match command {
        AgentCommands::Capabilities => capabilities(api_url, token).await,
        AgentCommands::LogTraining(args) => log_training(api_url, token, args).await,
        AgentCommands::LogTurn(args) => log_turn(api_url, token, args).await,
        AgentCommands::LoggingBootstrap { intent } => {
            logging_bootstrap(api_url, token, intent).await
        }
        AgentCommands::Context {
            exercise_limit,
            strength_limit,
            custom_limit,
            task_intent,
            include_system,
            budget_tokens,
        } => {
            context(
                api_url,
                token,
                exercise_limit,
                strength_limit,
                custom_limit,
                task_intent,
                include_system,
                budget_tokens,
            )
            .await
        }
        AgentCommands::SectionIndex {
            exercise_limit,
            strength_limit,
            custom_limit,
            task_intent,
            include_system,
            budget_tokens,
        } => {
            section_index(
                api_url,
                token,
                exercise_limit,
                strength_limit,
                custom_limit,
                task_intent,
                include_system,
                budget_tokens,
            )
            .await
        }
        AgentCommands::SectionFetch {
            section,
            limit,
            cursor,
            fields,
            task_intent,
        } => section_fetch(api_url, token, section, limit, cursor, fields, task_intent).await,
        AgentCommands::AnswerAdmissibility {
            task_intent,
            draft_answer,
        } => answer_admissibility(api_url, token, task_intent, draft_answer).await,
        AgentCommands::WriteWithProof(args) => write_with_proof(api_url, token, args).await,
        AgentCommands::Evidence { command } => match command {
            AgentEvidenceCommands::Event { event_id } => {
                evidence_event(api_url, token, event_id).await
            }
        },
        AgentCommands::SetSaveConfirmationMode { mode } => {
            set_save_confirmation_mode(api_url, token, mode).await
        }
        AgentCommands::ResolveVisualization(args) => {
            resolve_visualization(api_url, token, args).await
        }
        AgentCommands::Request(args) => request(api_url, token, args).await,
    }
}
/// GET /v1/agent/capabilities and print the manifest via `api_request`.
async fn capabilities(api_url: &str, token: Option<&str>) -> i32 {
    api_request(
        api_url,
        reqwest::Method::GET,
        "/v1/agent/capabilities",
        token,
        None, // no body
        &[],  // no query parameters
        &[],  // no extra headers
        false, // raw
        false, // include
    )
    .await
}
/// Trim and ASCII-lowercase a bootstrap intent, returning `None` when the
/// result is empty.
fn normalize_logging_bootstrap_intent(intent: &str) -> Option<String> {
    Some(intent.trim().to_ascii_lowercase()).filter(|value| !value.is_empty())
}
/// Pull `task_bootstrap_contracts.logging` out of the capabilities
/// manifest, but only when it is actually a JSON object.
fn extract_logging_bootstrap_contract(capabilities: &Value) -> Option<Value> {
    match capabilities.pointer("/task_bootstrap_contracts/logging") {
        Some(contract) if contract.is_object() => Some(contract.clone()),
        _ => None,
    }
}
/// Collect the `intent_id` of every recipe in the contract; missing or
/// malformed `intent_recipes` yields an empty list.
fn available_logging_bootstrap_intents(contract: &Value) -> Vec<String> {
    contract
        .get("intent_recipes")
        .and_then(Value::as_array)
        .map(|recipes| {
            recipes
                .iter()
                .filter_map(|recipe| recipe.get("intent_id").and_then(Value::as_str))
                .map(str::to_string)
                .collect()
        })
        .unwrap_or_default()
}
/// Build the JSON printed by `kura agent logging-bootstrap`.
///
/// With no intent the full contract is returned unchanged. With an
/// intent, the recipe whose `intent_id` matches case-insensitively is
/// projected into a trimmed payload. Failures come back as `Err` with a
/// usage-error JSON object suitable for stderr.
fn build_logging_bootstrap_output(contract: &Value, intent: Option<&str>) -> Result<Value, Value> {
    let Some(intent) = intent else {
        return Ok(contract.clone());
    };
    let Some(normalized_intent) = normalize_logging_bootstrap_intent(intent) else {
        return Err(json!({
            "error": "usage_error",
            "message": "--intent must not be empty",
        }));
    };
    // Search in place and clone only the matching recipe, instead of
    // cloning the entire intent_recipes array up front.
    let recipe = contract
        .get("intent_recipes")
        .and_then(Value::as_array)
        .and_then(|recipes| {
            recipes.iter().find(|recipe| {
                recipe
                    .get("intent_id")
                    .and_then(Value::as_str)
                    .map(|value| value.eq_ignore_ascii_case(&normalized_intent))
                    .unwrap_or(false)
            })
        })
        .cloned();
    let Some(recipe) = recipe else {
        return Err(json!({
            "error": "usage_error",
            "message": format!("Unknown logging bootstrap intent: {intent}"),
            "available_intents": available_logging_bootstrap_intents(contract),
        }));
    };
    Ok(json!({
        "schema_version": contract.get("schema_version").cloned().unwrap_or(Value::Null),
        "task_family": contract.get("task_family").cloned().unwrap_or(Value::Null),
        "bootstrap_surface": contract.get("bootstrap_surface").cloned().unwrap_or(Value::Null),
        "intent_recipe": recipe,
        "save_states": contract.get("save_states").cloned().unwrap_or_else(|| json!([])),
        "upgrade_hints": contract.get("upgrade_hints").cloned().unwrap_or_else(|| json!([])),
        "integrity_guards": contract.get("integrity_guards").cloned().unwrap_or_else(|| json!([])),
    }))
}
fn extract_preferred_structured_write_endpoint(capabilities: &Value) -> Option<String> {
capabilities
.get("preferred_structured_write_endpoint")
.or_else(|| capabilities.get("preferred_write_endpoint"))
.and_then(Value::as_str)
.map(str::trim)
.filter(|value| *value == "/v2/agent/write-with-proof")
.map(str::to_string)
}
/// Ask the capabilities endpoint which structured-write route to use,
/// falling back to the default on any error or non-2xx response.
async fn negotiated_write_with_proof_endpoint(api_url: &str, token: Option<&str>) -> String {
    const DEFAULT_ENDPOINT: &str = "/v2/agent/write-with-proof";
    let response = raw_api_request(
        api_url,
        reqwest::Method::GET,
        "/v1/agent/capabilities",
        token,
    )
    .await;
    match response {
        Ok((status, body)) if (200..=299).contains(&status) => {
            extract_preferred_structured_write_endpoint(&body)
                .unwrap_or_else(|| DEFAULT_ENDPOINT.to_string())
        }
        _ => DEFAULT_ENDPOINT.to_string(),
    }
}
// Schema version stamped onto log-training payloads when absent.
const LOG_TRAINING_SCHEMA_VERSION: &str = "routine_training_tool_contract.v1";
// Schema version for raw-turn evidence ingress requests.
const LOG_TURN_SCHEMA_VERSION: &str = "agent_evidence_ingress_request.v1";
// Schema version for clarification resolutions; used by the resolution
// builders (not visible in this chunk).
const CLARIFICATION_RESOLUTION_SCHEMA_VERSION: &str = "agent_logging_clarification_resolution.v1";
/// One clarification prompt parsed out of a blocked structured-write
/// response (`--resume-file`); see `extract_resume_clarification_prompts`.
#[derive(Debug, Clone, PartialEq, Eq)]
struct ResumeClarificationPrompt {
    // The prompt's unique id, used to match --clarification-prompt-id.
    prompt_id: Uuid,
    // Trimmed scope_kind string from the prompt payload.
    scope_kind: String,
    // Non-empty, trimmed field names the server accepts in a resolution.
    accepted_resolution_fields: Vec<String>,
}
/// POST a raw conversational turn to /v3/agent/evidence.
pub async fn log_turn(api_url: &str, token: Option<&str>, args: LogTurnArgs) -> i32 {
    let body = build_log_turn_request(&args);
    api_request(
        api_url,
        reqwest::Method::POST,
        "/v3/agent/evidence",
        token,
        Some(body),
        &[],  // no query parameters
        &[],  // no extra headers
        false, // raw
        false, // include
    )
    .await
}
/// POST a routine-training payload to /v3/agent/training.
pub async fn log_training(api_url: &str, token: Option<&str>, args: LogTrainingArgs) -> i32 {
    let body = build_log_training_request(&args);
    api_request(
        api_url,
        reqwest::Method::POST,
        "/v3/agent/training",
        token,
        Some(body),
        &[],  // no query parameters
        &[],  // no extra headers
        false, // raw
        false, // include
    )
    .await
}
/// Assemble the evidence-ingress request body for `log-turn`.
///
/// Validation happens in the helpers: empty strings abort with usage
/// errors, timestamps are normalized to UTC RFC3339. Optional fields are
/// only inserted when their flag was provided.
fn build_log_turn_request(args: &LogTurnArgs) -> Value {
    let message = resolve_log_turn_message(args.message.as_deref(), args.message_file.as_deref());
    let modality = normalize_non_empty_arg(args.modality.as_deref(), "--modality");
    let recorded_at = normalize_rfc3339_arg(args.recorded_at.as_deref(), "--recorded-at");
    let observed_at = normalize_rfc3339_arg(args.observed_at.as_deref(), "--observed-at");
    let session_id = normalize_non_empty_arg(args.session_id.as_deref(), "--session-id");
    let idempotency_key =
        normalize_non_empty_arg(args.idempotency_key.as_deref(), "--idempotency-key");
    // Base body: always present, with modality defaulting to chat_message.
    let mut body = json!({
        "schema_version": LOG_TURN_SCHEMA_VERSION,
        "text_evidence": message,
        "modality": modality.unwrap_or_else(|| "chat_message".to_string()),
        "source": {
            "surface": "cli",
            "client": "kura-cli",
            "command_family": "log_turn"
        },
        "metadata": {
            "ingress_surface": "kura-cli.log-turn"
        }
    });
    if let Some(session_id) = session_id {
        body["session_hint"] = json!({
            "session_id": session_id,
        });
    }
    if let Some(recorded_at) = recorded_at {
        body["recorded_at"] = json!(recorded_at);
    }
    if let Some(observed_at) = observed_at {
        body["observed_at"] = json!(observed_at);
    }
    if let Some(idempotency_key) = idempotency_key {
        body["idempotency_key"] = json!(idempotency_key);
    }
    body
}
/// Load the user-supplied log-training payload and fill in defaults.
///
/// The payload must be a JSON object; schema_version and the
/// source_context surface/client/command_family keys are only inserted
/// when absent, so explicit user values always win.
fn build_log_training_request(args: &LogTrainingArgs) -> Value {
    let mut body = resolve_log_training_request(args.data.as_deref(), args.request_file.as_deref());
    // exit_error diverges, so the unwrap_or_else arms never return.
    let object = body.as_object_mut().unwrap_or_else(|| {
        exit_error(
            "log-training payload must be a JSON object",
            Some(
                "Pass a JSON object with date plus entries, for example via `kura log --request-file payload.json`.",
            ),
        )
    });
    object
        .entry("schema_version".to_string())
        .or_insert_with(|| json!(LOG_TRAINING_SCHEMA_VERSION));
    let source_context_value = object
        .entry("source_context".to_string())
        .or_insert_with(|| json!({}));
    let source_context = source_context_value.as_object_mut().unwrap_or_else(|| {
        exit_error(
            "log-training source_context must be a JSON object when provided",
            Some("Use source_context as an object, or omit it."),
        )
    });
    source_context
        .entry("surface".to_string())
        .or_insert_with(|| json!("cli"));
    source_context
        .entry("client".to_string())
        .or_insert_with(|| json!("kura-cli"));
    source_context
        .entry("command_family".to_string())
        .or_insert_with(|| json!("log_training"));
    body
}
/// Handler for `kura agent logging-bootstrap`.
///
/// Fetches the capabilities manifest, extracts the logging bootstrap
/// contract, and prints it (optionally narrowed to one intent).
/// Exit codes: 0 success, 1 for 4xx from the API, 2 for other non-2xx,
/// 4 for local usage errors (unknown/empty intent).
async fn logging_bootstrap(api_url: &str, token: Option<&str>, intent: Option<String>) -> i32 {
    let (status, body) = raw_api_request(
        api_url,
        reqwest::Method::GET,
        "/v1/agent/capabilities",
        token,
    )
    .await
    .unwrap_or_else(|error| {
        exit_error(
            &format!("Failed to fetch /v1/agent/capabilities for logging bootstrap: {error}"),
            Some("Retry once the API is reachable, or fall back to `kura agent capabilities`."),
        )
    });
    if !(200..=299).contains(&status) {
        print_json_stderr(&body);
        // Client errors map to 1, everything else (5xx etc.) to 2.
        return if (400..500).contains(&status) { 1 } else { 2 };
    }
    let Some(contract) = extract_logging_bootstrap_contract(&body) else {
        exit_error(
            "agent capabilities response is missing task_bootstrap_contracts.logging",
            Some("Retry after `kura agent capabilities` succeeds, or inspect the full manifest."),
        );
    };
    match build_logging_bootstrap_output(&contract, intent.as_deref()) {
        Ok(output) => {
            print_json_stdout(&output);
            0
        }
        Err(error) => {
            print_json_stderr(&error);
            4
        }
    }
}
/// GET /v1/agent/context with the optional shaping parameters encoded as
/// query-string pairs (see `build_context_query`).
pub async fn context(
    api_url: &str,
    token: Option<&str>,
    exercise_limit: Option<u32>,
    strength_limit: Option<u32>,
    custom_limit: Option<u32>,
    task_intent: Option<String>,
    include_system: Option<bool>,
    budget_tokens: Option<u32>,
) -> i32 {
    let query = build_context_query(
        exercise_limit,
        strength_limit,
        custom_limit,
        task_intent,
        include_system,
        budget_tokens,
    );
    api_request(
        api_url,
        reqwest::Method::GET,
        "/v1/agent/context",
        token,
        None,
        &query,
        &[],  // no extra headers
        false, // raw
        false, // include
    )
    .await
}
/// GET /v1/agent/context/section-index; accepts the same shaping
/// parameters as `context`.
pub async fn section_index(
    api_url: &str,
    token: Option<&str>,
    exercise_limit: Option<u32>,
    strength_limit: Option<u32>,
    custom_limit: Option<u32>,
    task_intent: Option<String>,
    include_system: Option<bool>,
    budget_tokens: Option<u32>,
) -> i32 {
    let query = build_context_query(
        exercise_limit,
        strength_limit,
        custom_limit,
        task_intent,
        include_system,
        budget_tokens,
    );
    api_request(
        api_url,
        reqwest::Method::GET,
        "/v1/agent/context/section-index",
        token,
        None,
        &query,
        &[],  // no extra headers
        false, // raw
        false, // include
    )
    .await
}
/// GET /v1/agent/context/section-fetch for one section page.
/// `section` must be non-empty (enforced by `build_section_fetch_query`).
pub async fn section_fetch(
    api_url: &str,
    token: Option<&str>,
    section: String,
    limit: Option<u32>,
    cursor: Option<String>,
    fields: Option<String>,
    task_intent: Option<String>,
) -> i32 {
    let query = build_section_fetch_query(section, limit, cursor, fields, task_intent);
    api_request(
        api_url,
        reqwest::Method::GET,
        "/v1/agent/context/section-fetch",
        token,
        None,
        &query,
        &[],  // no extra headers
        false, // raw
        false, // include
    )
    .await
}
/// POST /v1/agent/answer-admissibility with the intent and draft answer
/// forwarded verbatim in the JSON body.
pub async fn answer_admissibility(
    api_url: &str,
    token: Option<&str>,
    task_intent: String,
    draft_answer: String,
) -> i32 {
    let body = json!({
        "task_intent": task_intent,
        "draft_answer": draft_answer,
    });
    api_request(
        api_url,
        reqwest::Method::POST,
        "/v1/agent/answer-admissibility",
        token,
        Some(body),
        &[],  // no query parameters
        &[],  // no extra headers
        false, // raw
        false, // include
    )
    .await
}
/// GET /v1/agent/evidence/event/{event_id}.
async fn evidence_event(api_url: &str, token: Option<&str>, event_id: Uuid) -> i32 {
    let path = format!("/v1/agent/evidence/event/{event_id}");
    api_request(
        api_url,
        reqwest::Method::GET,
        &path,
        token,
        None,
        &[],  // no query parameters
        &[],  // no extra headers
        false, // raw
        false, // include
    )
    .await
}
/// Persist the save-confirmation preference by POSTing a preference.set
/// event to /v1/events. A fresh UUIDv7 idempotency key is generated per
/// invocation, so repeating the command emits a new event each time.
async fn set_save_confirmation_mode(
    api_url: &str,
    token: Option<&str>,
    mode: SaveConfirmationMode,
) -> i32 {
    let body = json!({
        "timestamp": Utc::now().to_rfc3339(),
        "event_type": "preference.set",
        "data": {
            "key": "save_confirmation_mode",
            "value": mode.as_str(),
        },
        "metadata": {
            "source": "cli",
            "agent": "kura-cli",
            "idempotency_key": Uuid::now_v7().to_string(),
        }
    });
    api_request(
        api_url,
        reqwest::Method::POST,
        "/v1/events",
        token,
        Some(body),
        &[],  // no query parameters
        &[],  // no extra headers
        false, // raw
        false, // include
    )
    .await
}
/// Handler for `kura agent request`: validates/normalizes each piece of
/// the ad-hoc request (method, agent-scoped path, query pairs, headers,
/// body source) and forwards it to `api_request`.
async fn request(api_url: &str, token: Option<&str>, args: AgentRequestArgs) -> i32 {
    let method = parse_method(&args.method);
    let path = normalize_agent_path(&args.path);
    let query = parse_query_pairs(&args.query);
    let headers = parse_headers(&args.header);
    let body = resolve_body(args.data.as_deref(), args.data_file.as_deref());
    api_request(
        api_url,
        method,
        &path,
        token,
        body,
        &query,
        &headers,
        args.raw,
        args.include,
    )
    .await
}
/// Handler for `kura agent write-structured`.
///
/// Request body comes either from --request-file (with CLI-supplied
/// clarification resolutions still merged in on top) or is assembled
/// from the piecewise flags. The target endpoint is negotiated against
/// the capabilities manifest before POSTing.
pub async fn write_with_proof(api_url: &str, token: Option<&str>, args: WriteWithProofArgs) -> i32 {
    let body = if let Some(file) = args.request_file.as_deref() {
        let mut request = load_full_request(file);
        // Clarification flags remain usable alongside --request-file; when
        // present they overwrite any clarification_resolutions in the file.
        if let Some(clarification_resolutions) = resolve_clarification_resolutions(
            &args.clarification_resolution_file,
            args.resume_file.as_deref(),
            args.clarification_prompt_id,
            args.clarification_route_family.as_deref(),
            args.clarification_protocol_variant.as_deref(),
            args.clarification_note.as_deref(),
        ) {
            request["clarification_resolutions"] = json!(clarification_resolutions);
        }
        request
    } else {
        build_request_from_events_and_targets(
            api_url,
            token,
            args.events_file.as_deref().unwrap_or(""),
            &args.target,
            args.verify_timeout_ms,
            args.intent_goal.as_deref(),
            args.intent_handshake_file.as_deref(),
            args.high_impact_confirmation_token.as_deref(),
            args.high_impact_confirmation_file.as_deref(),
            args.non_trivial_confirmation_token.as_deref(),
            args.non_trivial_confirmation_file.as_deref(),
            &args.clarification_resolution_file,
            args.resume_file.as_deref(),
            args.clarification_prompt_id,
            args.clarification_route_family.as_deref(),
            args.clarification_protocol_variant.as_deref(),
            args.clarification_note.as_deref(),
            args.session_status,
            args.conversation_draft_mode,
        )
        .await
    };
    let write_endpoint = negotiated_write_with_proof_endpoint(api_url, token).await;
    api_request(
        api_url,
        reqwest::Method::POST,
        &write_endpoint,
        token,
        Some(body),
        &[],  // no query parameters
        &[],  // no extra headers
        false, // raw
        false, // include
    )
    .await
}
/// Handler for `kura agent resolve-visualization`.
///
/// Builds the request body from --request-file verbatim, or from the
/// individual flags (task_intent required and non-blank in that mode),
/// then POSTs it to /v1/agent/visualization/resolve.
async fn resolve_visualization(
    api_url: &str,
    token: Option<&str>,
    args: ResolveVisualizationArgs,
) -> i32 {
    let body = if let Some(file) = args.request_file.as_deref() {
        match read_json_from_file(file) {
            Ok(v) => v,
            Err(e) => exit_error(
                &e,
                Some("Provide a valid JSON payload for /v1/agent/visualization/resolve."),
            ),
        }
    } else {
        // clap guarantees task_intent is present here, but a blank value
        // is still rejected explicitly.
        let task_intent = match args.task_intent {
            Some(intent) if !intent.trim().is_empty() => intent,
            _ => exit_error(
                "task_intent is required unless --request-file is used.",
                Some("Use --task-intent or provide --request-file."),
            ),
        };
        let mut body = json!({
            "task_intent": task_intent,
            "allow_rich_rendering": args.allow_rich_rendering
        });
        if let Some(mode) = args.user_preference_override {
            body["user_preference_override"] = json!(mode);
        }
        if let Some(complexity) = args.complexity_hint {
            body["complexity_hint"] = json!(complexity);
        }
        if let Some(session_id) = args.telemetry_session_id {
            body["telemetry_session_id"] = json!(session_id);
        }
        if let Some(spec_file) = args.spec_file.as_deref() {
            let spec = match read_json_from_file(spec_file) {
                Ok(v) => v,
                Err(e) => exit_error(&e, Some("Provide a valid JSON visualization_spec payload.")),
            };
            body["visualization_spec"] = spec;
        }
        body
    };
    api_request(
        api_url,
        reqwest::Method::POST,
        "/v1/agent/visualization/resolve",
        token,
        Some(body),
        &[],  // no query parameters
        &[],  // no extra headers
        false, // raw
        false, // include
    )
    .await
}
/// Map a case-insensitive method name to a `reqwest::Method`, aborting
/// with a usage error for anything outside the supported set.
fn parse_method(raw: &str) -> reqwest::Method {
    match raw.to_uppercase().as_str() {
        "GET" => reqwest::Method::GET,
        "POST" => reqwest::Method::POST,
        "PUT" => reqwest::Method::PUT,
        "DELETE" => reqwest::Method::DELETE,
        "PATCH" => reqwest::Method::PATCH,
        "HEAD" => reqwest::Method::HEAD,
        "OPTIONS" => reqwest::Method::OPTIONS,
        other => exit_error(
            &format!("Unknown HTTP method: {other}"),
            Some("Supported methods: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS"),
        ),
    }
}
/// Normalize the user-supplied path for `kura agent request`.
///
/// Accepts absolute or slash-less /v1/agent and /v2/agent paths, rejects
/// any other absolute path, and treats everything else as relative to
/// /v1/agent/.
// NOTE(review): starts_with("/v1/agent") also admits paths such as
// "/v1/agentfoo" (no separating slash) — confirm that is acceptable.
fn normalize_agent_path(raw: &str) -> String {
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        exit_error(
            "Agent path must not be empty.",
            Some("Use relative path like 'context' or absolute path '/v1/agent/context'."),
        );
    }
    if trimmed.starts_with("/v1/agent") || trimmed.starts_with("/v2/agent") {
        return trimmed.to_string();
    }
    // Allow the same prefixes without the leading slash.
    if trimmed.starts_with("v1/agent") || trimmed.starts_with("v2/agent") {
        return format!("/{trimmed}");
    }
    if trimmed.starts_with('/') {
        exit_error(
            &format!("Invalid agent path '{trimmed}'."),
            Some(
                "`kura agent request` only supports /v1/agent/* or /v2/agent/* paths. Use `kura api` for other endpoints.",
            ),
        );
    }
    format!("/v1/agent/{}", trimmed.trim_start_matches('/'))
}
/// Parse repeated `key=value` query flags, aborting on malformed input.
fn parse_query_pairs(raw: &[String]) -> Vec<(String, String)> {
    let mut pairs = Vec::with_capacity(raw.len());
    for entry in raw {
        match entry.split_once('=') {
            Some((key, value)) => pairs.push((key.to_string(), value.to_string())),
            None => exit_error(
                &format!("Invalid query parameter: '{entry}'"),
                Some("Format: key=value, e.g. --query event_type=set.logged"),
            ),
        }
    }
    pairs
}
/// Translate the optional context flags into HTTP query pairs, keeping
/// the parameter order the original handlers emit.
fn build_context_query(
    exercise_limit: Option<u32>,
    strength_limit: Option<u32>,
    custom_limit: Option<u32>,
    task_intent: Option<String>,
    include_system: Option<bool>,
    budget_tokens: Option<u32>,
) -> Vec<(String, String)> {
    let mut query: Vec<(String, String)> = Vec::with_capacity(6);
    query.extend(exercise_limit.map(|v| ("exercise_limit".to_string(), v.to_string())));
    query.extend(strength_limit.map(|v| ("strength_limit".to_string(), v.to_string())));
    query.extend(custom_limit.map(|v| ("custom_limit".to_string(), v.to_string())));
    query.extend(task_intent.map(|v| ("task_intent".to_string(), v)));
    query.extend(include_system.map(|v| ("include_system".to_string(), v.to_string())));
    query.extend(budget_tokens.map(|v| ("budget_tokens".to_string(), v.to_string())));
    query
}
/// Build the query string for section-fetch; `section` is mandatory and
/// must be non-blank, the remaining parameters are optional.
fn build_section_fetch_query(
    section: String,
    limit: Option<u32>,
    cursor: Option<String>,
    fields: Option<String>,
    task_intent: Option<String>,
) -> Vec<(String, String)> {
    let section = section.trim();
    if section.is_empty() {
        exit_error(
            "section must not be empty",
            Some("Provide --section using an id from /v1/agent/context/section-index"),
        );
    }
    let mut query: Vec<(String, String)> = Vec::with_capacity(5);
    query.push(("section".to_string(), section.to_string()));
    query.extend(limit.map(|v| ("limit".to_string(), v.to_string())));
    query.extend(cursor.map(|v| ("cursor".to_string(), v)));
    query.extend(fields.map(|v| ("fields".to_string(), v)));
    query.extend(task_intent.map(|v| ("task_intent".to_string(), v)));
    query
}
/// Parse repeated `Key:Value` header flags (both sides trimmed), aborting
/// on malformed input.
fn parse_headers(raw: &[String]) -> Vec<(String, String)> {
    let mut headers = Vec::with_capacity(raw.len());
    for entry in raw {
        match entry.split_once(':') {
            Some((key, value)) => headers.push((key.trim().to_string(), value.trim().to_string())),
            None => exit_error(
                &format!("Invalid header: '{entry}'"),
                Some("Format: Key:Value, e.g. --header Content-Type:application/json"),
            ),
        }
    }
    headers
}
/// Build the optional JSON body for an ad-hoc agent request from either
/// inline --data (takes precedence) or --data-file ('-' reads stdin).
fn resolve_body(data: Option<&str>, data_file: Option<&str>) -> Option<serde_json::Value> {
    if let Some(raw) = data {
        let parsed = serde_json::from_str(raw).unwrap_or_else(|e| {
            exit_error(
                &format!("Invalid JSON in --data: {e}"),
                Some("Provide valid JSON string"),
            )
        });
        return Some(parsed);
    }
    data_file.map(|file| {
        read_json_from_file(file).unwrap_or_else(|e| {
            exit_error(&e, Some("Provide a valid JSON file or use '-' for stdin"))
        })
    })
}
/// Read trimmed text from `path`, or from stdin when `path` is "-".
/// Aborts (with `docs_hint` as guidance) on I/O failure or empty input.
// NOTE(review): the empty-input error hardcodes "log-turn message" even
// though the helper takes a generic docs_hint; its only visible caller is
// resolve_log_turn_message, so the text is accurate today — revisit if
// this helper gains other callers.
fn read_text_from_file(path: &str, docs_hint: &str) -> String {
    let raw = if path == "-" {
        let mut buffer = String::new();
        std::io::stdin()
            .read_to_string(&mut buffer)
            .unwrap_or_else(|error| {
                exit_error(&format!("Failed to read stdin: {error}"), Some(docs_hint))
            });
        buffer
    } else {
        std::fs::read_to_string(path).unwrap_or_else(|error| {
            exit_error(
                &format!("Failed to read file '{path}': {error}"),
                Some(docs_hint),
            )
        })
    };
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        exit_error("log-turn message must not be empty", Some(docs_hint));
    }
    trimmed.to_string()
}
/// Resolve the raw log-turn message from the positional argument or
/// --message-file, trimming whitespace and rejecting empty input.
fn resolve_log_turn_message(message: Option<&str>, message_file: Option<&str>) -> String {
    if let Some(path) = message_file {
        return read_text_from_file(
            path,
            "Use MESSAGE directly or provide --message-file with raw text (use '-' for stdin).",
        );
    }
    match message.map(str::trim) {
        Some(value) if !value.is_empty() => value.to_string(),
        _ => exit_error(
            "MESSAGE must not be empty",
            Some("Pass the raw user turn directly, for example: `kura log \"bench 4x5 80\"`."),
        ),
    }
}
/// Load the log-training payload from exactly one of inline --data or a
/// --request-file path; any other combination aborts with a usage error
/// (clap already enforces this, so the catch-all is a safety net).
fn resolve_log_training_request(data: Option<&str>, request_file: Option<&str>) -> Value {
    match (data, request_file) {
        (Some(raw), None) => serde_json::from_str::<Value>(raw).unwrap_or_else(|error| {
            exit_error(
                &format!("failed to parse --data as JSON: {error}"),
                Some("Pass a full routine training JSON object."),
            )
        }),
        (None, Some(path)) => read_json_from_file(path).unwrap_or_else(|error| {
            exit_error(
                &format!("failed to read log-training request file: {error}"),
                Some("Pass a JSON object file, or use --data with inline JSON."),
            )
        }),
        _ => exit_error(
            "provide exactly one of --data or --request-file",
            Some("Use `kura log --request-file payload.json` for routine training logging."),
        ),
    }
}
/// Trim an optional string flag. Absent stays `None`; a present but blank
/// value aborts with a usage error naming the offending flag.
fn normalize_non_empty_arg(raw: Option<&str>, field_name: &str) -> Option<String> {
    let value = raw?;
    let trimmed = value.trim();
    if trimmed.is_empty() {
        exit_error(
            &format!("{field_name} must not be empty"),
            Some("Remove the flag or provide a non-empty value."),
        );
    }
    Some(trimmed.to_string())
}
fn normalize_rfc3339_arg(raw: Option<&str>, field_name: &str) -> Option<String> {
let normalized = normalize_non_empty_arg(raw, field_name)?;
let parsed = DateTime::parse_from_rfc3339(&normalized).unwrap_or_else(|error| {
exit_error(
&format!("{field_name} must be RFC3339: {error}"),
Some("Example: 2026-03-15T09:30:00Z"),
)
});
Some(parsed.with_timezone(&Utc).to_rfc3339())
}
/// Load and validate a full structured-write request from --request-file.
///
/// Requires an `events` array. `read_after_write_targets` is normally
/// required too, except when conversation_draft.mode is "append", in
/// which case an empty targets array is injected.
fn load_full_request(path: &str) -> serde_json::Value {
    let mut payload = match read_json_from_file(path) {
        Ok(v) => v,
        Err(e) => exit_error(
            &e,
            Some(
                "Provide JSON with events, read_after_write_targets, and optional structured-write fields such as verify_timeout_ms or clarification_resolutions.",
            ),
        ),
    };
    if payload
        .get("events")
        .and_then(|value| value.as_array())
        .is_none()
    {
        exit_error(
            "request payload must include an events array",
            Some(
                "Use --request-file with {\"events\": [...], \"read_after_write_targets\": [...]} and optional clarification_resolutions/high_impact_confirmation fields.",
            ),
        );
    }
    if payload
        .get("read_after_write_targets")
        .and_then(|value| value.as_array())
        .is_none()
    {
        // Append-mode conversation drafts are exempt from targets; the
        // mode string is compared case-insensitively after trimming.
        let conversation_draft_mode = payload
            .get("conversation_draft")
            .and_then(|value| value.get("mode"))
            .and_then(Value::as_str)
            .map(|value| value.trim().to_ascii_lowercase());
        if conversation_draft_mode.as_deref() == Some("append") {
            payload["read_after_write_targets"] = json!([]);
            return payload;
        }
        exit_error(
            "request payload must include read_after_write_targets array",
            Some("Set read_after_write_targets to [{\"projection_type\":\"...\",\"key\":\"...\"}]"),
        );
    }
    payload
}
/// Walk a wrapped preflight response down through `body` / `received`
/// envelopes until a `write_preflight.v1` payload (or the innermost
/// value) is reached.
fn unwrap_resume_payload<'a>(payload: &'a Value) -> &'a Value {
    let mut current = payload;
    loop {
        let is_preflight = current
            .get("schema_version")
            .and_then(Value::as_str)
            .is_some_and(|value| value == "write_preflight.v1");
        if is_preflight {
            return current;
        }
        // `body` is preferred over `received`, matching the original
        // recursive descent order.
        if let Some(inner) = current.get("body") {
            current = inner;
        } else if let Some(inner) = current.get("received") {
            current = inner;
        } else {
            return current;
        }
    }
}
/// Collect every clarification prompt from a blocked-write response.
///
/// Walks blockers[].details.clarification_prompts[] in the unwrapped
/// payload; prompts with a missing/invalid prompt_id or scope_kind are
/// silently skipped, and accepted_resolution_fields entries are trimmed
/// with empties dropped.
fn extract_resume_clarification_prompts(payload: &Value) -> Vec<ResumeClarificationPrompt> {
    let root = unwrap_resume_payload(payload);
    root.get("blockers")
        .and_then(Value::as_array)
        .into_iter()
        .flatten()
        .filter_map(|blocker| blocker.get("details"))
        .filter_map(|details| details.get("clarification_prompts"))
        .filter_map(Value::as_array)
        .flatten()
        .filter_map(|prompt| {
            // `?` inside filter_map drops prompts missing required fields.
            let prompt_id = prompt.get("prompt_id")?.as_str()?;
            let prompt_id = Uuid::parse_str(prompt_id).ok()?;
            let scope_kind = prompt.get("scope_kind")?.as_str()?.trim().to_string();
            let accepted_resolution_fields = prompt
                .get("accepted_resolution_fields")
                .and_then(Value::as_array)
                .map(|fields| {
                    fields
                        .iter()
                        .filter_map(Value::as_str)
                        .map(str::trim)
                        .filter(|field| !field.is_empty())
                        .map(str::to_string)
                        .collect::<Vec<_>>()
                })
                .unwrap_or_default();
            Some(ResumeClarificationPrompt {
                prompt_id,
                scope_kind,
                accepted_resolution_fields,
            })
        })
        .collect()
}
/// Pick the prompt to resolve: an explicit id must match exactly, a lone
/// prompt is used as-is, and any ambiguity or absence is an error string.
fn select_resume_clarification_prompt(
    prompts: &[ResumeClarificationPrompt],
    explicit_prompt_id: Option<Uuid>,
) -> Result<ResumeClarificationPrompt, String> {
    if prompts.is_empty() {
        return Err(
            "resume_file does not contain a clarification_required blocker with clarification_prompts"
                .to_string(),
        );
    }
    if let Some(prompt_id) = explicit_prompt_id {
        for prompt in prompts {
            if prompt.prompt_id == prompt_id {
                return Ok(prompt.clone());
            }
        }
        return Err(format!(
            "resume_file does not contain clarification prompt {prompt_id}"
        ));
    }
    match prompts {
        [only] => Ok(only.clone()),
        _ => Err(
            "resume_file contains multiple clarification prompts; provide --clarification-prompt-id"
                .to_string(),
        ),
    }
}
/// Read --resume-file and select the clarification prompt to answer,
/// aborting with guidance on unreadable files or ambiguous prompt sets.
fn load_resume_clarification_prompt(
    resume_file: &str,
    explicit_prompt_id: Option<Uuid>,
) -> ResumeClarificationPrompt {
    let payload = read_json_from_file(resume_file).unwrap_or_else(|error| {
        exit_error(
            &error,
            Some("Provide the blocked structured-write response JSON via --resume-file."),
        )
    });
    let prompts = extract_resume_clarification_prompts(&payload);
    select_resume_clarification_prompt(&prompts, explicit_prompt_id).unwrap_or_else(|error| {
        exit_error(
            &error,
            Some(
                "Use the prior blocked structured-write response body, or pass --clarification-prompt-id when multiple prompts are present.",
            ),
        )
    })
}
/// Assemble a structured-write request from the piecewise CLI flags
/// (the non---request-file mode of `write_with_proof`).
///
/// `events_file` may be "" when the caller had no --events-file; the
/// read below then fails with the usage hint. Targets are required
/// except in conversation-draft append mode.
async fn build_request_from_events_and_targets(
    api_url: &str,
    token: Option<&str>,
    events_file: &str,
    raw_targets: &[String],
    verify_timeout_ms: Option<u64>,
    intent_goal: Option<&str>,
    intent_handshake_file: Option<&str>,
    high_impact_confirmation_token: Option<&str>,
    high_impact_confirmation_file: Option<&str>,
    non_trivial_confirmation_token: Option<&str>,
    non_trivial_confirmation_file: Option<&str>,
    clarification_resolution_files: &[String],
    resume_file: Option<&str>,
    clarification_prompt_id: Option<Uuid>,
    clarification_route_family: Option<&str>,
    clarification_protocol_variant: Option<&str>,
    clarification_note: Option<&str>,
    session_status: Option<SessionCompletionStatus>,
    conversation_draft_mode: Option<ConversationDraftMode>,
) -> serde_json::Value {
    // Append-mode drafts are the only writes allowed without targets.
    if raw_targets.is_empty() && conversation_draft_mode != Some(ConversationDraftMode::Append) {
        exit_error(
            "--target is required when --request-file is not used",
            Some("Repeat --target projection_type:key for read-after-write checks."),
        );
    }
    let parsed_targets = parse_targets(raw_targets);
    let events_payload = match read_json_from_file(events_file) {
        Ok(v) => v,
        Err(e) => exit_error(
            &e,
            Some("Provide --events-file as JSON array or object with events array."),
        ),
    };
    let events = extract_events_array(events_payload);
    // The handshake resolver may consult the API, hence async + network args.
    let intent_handshake =
        resolve_intent_handshake(api_url, token, &events, intent_goal, intent_handshake_file).await;
    let high_impact_confirmation = resolve_high_impact_confirmation(
        high_impact_confirmation_token,
        high_impact_confirmation_file,
    );
    let non_trivial_confirmation = resolve_non_trivial_confirmation(
        non_trivial_confirmation_token,
        non_trivial_confirmation_file,
    );
    let clarification_resolutions = resolve_clarification_resolutions(
        clarification_resolution_files,
        resume_file,
        clarification_prompt_id,
        clarification_route_family,
        clarification_protocol_variant,
        clarification_note,
    );
    build_write_with_proof_request(
        events,
        parsed_targets,
        verify_timeout_ms,
        intent_handshake,
        high_impact_confirmation,
        non_trivial_confirmation,
        clarification_resolutions,
        session_status,
        conversation_draft_mode,
    )
}
fn resolve_optional_object_file(
confirmation_file: Option<&str>,
field_name: &str,
docs_hint: &str,
) -> Option<serde_json::Value> {
if let Some(path) = confirmation_file {
let payload = match read_json_from_file(path) {
Ok(v) => v,
Err(e) => exit_error(&e, Some(docs_hint)),
};
if !payload.is_object() {
exit_error(
&format!("{field_name} payload must be a JSON object"),
Some(docs_hint),
);
}
return Some(payload);
}
None
}
/// Builds a `confirmed: true` payload for the given confirmation schema,
/// stamped with the current UTC time.
///
/// Exits the process when the trimmed token is empty.
fn build_confirmation_payload(
    schema_version: &str,
    confirmation_token: &str,
    docs_hint: &str,
) -> serde_json::Value {
    let trimmed = confirmation_token.trim();
    if trimmed.is_empty() {
        exit_error(
            &format!("{schema_version} confirmation token must not be empty"),
            Some(docs_hint),
        );
    }
    json!({
        "schema_version": schema_version,
        "confirmed": true,
        "confirmed_at": Utc::now().to_rfc3339(),
        "confirmation_token": trimmed,
    })
}
/// Resolves the optional non-trivial confirmation; a JSON file takes
/// precedence over a raw token from the command line.
fn resolve_non_trivial_confirmation(
    confirmation_token: Option<&str>,
    confirmation_file: Option<&str>,
) -> Option<serde_json::Value> {
    let from_file = resolve_optional_object_file(
        confirmation_file,
        "non_trivial_confirmation",
        "Provide a valid JSON object for non_trivial_confirmation.v1.",
    );
    match from_file {
        Some(payload) => Some(payload),
        None => confirmation_token.map(build_non_trivial_confirmation_from_token),
    }
}
/// Resolves the optional high-impact confirmation; a JSON file takes
/// precedence over a raw token from the command line.
fn resolve_high_impact_confirmation(
    confirmation_token: Option<&str>,
    confirmation_file: Option<&str>,
) -> Option<serde_json::Value> {
    let from_file = resolve_optional_object_file(
        confirmation_file,
        "high_impact_confirmation",
        "Provide a valid JSON object for high_impact_confirmation.v1.",
    );
    match from_file {
        Some(payload) => Some(payload),
        None => confirmation_token.map(build_high_impact_confirmation_from_token),
    }
}
/// Wraps a CLI-provided token in a `non_trivial_confirmation.v1` payload.
fn build_non_trivial_confirmation_from_token(confirmation_token: &str) -> serde_json::Value {
    let hint = "Use the confirmation token from claim_guard.non_trivial_confirmation_challenge.";
    build_confirmation_payload("non_trivial_confirmation.v1", confirmation_token, hint)
}
/// Wraps a CLI-provided token in a `high_impact_confirmation.v1` payload.
fn build_high_impact_confirmation_from_token(confirmation_token: &str) -> serde_json::Value {
    let hint = "Use the confirmation token from the prior high-impact confirm-first response.";
    build_confirmation_payload("high_impact_confirmation.v1", confirmation_token, hint)
}
/// Collects clarification resolutions from files and/or inline CLI flags.
///
/// File payloads may be a single resolution object or an array of objects.
/// When any inline flag (`--resume-file`, `--clarification-prompt-id`,
/// route family, protocol variant, or note) is present, one additional
/// resolution object is synthesized and validated against the prompt's
/// `accepted_resolution_fields`. Returns `None` when no resolutions were
/// gathered; exits the process with a hint on any invalid input.
fn resolve_clarification_resolutions(
    clarification_resolution_files: &[String],
    resume_file: Option<&str>,
    clarification_prompt_id: Option<Uuid>,
    clarification_route_family: Option<&str>,
    clarification_protocol_variant: Option<&str>,
    clarification_note: Option<&str>,
) -> Option<Vec<serde_json::Value>> {
    let mut resolutions = Vec::new();
    // Phase 1: load resolutions from files (object or non-empty array of objects).
    for path in clarification_resolution_files {
        let payload = match read_json_from_file(path) {
            Ok(v) => v,
            Err(e) => exit_error(
                &e,
                Some("Provide a valid JSON object or array for clarification_resolutions entries."),
            ),
        };
        match payload {
            Value::Object(_) => resolutions.push(payload),
            Value::Array(entries) => {
                if entries.is_empty() {
                    exit_error(
                        "clarification_resolution_file must not contain an empty array",
                        Some("Provide one resolution object or an array of resolution objects."),
                    );
                }
                for (index, entry) in entries.into_iter().enumerate() {
                    if !entry.is_object() {
                        exit_error(
                            &format!(
                                "clarification_resolution_file entry {index} must be an object"
                            ),
                            Some(
                                "Each clarification_resolutions entry must be a JSON object matching the server schema.",
                            ),
                        );
                    }
                    resolutions.push(entry);
                }
            }
            _ => exit_error(
                "clarification_resolution_file must contain a JSON object or array",
                Some(
                    "Provide one resolution object or an array of resolution objects matching AgentLoggingClarificationResolution.",
                ),
            ),
        }
    }
    // Phase 2: optionally synthesize one inline resolution from CLI flags.
    let route_family =
        normalize_non_empty_arg(clarification_route_family, "--clarification-route-family");
    let protocol_variant = normalize_non_empty_arg(
        clarification_protocol_variant,
        "--clarification-protocol-variant",
    );
    let resolution_note = normalize_non_empty_arg(clarification_note, "--clarification-note");
    // Any of these flags signals an inline clarification retry attempt.
    let inline_requested = resume_file.is_some()
        || clarification_prompt_id.is_some()
        || route_family.is_some()
        || protocol_variant.is_some()
        || resolution_note.is_some();
    if inline_requested {
        // An inline retry must carry at least one answer field.
        if route_family.is_none() && protocol_variant.is_none() && resolution_note.is_none() {
            exit_error(
                "Clarification retry requires an answer field.",
                Some(
                    "Use --clarification-route-family or --clarification-protocol-variant, optionally plus --clarification-note.",
                ),
            );
        }
        // Prefer the prompt from the resume file; otherwise the caller must
        // name the prompt UUID explicitly (and no accepted-field checks run,
        // since `accepted_resolution_fields` is left empty).
        let prompt = if let Some(path) = resume_file {
            load_resume_clarification_prompt(path, clarification_prompt_id)
        } else {
            ResumeClarificationPrompt {
                prompt_id: clarification_prompt_id.unwrap_or_else(|| {
                    exit_error(
                        "Clarification retry requires --clarification-prompt-id when --resume-file is not used.",
                        Some("Reuse the blocked response via --resume-file, or pass the prompt UUID directly."),
                    )
                }),
                scope_kind: String::new(),
                accepted_resolution_fields: Vec::new(),
            }
        };
        // Enforce the answer fields the prompt declared as accepted.
        if prompt
            .accepted_resolution_fields
            .iter()
            .any(|field| field == "resolved_route_family")
            && route_family.is_none()
        {
            exit_error(
                "Clarification retry requires --clarification-route-family for this prompt.",
                Some(
                    "Use the exact route-family answer the user provided, for example training_execution.",
                ),
            );
        }
        if prompt
            .accepted_resolution_fields
            .iter()
            .any(|field| field == "protocol_variant")
            && protocol_variant.is_none()
        {
            exit_error(
                "Clarification retry requires --clarification-protocol-variant for this prompt.",
                Some("Use the exact protocol variant the user provided, for example free_arms."),
            );
        }
        // Only attach answer fields that were actually provided.
        let mut resolution = json!({
            "schema_version": CLARIFICATION_RESOLUTION_SCHEMA_VERSION,
            "prompt_id": prompt.prompt_id,
        });
        if let Some(route_family) = route_family {
            resolution["resolved_route_family"] = json!(route_family);
        }
        if let Some(protocol_variant) = protocol_variant {
            resolution["protocol_variant"] = json!(protocol_variant);
        }
        if let Some(resolution_note) = resolution_note {
            resolution["resolution_note"] = json!(resolution_note);
        }
        resolutions.push(resolution);
    }
    if resolutions.is_empty() {
        None
    } else {
        Some(resolutions)
    }
}
// Absolute delta thresholds at or above which a `training_plan.updated`
// payload is classified as a high-impact write.
const PLAN_UPDATE_VOLUME_DELTA_HIGH_IMPACT_ABS_GTE: f64 = 15.0;
const PLAN_UPDATE_INTENSITY_DELTA_HIGH_IMPACT_ABS_GTE: f64 = 10.0;
const PLAN_UPDATE_FREQUENCY_DELTA_HIGH_IMPACT_ABS_GTE: f64 = 2.0;
const PLAN_UPDATE_DURATION_DELTA_WEEKS_HIGH_IMPACT_ABS_GTE: f64 = 2.0;
// Widest selector date range (in days) a schedule exception may span while
// still being treated as low impact.
const SCHEDULE_EXCEPTION_LOW_IMPACT_MAX_RANGE_DAYS: i64 = 14;
fn normalized_event_type(event: &Value) -> Option<String> {
event
.get("event_type")
.and_then(Value::as_str)
.map(str::trim)
.filter(|value| !value.is_empty())
.map(|value| value.to_lowercase())
}
/// True for event types that always require the high-impact write flow,
/// regardless of payload contents. Input is trimmed and lowercased first.
fn is_always_high_impact_event_type(event_type: &str) -> bool {
    const ALWAYS_HIGH_IMPACT: [&str; 11] = [
        "training_plan.created",
        "training_plan.archived",
        "projection_rule.created",
        "projection_rule.archived",
        "weight_target.set",
        "sleep_target.set",
        "nutrition_target.set",
        "workflow.onboarding.closed",
        "workflow.onboarding.override_granted",
        "workflow.onboarding.aborted",
        "workflow.onboarding.restarted",
    ];
    let normalized = event_type.trim().to_lowercase();
    ALWAYS_HIGH_IMPACT.contains(&normalized.as_str())
}
fn read_abs_f64(value: Option<&Value>) -> Option<f64> {
let raw = value?;
if let Some(number) = raw.as_f64() {
return Some(number.abs());
}
if let Some(number) = raw.as_i64() {
return Some((number as f64).abs());
}
if let Some(number) = raw.as_u64() {
return Some((number as f64).abs());
}
raw.as_str()
.and_then(|text| text.trim().parse::<f64>().ok())
.map(f64::abs)
}
/// Returns the absolute value of the first key that resolves to a number,
/// checking each key at the top level of `data` and then nested under
/// `data.delta`.
fn read_plan_delta_abs(data: &Value, keys: &[&str]) -> Option<f64> {
    let nested = data.get("delta");
    keys.iter().find_map(|key| {
        read_abs_f64(data.get(*key))
            .or_else(|| read_abs_f64(nested.and_then(|delta| delta.get(*key))))
    })
}
/// Interprets a JSON value as a boolean.
///
/// Accepts real booleans, the integers 0/1, and a fixed set of English and
/// German true/false words (case-insensitive, trimmed); everything else
/// yields `None`.
fn read_bool_like(value: Option<&Value>) -> Option<bool> {
    let raw = value?;
    if let Some(flag) = raw.as_bool() {
        return Some(flag);
    }
    if let Some(number) = raw.as_i64() {
        return match number {
            0 => Some(false),
            1 => Some(true),
            _ => None,
        };
    }
    let word = raw.as_str()?.trim().to_lowercase();
    match word.as_str() {
        "true" | "yes" | "ja" | "1" | "on" | "active" => Some(true),
        "false" | "no" | "nein" | "0" | "off" | "inactive" => Some(false),
        _ => None,
    }
}
/// Parses a `YYYY-MM-DD` date out of a JSON string value, ignoring
/// surrounding whitespace; non-strings, blanks, and malformed dates yield
/// `None`.
fn parse_local_date_value(value: Option<&Value>) -> Option<chrono::NaiveDate> {
    let text = value?.as_str()?.trim();
    if text.is_empty() {
        return None;
    }
    chrono::NaiveDate::parse_from_str(text, "%Y-%m-%d").ok()
}
/// True when the selector pins concrete occurrences: a non-blank
/// `occurrence_id`, or at least one non-blank string in `occurrence_ids`.
fn selector_has_explicit_occurrence_anchor(selector: &Value) -> bool {
    let has_single = selector
        .get("occurrence_id")
        .and_then(Value::as_str)
        .is_some_and(|value| !value.trim().is_empty());
    if has_single {
        return true;
    }
    selector
        .get("occurrence_ids")
        .and_then(Value::as_array)
        .is_some_and(|values| {
            values
                .iter()
                .any(|value| value.as_str().is_some_and(|raw| !raw.trim().is_empty()))
        })
}
/// True when the selector is anchored to a bounded time window: explicit
/// occurrence ids, a single date, a list containing at least one valid date,
/// a `week_of` date, or a well-formed date range no longer than
/// `SCHEDULE_EXCEPTION_LOW_IMPACT_MAX_RANGE_DAYS` days.
fn selector_has_bounded_temporal_anchor(selector: &Value) -> bool {
    if selector_has_explicit_occurrence_anchor(selector) {
        return true;
    }
    // Single date: `local_date` preferred, `date` as fallback spelling.
    if parse_local_date_value(selector.get("local_date").or_else(|| selector.get("date"))).is_some()
    {
        return true;
    }
    if selector
        .get("local_dates")
        .and_then(Value::as_array)
        .map(|values| {
            values
                .iter()
                .any(|value| parse_local_date_value(Some(value)).is_some())
        })
        .unwrap_or(false)
    {
        return true;
    }
    if parse_local_date_value(selector.get("week_of")).is_some() {
        return true;
    }
    // Accept either `date_range {start,end}` or `between {from,to}`.
    let date_range = selector
        .get("date_range")
        .or_else(|| selector.get("between"))
        .unwrap_or(&Value::Null);
    let start = parse_local_date_value(date_range.get("start").or_else(|| date_range.get("from")));
    let end = parse_local_date_value(date_range.get("end").or_else(|| date_range.get("to")));
    match (start, end) {
        // A range only counts as bounded when it is ordered and short enough.
        (Some(start), Some(end)) if end >= start => {
            (end - start).num_days() <= SCHEDULE_EXCEPTION_LOW_IMPACT_MAX_RANGE_DAYS
        }
        _ => false,
    }
}
/// True when a schedule-exception payload declares a broad change scope.
///
/// First checks the textual scope (several field spellings and nestings) for
/// structural keywords, then numeric scope counters against the low-impact
/// limits.
fn schedule_exception_scope_is_high_impact(data: &Value) -> bool {
    // Scope may live at the top level (`change_scope`/`update_scope`) or be
    // nested under `scope` with `change_scope` or `scope` as the inner key.
    let scope_value = data
        .get("change_scope")
        .or_else(|| data.get("update_scope"))
        .or_else(|| {
            data.get("scope")
                .and_then(|scope| scope.get("change_scope"))
        })
        .or_else(|| data.get("scope").and_then(|scope| scope.get("scope")))
        .and_then(Value::as_str)
        .map(|raw| raw.trim().to_lowercase());
    if matches!(
        scope_value.as_deref(),
        Some(
            "bulk"
                | "future_block"
                | "full_rewrite"
                | "template_rewrite"
                | "replace_future_schedule"
                | "mesocycle_reset"
                | "phase_shift"
        )
    ) {
        return true;
    }
    // More affected days/occurrences than the low-impact window allows.
    for key in ["days_affected", "occurrences_affected"] {
        if read_abs_f64(data.get("scope").and_then(|scope| scope.get(key))).unwrap_or(0.0)
            > SCHEDULE_EXCEPTION_LOW_IMPACT_MAX_RANGE_DAYS as f64
        {
            return true;
        }
    }
    // More than two weeks affected also escalates.
    if read_abs_f64(
        data.get("scope")
            .and_then(|scope| scope.get("weeks_affected")),
    )
    .unwrap_or(0.0)
        > 2.0
    {
        return true;
    }
    false
}
/// Classifies a `training_schedule.exception.*` payload as high impact.
///
/// Explicit rewrite/clear flags or a broad scope always escalate. Otherwise
/// `cleared` events stay low impact, `upsert` events stay low impact only
/// when their selector is anchored to a bounded time window, and unknown
/// exception event types default to high impact.
fn training_schedule_exception_is_high_impact(event_type: &str, data: &Value) -> bool {
    if read_bool_like(data.get("requires_explicit_confirmation")).unwrap_or(false)
        || read_bool_like(data.get("rewrite_template")).unwrap_or(false)
        || read_bool_like(data.get("replace_future_schedule")).unwrap_or(false)
        || read_bool_like(data.get("replace_entire_weekly_template")).unwrap_or(false)
        || read_bool_like(data.get("clear_all")).unwrap_or(false)
        || schedule_exception_scope_is_high_impact(data)
    {
        return true;
    }
    match event_type {
        "training_schedule.exception.cleared" => false,
        "training_schedule.exception.upsert" => {
            // Unbounded selectors could rewrite arbitrarily far into the future.
            let selector = data.get("selector").unwrap_or(&Value::Null);
            !selector_has_bounded_temporal_anchor(selector)
        }
        _ => true,
    }
}
fn training_plan_update_is_high_impact(data: &Value) -> bool {
let scope = data
.get("change_scope")
.or_else(|| data.get("update_scope"))
.and_then(Value::as_str)
.map(|raw| raw.trim().to_lowercase());
if matches!(
scope.as_deref(),
Some(
"full_rewrite" | "structural" | "major_adjustment" | "mesocycle_reset" | "phase_shift"
)
) {
return true;
}
if data
.get("replace_entire_plan")
.and_then(Value::as_bool)
.unwrap_or(false)
|| data
.get("archive_previous_plan")
.and_then(Value::as_bool)
.unwrap_or(false)
|| data
.get("requires_explicit_confirmation")
.and_then(Value::as_bool)
.unwrap_or(false)
{
return true;
}
let volume_delta = read_plan_delta_abs(
data,
&[
"volume_delta_pct",
"planned_volume_delta_pct",
"total_volume_delta_pct",
],
)
.unwrap_or(0.0);
if volume_delta >= PLAN_UPDATE_VOLUME_DELTA_HIGH_IMPACT_ABS_GTE {
return true;
}
let intensity_delta = read_plan_delta_abs(
data,
&[
"intensity_delta_pct",
"rir_delta",
"rpe_delta",
"effort_delta_pct",
],
)
.unwrap_or(0.0);
if intensity_delta >= PLAN_UPDATE_INTENSITY_DELTA_HIGH_IMPACT_ABS_GTE {
return true;
}
let frequency_delta = read_plan_delta_abs(
data,
&["frequency_delta_per_week", "sessions_per_week_delta"],
)
.unwrap_or(0.0);
if frequency_delta >= PLAN_UPDATE_FREQUENCY_DELTA_HIGH_IMPACT_ABS_GTE {
return true;
}
let duration_delta = read_plan_delta_abs(
data,
&["cycle_length_weeks_delta", "plan_duration_weeks_delta"],
)
.unwrap_or(0.0);
duration_delta >= PLAN_UPDATE_DURATION_DELTA_WEEKS_HIGH_IMPACT_ABS_GTE
}
/// Decides whether a single event must go through the high-impact write flow.
fn is_high_impact_event(event: &Value) -> bool {
    let Some(event_type) = normalized_event_type(event) else {
        return false;
    };
    let data = event.get("data");
    match event_type.as_str() {
        // Plan updates are conditionally high impact based on payload deltas.
        "training_plan.updated" => data.is_some_and(training_plan_update_is_high_impact),
        // Schedule exceptions are judged by scope flags and temporal anchoring.
        "training_schedule.exception.upsert" | "training_schedule.exception.cleared" => data
            .is_some_and(|payload| {
                training_schedule_exception_is_high_impact(&event_type, payload)
            }),
        // Everything else falls back to the static always-high-impact list.
        other => is_always_high_impact_event_type(other),
    }
}
fn has_high_impact_events(events: &[Value]) -> bool {
events.iter().any(is_high_impact_event)
}
/// Pulls `meta.temporal_basis` out of an agent-context response body,
/// returning it only when it is a JSON object.
fn extract_temporal_basis_from_context_body(body: &Value) -> Option<Value> {
    let basis = body.pointer("/meta/temporal_basis")?;
    if basis.is_object() {
        Some(basis.clone())
    } else {
        None
    }
}
/// Fetches `meta.temporal_basis` from `GET /v1/agent/context` so a default
/// intent handshake for a high-impact write carries the server's notion of
/// "now". Exits with a hint on transport errors, non-2xx status, or a
/// missing `temporal_basis` object.
async fn fetch_temporal_basis_for_high_impact_write(
    api_url: &str,
    token: Option<&str>,
) -> serde_json::Value {
    // Keep the context call cheap: skip system sections, small token budget.
    let query = vec![
        ("include_system".to_string(), "false".to_string()),
        ("budget_tokens".to_string(), "400".to_string()),
    ];
    let (status, body) = raw_api_request_with_query(
        api_url,
        reqwest::Method::GET,
        "/v1/agent/context",
        token,
        &query,
    )
    .await
    .unwrap_or_else(|error| {
        exit_error(
            &format!("Failed to fetch /v1/agent/context for temporal_basis: {error}"),
            Some(
                "Retry once the API is reachable, or pass a full --intent-handshake-file payload.",
            ),
        )
    });
    if !(200..=299).contains(&status) {
        exit_error(
            &format!(
                "GET /v1/agent/context returned HTTP {status} while preparing temporal_basis."
            ),
            Some(
                "Use `kura agent context` to inspect the failure, or pass --intent-handshake-file.",
            ),
        );
    }
    extract_temporal_basis_from_context_body(&body).unwrap_or_else(|| {
        exit_error(
            "agent context response is missing meta.temporal_basis",
            Some(
                "Retry after `kura agent context` succeeds, or pass a full --intent-handshake-file payload.",
            ),
        )
    })
}
/// Builds a default `intent_handshake.v1` payload from the batch's event
/// types, an optional goal override, and a freshly fetched temporal basis.
fn build_default_intent_handshake(
    events: &[serde_json::Value],
    intent_goal: Option<&str>,
    temporal_basis: serde_json::Value,
) -> serde_json::Value {
    let event_types: Vec<String> = events.iter().filter_map(normalized_event_type).collect();
    // Describe the planned action from the event types when any are present.
    let planned_action = match event_types.is_empty() {
        true => "apply high-impact structured write update".to_string(),
        false => format!("write events: {}", event_types.join(", ")),
    };
    let goal = intent_goal.unwrap_or("execute requested high-impact write safely");
    json!({
        "schema_version": "intent_handshake.v1",
        "goal": goal,
        "planned_action": planned_action,
        "assumptions": ["context and request intent are current"],
        "non_goals": ["no unrelated writes outside current task scope"],
        "impact_class": "high_impact_write",
        "success_criteria": "structured write returns verification and claim_guard for this action",
        "created_at": chrono::Utc::now().to_rfc3339(),
        "handshake_id": format!("cli-hs-{}", Uuid::now_v7()),
        "temporal_basis": temporal_basis,
    })
}
/// Resolves the intent handshake for a structured write.
///
/// An explicit `--intent-handshake-file` always wins. Otherwise a default
/// handshake is synthesized (fetching the temporal basis from the API), but
/// only when the batch contains high-impact events; low-impact writes carry
/// no handshake.
async fn resolve_intent_handshake(
    api_url: &str,
    token: Option<&str>,
    events: &[serde_json::Value],
    intent_goal: Option<&str>,
    intent_handshake_file: Option<&str>,
) -> Option<serde_json::Value> {
    let explicit = resolve_optional_object_file(
        intent_handshake_file,
        "intent_handshake",
        "Provide a valid JSON object for intent_handshake.v1.",
    );
    if explicit.is_some() {
        return explicit;
    }
    if !has_high_impact_events(events) {
        return None;
    }
    let temporal_basis = fetch_temporal_basis_for_high_impact_write(api_url, token).await;
    Some(build_default_intent_handshake(
        events,
        intent_goal,
        temporal_basis,
    ))
}
/// Parses `projection_type:key` target strings into JSON objects for
/// read-after-write verification, exiting on malformed entries.
fn parse_targets(raw_targets: &[String]) -> Vec<serde_json::Value> {
    let mut targets = Vec::with_capacity(raw_targets.len());
    for raw in raw_targets {
        let Some((projection_type, key)) = raw.split_once(':') else {
            exit_error(
                &format!("Invalid --target '{raw}'"),
                Some("Use format projection_type:key, e.g. user_profile:me"),
            )
        };
        let projection_type = projection_type.trim();
        let key = key.trim();
        if projection_type.is_empty() || key.is_empty() {
            exit_error(
                &format!("Invalid --target '{raw}'"),
                Some("projection_type and key must both be non-empty."),
            );
        }
        targets.push(json!({
            "projection_type": projection_type,
            "key": key,
        }));
    }
    targets
}
fn extract_events_array(events_payload: serde_json::Value) -> Vec<serde_json::Value> {
if let Some(events) = events_payload.as_array() {
return events.to_vec();
}
if let Some(events) = events_payload
.get("events")
.and_then(|value| value.as_array())
{
return events.to_vec();
}
exit_error(
"events payload must be an array or object with events array",
Some("Example: --events-file events.json where file is [{...}] or {\"events\": [{...}]}"),
);
}
fn build_write_with_proof_request(
events: Vec<serde_json::Value>,
parsed_targets: Vec<serde_json::Value>,
verify_timeout_ms: Option<u64>,
intent_handshake: Option<serde_json::Value>,
high_impact_confirmation: Option<serde_json::Value>,
non_trivial_confirmation: Option<serde_json::Value>,
clarification_resolutions: Option<Vec<serde_json::Value>>,
session_status: Option<SessionCompletionStatus>,
conversation_draft_mode: Option<ConversationDraftMode>,
) -> serde_json::Value {
let mut request = json!({
"events": events,
"read_after_write_targets": parsed_targets,
});
if let Some(timeout) = verify_timeout_ms {
request["verify_timeout_ms"] = json!(timeout);
}
if let Some(intent_handshake) = intent_handshake {
request["intent_handshake"] = intent_handshake;
}
if let Some(high_impact_confirmation) = high_impact_confirmation {
request["high_impact_confirmation"] = high_impact_confirmation;
}
if let Some(non_trivial_confirmation) = non_trivial_confirmation {
request["non_trivial_confirmation"] = non_trivial_confirmation;
}
if let Some(clarification_resolutions) = clarification_resolutions {
request["clarification_resolutions"] = json!(clarification_resolutions);
}
if let Some(session_status) = session_status {
request["session_completion"] = json!({
"schema_version": "training_session_completion.v1",
"status": session_status.as_str(),
});
}
if let Some(conversation_draft_mode) = conversation_draft_mode {
request["conversation_draft"] = json!({
"schema_version": "agent_conversation_session_draft.v1",
"mode": conversation_draft_mode.as_str(),
});
}
request
}
#[cfg(test)]
mod tests {
use super::{
ConversationDraftMode, LogTrainingArgs, LogTurnArgs, ResumeClarificationPrompt,
SaveConfirmationMode, SessionCompletionStatus, build_context_query,
build_default_intent_handshake, build_high_impact_confirmation_from_token,
build_log_training_request, build_log_turn_request, build_logging_bootstrap_output,
build_non_trivial_confirmation_from_token, build_section_fetch_query,
build_write_with_proof_request, extract_events_array, extract_logging_bootstrap_contract,
extract_preferred_structured_write_endpoint, extract_resume_clarification_prompts,
extract_temporal_basis_from_context_body, has_high_impact_events, normalize_agent_path,
parse_method, parse_targets, select_resume_clarification_prompt,
};
use serde_json::json;
use uuid::Uuid;
#[test]
fn normalize_agent_path_accepts_relative_path() {
assert_eq!(
normalize_agent_path("evidence/event/abc"),
"/v1/agent/evidence/event/abc"
);
}
#[test]
fn normalize_agent_path_accepts_absolute_agent_path() {
assert_eq!(
normalize_agent_path("/v1/agent/context"),
"/v1/agent/context"
);
}
#[test]
fn normalize_agent_path_accepts_absolute_v2_agent_path() {
assert_eq!(
normalize_agent_path("/v2/agent/write-with-proof"),
"/v2/agent/write-with-proof"
);
}
#[test]
fn extract_preferred_structured_write_endpoint_accepts_v2_only() {
assert_eq!(
extract_preferred_structured_write_endpoint(&json!({
"preferred_structured_write_endpoint": "/v2/agent/write-with-proof"
}))
.as_deref(),
Some("/v2/agent/write-with-proof")
);
assert!(
extract_preferred_structured_write_endpoint(&json!({
"preferred_write_endpoint": "/v1/agent/write-with-proof"
}))
.is_none()
);
assert!(
extract_preferred_structured_write_endpoint(&json!({
"preferred_write_endpoint": "https://bad.example"
}))
.is_none()
);
assert_eq!(
extract_preferred_structured_write_endpoint(&json!({
"preferred_write_endpoint": "/v2/agent/write-with-proof"
}))
.as_deref(),
Some("/v2/agent/write-with-proof")
);
}
#[test]
fn extract_logging_bootstrap_contract_reads_logging_node() {
let capabilities = json!({
"task_bootstrap_contracts": {
"logging": {
"schema_version": "agent_logging_bootstrap_contract.v1",
"task_family": "logging"
}
}
});
let contract =
extract_logging_bootstrap_contract(&capabilities).expect("logging bootstrap contract");
assert_eq!(
contract["schema_version"],
json!("agent_logging_bootstrap_contract.v1")
);
assert_eq!(contract["task_family"], json!("logging"));
}
#[test]
fn build_logging_bootstrap_output_selects_one_intent_recipe() {
let contract = json!({
"schema_version": "agent_logging_bootstrap_contract.v1",
"task_family": "logging",
"bootstrap_surface": "/v1/agent/capabilities",
"intent_recipes": [
{
"intent_id": "log_conversation",
"endpoint": "/v3/agent/evidence",
"cli_entrypoint": "kura log"
}
],
"save_states": [{"save_state": "received"}],
"upgrade_hints": [{"surface": "/v1/events"}],
"integrity_guards": ["guard"]
});
let output = build_logging_bootstrap_output(&contract, Some("log_conversation"))
.expect("bootstrap output");
assert_eq!(
output["intent_recipe"]["intent_id"],
json!("log_conversation")
);
assert_eq!(output["intent_recipe"]["cli_entrypoint"], json!("kura log"));
assert_eq!(output["bootstrap_surface"], json!("/v1/agent/capabilities"));
assert_eq!(output["save_states"][0]["save_state"], json!("received"));
}
#[test]
fn build_log_training_request_uses_dedicated_training_shape() {
let request = build_log_training_request(&LogTrainingArgs {
data: Some(
json!({
"date": "2026-03-20",
"entries": [
{
"block_type": "repetition_sets",
"exercise": {"label": "Back Squat"},
"sets": [{"count": 5, "reps": 5, "weight_kg": 100, "rir": 2}]
}
],
"session_id": "session:2026-03-20-lower"
})
.to_string(),
),
request_file: None,
});
assert_eq!(
request["schema_version"],
json!("routine_training_tool_contract.v1")
);
assert_eq!(request["date"], json!("2026-03-20"));
assert_eq!(
request["entries"][0]["block_type"],
json!("repetition_sets")
);
assert_eq!(
request["entries"][0]["exercise"]["label"],
json!("Back Squat")
);
assert_eq!(request["source_context"]["surface"], json!("cli"));
assert_eq!(
request["source_context"]["command_family"],
json!("log_training")
);
}
#[test]
fn build_log_turn_request_uses_evidence_ingress_shape() {
let request = build_log_turn_request(&LogTurnArgs {
message: Some("bench 4x5 80".to_string()),
message_file: None,
session_id: Some("2026-03-15-upper".to_string()),
modality: None,
recorded_at: Some("2026-03-15T09:30:00+01:00".to_string()),
observed_at: None,
idempotency_key: Some("idem-123".to_string()),
});
assert_eq!(
request["schema_version"],
json!("agent_evidence_ingress_request.v1")
);
assert_eq!(request["text_evidence"], json!("bench 4x5 80"));
assert_eq!(request["modality"], json!("chat_message"));
assert_eq!(
request["session_hint"]["session_id"],
json!("2026-03-15-upper")
);
assert_eq!(request["idempotency_key"], json!("idem-123"));
assert_eq!(request["source"]["command_family"], json!("log_turn"));
}
#[test]
fn extract_resume_clarification_prompts_reads_blocked_response_shape() {
let prompt_id = Uuid::now_v7();
let prompts = extract_resume_clarification_prompts(&json!({
"schema_version": "write_preflight.v1",
"status": "blocked",
"blockers": [
{
"code": "logging_intent_clarification_required",
"details": {
"clarification_prompts": [
{
"prompt_id": prompt_id,
"scope_kind": "training_vs_test",
"accepted_resolution_fields": ["resolved_route_family"]
}
]
}
}
]
}));
assert_eq!(
prompts,
vec![ResumeClarificationPrompt {
prompt_id,
scope_kind: "training_vs_test".to_string(),
accepted_resolution_fields: vec!["resolved_route_family".to_string()],
}]
);
}
#[test]
fn select_resume_clarification_prompt_accepts_single_prompt_without_explicit_id() {
let prompt = ResumeClarificationPrompt {
prompt_id: Uuid::now_v7(),
scope_kind: "training_vs_test".to_string(),
accepted_resolution_fields: vec!["resolved_route_family".to_string()],
};
let selected = select_resume_clarification_prompt(std::slice::from_ref(&prompt), None)
.expect("prompt");
assert_eq!(selected, prompt);
}
#[test]
fn parse_method_accepts_standard_http_methods() {
for method in &[
"get", "GET", "post", "PUT", "delete", "patch", "head", "OPTIONS",
] {
let parsed = parse_method(method);
assert!(!parsed.as_str().is_empty());
}
}
#[test]
fn parse_targets_accepts_projection_type_key_format() {
let parsed = parse_targets(&[
"user_profile:me".to_string(),
"training_timeline:overview".to_string(),
]);
assert_eq!(parsed[0]["projection_type"], "user_profile");
assert_eq!(parsed[0]["key"], "me");
assert_eq!(parsed[1]["projection_type"], "training_timeline");
assert_eq!(parsed[1]["key"], "overview");
}
#[test]
fn extract_events_array_supports_plain_array() {
let events = extract_events_array(json!([
{"event_type":"set.logged"},
{"event_type":"metric.logged"}
]));
assert_eq!(events.len(), 2);
}
#[test]
fn extract_events_array_supports_object_wrapper() {
let events = extract_events_array(json!({
"events": [{"event_type":"set.logged"}]
}));
assert_eq!(events.len(), 1);
}
#[test]
fn build_write_with_proof_request_serializes_expected_fields() {
let request = build_write_with_proof_request(
vec![json!({"event_type":"set.logged"})],
vec![json!({"projection_type":"user_profile","key":"me"})],
Some(1200),
None,
None,
None,
None,
None,
None,
);
assert_eq!(request["events"].as_array().unwrap().len(), 1);
assert_eq!(
request["read_after_write_targets"]
.as_array()
.unwrap()
.len(),
1
);
assert_eq!(request["verify_timeout_ms"], 1200);
}
#[test]
fn build_write_with_proof_request_includes_non_trivial_confirmation_when_present() {
let request = build_write_with_proof_request(
vec![json!({"event_type":"set.logged"})],
vec![json!({"projection_type":"user_profile","key":"me"})],
None,
None,
None,
Some(json!({
"schema_version": "non_trivial_confirmation.v1",
"confirmed": true,
"confirmed_at": "2026-02-25T12:00:00Z",
"confirmation_token": "abc"
})),
None,
None,
None,
);
assert_eq!(
request["non_trivial_confirmation"]["schema_version"],
"non_trivial_confirmation.v1"
);
assert_eq!(
request["non_trivial_confirmation"]["confirmation_token"],
"abc"
);
}
#[test]
fn build_write_with_proof_request_includes_high_impact_fields_when_present() {
let request = build_write_with_proof_request(
vec![json!({"event_type":"training_schedule.exception.upsert"})],
vec![json!({"projection_type":"training_schedule","key":"effective"})],
None,
Some(json!({
"schema_version": "intent_handshake.v1",
"goal": "shift deload start",
"impact_class": "high_impact_write",
"temporal_basis": {
"schema_version": "temporal_basis.v1",
"context_generated_at": "2026-03-07T16:00:00Z",
"timezone": "Europe/Berlin",
"today_local_date": "2026-03-07"
}
})),
Some(json!({
"schema_version": "high_impact_confirmation.v1",
"confirmed": true,
"confirmed_at": "2026-03-07T16:05:00Z",
"confirmation_token": "hi-123"
})),
None,
None,
None,
None,
);
assert_eq!(
request["intent_handshake"]["schema_version"],
"intent_handshake.v1"
);
assert_eq!(
request["high_impact_confirmation"]["confirmation_token"],
"hi-123"
);
}
#[test]
fn build_write_with_proof_request_includes_clarification_resolutions_when_present() {
let request = build_write_with_proof_request(
vec![json!({"event_type":"set.logged"})],
vec![json!({"projection_type":"training_timeline","key":"today"})],
None,
None,
None,
None,
Some(vec![json!({
"schema_version": "logging_clarification_resolution.v1",
"prompt_id": "3f6e2b68-63a6-44c2-a4df-73d80f3b23e0",
"resolved_route_family": "training",
"resolved_at": "2026-03-08T10:00:00Z"
})]),
None,
None,
);
assert_eq!(
request["clarification_resolutions"]
.as_array()
.unwrap()
.len(),
1
);
assert_eq!(
request["clarification_resolutions"][0]["resolved_route_family"],
"training"
);
}
#[test]
fn build_write_with_proof_request_includes_session_completion_when_present() {
let request = build_write_with_proof_request(
vec![json!({"event_type":"set.logged"})],
vec![json!({"projection_type":"training_timeline","key":"today"})],
None,
None,
None,
None,
None,
Some(SessionCompletionStatus::Ongoing),
None,
);
assert_eq!(
request["session_completion"]["schema_version"],
"training_session_completion.v1"
);
assert_eq!(request["session_completion"]["status"], "ongoing");
}
#[test]
fn build_write_with_proof_request_includes_conversation_draft_when_present() {
let request = build_write_with_proof_request(
vec![json!({"event_type":"session.completed"})],
Vec::new(),
None,
None,
None,
None,
None,
Some(SessionCompletionStatus::Ongoing),
Some(ConversationDraftMode::Append),
);
assert_eq!(
request["conversation_draft"]["schema_version"],
"agent_conversation_session_draft.v1"
);
assert_eq!(request["conversation_draft"]["mode"], "append");
}
#[test]
fn build_non_trivial_confirmation_from_token_uses_expected_shape() {
let payload = build_non_trivial_confirmation_from_token("tok-123");
assert_eq!(payload["schema_version"], "non_trivial_confirmation.v1");
assert_eq!(payload["confirmed"], true);
assert_eq!(payload["confirmation_token"], "tok-123");
assert!(payload["confirmed_at"].as_str().is_some());
}
#[test]
fn build_high_impact_confirmation_from_token_uses_expected_shape() {
let payload = build_high_impact_confirmation_from_token("tok-456");
assert_eq!(payload["schema_version"], "high_impact_confirmation.v1");
assert_eq!(payload["confirmed"], true);
assert_eq!(payload["confirmation_token"], "tok-456");
assert!(payload["confirmed_at"].as_str().is_some());
}
#[test]
fn extract_temporal_basis_from_context_body_reads_meta_field() {
let temporal_basis = extract_temporal_basis_from_context_body(&json!({
"meta": {
"temporal_basis": {
"schema_version": "temporal_basis.v1",
"timezone": "Europe/Berlin",
"today_local_date": "2026-03-07"
}
}
}))
.expect("temporal_basis must be extracted");
assert_eq!(temporal_basis["schema_version"], "temporal_basis.v1");
assert_eq!(temporal_basis["timezone"], "Europe/Berlin");
}
#[test]
fn build_default_intent_handshake_uses_event_types_and_temporal_basis() {
let handshake = build_default_intent_handshake(
&[json!({"event_type":"training_schedule.exception.upsert"})],
Some("shift today's session"),
json!({
"schema_version": "temporal_basis.v1",
"context_generated_at": "2026-03-07T16:00:00Z",
"timezone": "Europe/Berlin",
"today_local_date": "2026-03-07"
}),
);
assert_eq!(handshake["schema_version"], "intent_handshake.v1");
assert_eq!(handshake["goal"], "shift today's session");
assert_eq!(handshake["impact_class"], "high_impact_write");
assert_eq!(
handshake["temporal_basis"]["today_local_date"],
"2026-03-07"
);
}
#[test]
fn high_impact_classification_keeps_bounded_schedule_exception_low_impact() {
    // An exception scoped to one local date + session name is bounded, so it
    // must not escalate to the high-impact confirmation flow.
    let bounded_exception = json!({
        "event_type": "training_schedule.exception.upsert",
        "data": {
            "exception_id": "deload-start-today",
            "operation": "patch",
            "selector": {
                "local_date": "2026-03-07",
                "session_name": "Technik + Power"
            },
            "progression_override": {
                "deload_active": true,
                "phase": "deload",
                "volume_delta_pct": -30
            }
        }
    });
    assert!(!has_high_impact_events(&[bounded_exception]));
}
#[test]
fn high_impact_classification_escalates_unbounded_schedule_exception() {
    // No `local_date` in the selector plus a template rewrite means the
    // exception touches an unbounded set of sessions — that must escalate.
    let unbounded_exception = json!({
        "event_type": "training_schedule.exception.upsert",
        "data": {
            "exception_id": "rewrite-future-saturdays",
            "operation": "patch",
            "selector": {
                "session_name": "Technik + Power"
            },
            "rewrite_template": true
        }
    });
    assert!(has_high_impact_events(&[unbounded_exception]));
}
#[test]
fn save_confirmation_mode_serializes_expected_values() {
    // Each variant must serialize to its lowercase wire value.
    let cases = [
        (SaveConfirmationMode::Auto, "auto"),
        (SaveConfirmationMode::Always, "always"),
        (SaveConfirmationMode::Never, "never"),
    ];
    for (mode, expected) in cases {
        assert_eq!(mode.as_str(), expected);
    }
}
#[test]
fn build_context_query_includes_budget_tokens_when_present() {
    // With every optional parameter supplied, each must appear as a
    // serialized key/value pair — including `budget_tokens`.
    let query = build_context_query(
        Some(3),
        Some(2),
        Some(1),
        Some("readiness check".to_string()),
        Some(false),
        Some(900),
    );
    let expected_pairs = [
        ("exercise_limit", "3"),
        ("strength_limit", "2"),
        ("custom_limit", "1"),
        ("task_intent", "readiness check"),
        ("include_system", "false"),
        ("budget_tokens", "900"),
    ];
    for (key, value) in expected_pairs {
        assert!(query.contains(&(key.to_string(), value.to_string())));
    }
}
#[test]
fn build_context_query_supports_section_index_parity_params() {
    // The section-index command reuses the same query builder; verify the
    // parity parameter set serializes the same way.
    let query = build_context_query(
        Some(5),
        Some(5),
        Some(10),
        Some("startup".to_string()),
        Some(false),
        Some(1200),
    );
    let expected_pairs = [
        ("exercise_limit", "5"),
        ("strength_limit", "5"),
        ("custom_limit", "10"),
        ("task_intent", "startup"),
        ("include_system", "false"),
        ("budget_tokens", "1200"),
    ];
    for (key, value) in expected_pairs {
        assert!(query.contains(&(key.to_string(), value.to_string())));
    }
}
#[test]
fn build_section_fetch_query_serializes_optional_params() {
    // With all optional params present, the query must contain exactly these
    // pairs, in this order.
    let query = build_section_fetch_query(
        "projections.exercise_progression".to_string(),
        Some(50),
        Some("abc123".to_string()),
        Some("data,meta".to_string()),
        Some("bench plateau".to_string()),
    );
    let expected: Vec<(String, String)> = [
        ("section", "projections.exercise_progression"),
        ("limit", "50"),
        ("cursor", "abc123"),
        ("fields", "data,meta"),
        ("task_intent", "bench plateau"),
    ]
    .into_iter()
    .map(|(key, value)| (key.to_string(), value.to_string()))
    .collect();
    assert_eq!(query, expected);
}
}