mod cli_args;
mod command_handlers;
use axum::{
extract::{DefaultBodyLimit, Path as AxumPath, State},
http::{header, HeaderValue, StatusCode},
response::{IntoResponse, Response},
routing::{get, put},
Json, Router,
};
use chrono::Utc;
use clap::{CommandFactory, Parser};
use clap_complete::generate;
use ed25519_dalek::{Signature, Signer, SigningKey, Verifier, VerifyingKey};
use hmac::{Hmac, Mac};
use rand::rngs::OsRng;
use rmcp::model::{Prompt, Resource, ServerInfo, Tool};
use serde_json::{json, Value};
use sha2::{Digest, Sha256};
use std::fs;
use std::io::BufRead;
use std::io::IsTerminal;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::process::Command as StdCommand;
use std::time::{Duration, Instant};
use std::collections::{HashMap, HashSet};
use tower::limit::ConcurrencyLimitLayer;
use tower_http::limit::RequestBodyLimitLayer;
use cli_args::{
BakeAction, Cli, Commands, DiffOutputFormat, DiscoverAction, InitAction, InspectAction,
McpAction, McpSessionAction, McpSessionCli, ScaffoldAction, SkillsAction,
WatchNotificationTemplate,
};
use command_handlers::{
cmd_api, cmd_skills_info, cmd_skills_list, cmd_skills_run, ApiCommandOptions, SkillListOptions,
};
use sxmc::auth::secrets::{resolve_header, resolve_secret};
use sxmc::bake::config::SourceType;
use sxmc::bake::{BakeConfig, BakeStore};
use sxmc::cli_surfaces::{self, AiClientProfile, AiCoverage, ArtifactMode};
use sxmc::client::{api, codebase, database, graphql, mcp_http, mcp_stdio, openapi, traffic};
use sxmc::discovery_snapshots;
use sxmc::error::Result;
use sxmc::output;
use sxmc::paths::{InstallPaths, InstallScope};
use sxmc::projection::{apply_offset_limit, retain_object_fields};
use sxmc::security;
use sxmc::server::{self, HttpServeLimits};
use sxmc::skills::{discovery, generator, install as skill_install, parser};
/// Schema tag embedded in exported profile bundle documents.
const PROFILE_BUNDLE_SCHEMA: &str = "sxmc_profile_bundle_v1";
/// Schema tag for profile corpus documents.
const PROFILE_CORPUS_SCHEMA: &str = "sxmc_profile_corpus_v1";
/// Schema tag for the profile registry document.
const PROFILE_REGISTRY_SCHEMA: &str = "sxmc_profile_registry_v1";
/// Schema tag for persisted sync-state documents.
const SYNC_STATE_SCHEMA: &str = "sxmc_sync_state_v1";
/// Age in days after which a profile is treated as stale.
const PROFILE_STALE_DAYS: i64 = 30;
/// Identifier for HMAC-SHA256 profile-bundle signatures.
const PROFILE_BUNDLE_SIGNATURE_ALGORITHM: &str = "hmac-sha256";
/// Identifier for Ed25519 profile-bundle signatures.
const PROFILE_BUNDLE_SIGNATURE_ALGORITHM_ED25519: &str = "ed25519";
/// Latency threshold in milliseconds for flagging a baked-server health
/// probe as slow. NOTE(review): usage is outside this chunk — confirm at
/// the call sites.
const BAKED_HEALTH_SLOW_MS: u64 = 1_000;
/// Resolve the skill search paths: use the explicit list when the caller
/// supplied one, otherwise fall back to the discovery defaults.
fn resolve_paths(paths: Option<Vec<PathBuf>>) -> Vec<PathBuf> {
    match paths {
        Some(explicit) => explicit,
        None => discovery::default_paths(),
    }
}
/// Resolve the root directory for skill installation.
///
/// Prefers the explicit `root`; otherwise uses the current working
/// directory, degrading to `"."` if that cannot be determined. Always
/// yields `Some` — the `Option` return mirrors the call sites' shape.
fn resolve_skills_install_root(root: Option<PathBuf>) -> Option<PathBuf> {
    let resolved = match root {
        Some(explicit) => explicit,
        None => std::env::current_dir().unwrap_or_else(|_| PathBuf::from(".")),
    };
    Some(resolved)
}
/// Parse `key=value` CLI arguments into a JSON object.
///
/// Values that parse as JSON are kept typed (numbers, booleans, nested
/// JSON); anything else becomes a plain string. Arguments without `=`
/// are silently skipped; a duplicate key overwrites the earlier value.
fn parse_kv_args(args: &[String]) -> serde_json::Map<String, serde_json::Value> {
    args.iter()
        .filter_map(|arg| arg.split_once('='))
        .map(|(key, value)| {
            let parsed = serde_json::from_str(value)
                .unwrap_or_else(|_| serde_json::Value::String(value.to_string()));
            (key.to_string(), parsed)
        })
        .collect()
}
/// Parse `NAME=value` strings into (name, value) pairs, preserving order.
/// Entries without `=` are dropped; only the first `=` splits, so values
/// may themselves contain `=`.
fn parse_env_vars(vars: &[String]) -> Vec<(String, String)> {
    let mut pairs = Vec::with_capacity(vars.len());
    for var in vars {
        if let Some((name, value)) = var.split_once('=') {
            pairs.push((name.to_string(), value.to_string()));
        }
    }
    pairs
}
/// Parse `key=value` arguments into a string-to-string map.
/// Entries without `=` are ignored; a duplicate key keeps the last value.
fn parse_string_kv_args(args: &[String]) -> HashMap<String, String> {
    args.iter()
        .filter_map(|arg| arg.split_once('='))
        .map(|(key, value)| (key.to_string(), value.to_string()))
        .collect()
}
/// Resolve each raw header argument (which may reference secrets) into a
/// (name, value) pair; fails on the first header that cannot be resolved.
fn parse_headers(headers: &[String]) -> Result<Vec<(String, String)>> {
    let mut resolved = Vec::with_capacity(headers.len());
    for header in headers {
        resolved.push(resolve_header(header)?);
    }
    Ok(resolved)
}
/// Resolve an optional secret reference; `None` passes through unchanged.
fn parse_optional_secret(secret: Option<String>) -> Result<Option<String>> {
    match secret {
        Some(value) => resolve_secret(&value).map(Some),
        None => Ok(None),
    }
}
/// Convert an optional whole-second timeout into a `Duration`.
fn parse_timeout(timeout_seconds: Option<u64>) -> Option<Duration> {
    Some(Duration::from_secs(timeout_seconds?))
}
/// Print a human-readable report for a database discovery JSON document.
///
/// Expects the shape emitted by the database discovery client: top-level
/// `database_type`, `source`, `count`, and an `entries` array whose items
/// carry `columns`, `foreign_keys`, and `indexes`. Missing or mistyped
/// fields degrade to placeholder text instead of panicking.
fn print_db_discovery_report(value: &Value) {
    println!(
        "{} database: {}",
        value["database_type"].as_str().unwrap_or("unknown"),
        value["source"].as_str().unwrap_or("<unknown>")
    );
    println!(
        "Discovered {} table/view entries",
        value["count"].as_u64().unwrap_or(0)
    );
    if let Some(entries) = value["entries"].as_array() {
        for entry in entries {
            // Prefer the schema-qualified name; fall back to the bare name.
            println!(
                "- {} ({}, {} columns, {} foreign keys, {} indexes)",
                entry["qualified_name"]
                    .as_str()
                    .or_else(|| entry["name"].as_str())
                    .unwrap_or("<unknown>"),
                entry["object_type"].as_str().unwrap_or("object"),
                entry["column_count"].as_u64().unwrap_or(0),
                entry["foreign_key_count"].as_u64().unwrap_or(0),
                entry["index_count"].as_u64().unwrap_or(0)
            );
            if let Some(columns) = entry["columns"].as_array() {
                for column in columns {
                    // Column line: name, type, then optional flag suffixes.
                    println!(
                        " - {}: {}{}{}",
                        column["name"].as_str().unwrap_or("<unknown>"),
                        column["data_type"].as_str().unwrap_or("unknown"),
                        if column["not_null"].as_bool().unwrap_or(false) {
                            " not-null"
                        } else {
                            ""
                        },
                        if column["primary_key"].as_bool().unwrap_or(false) {
                            " primary-key"
                        } else {
                            ""
                        }
                    );
                }
            }
            if let Some(foreign_keys) = entry["foreign_keys"].as_array() {
                for foreign_key in foreign_keys {
                    let references_schema = foreign_key["references_schema"]
                        .as_str()
                        .unwrap_or_default();
                    let references_table = foreign_key["references_table"]
                        .as_str()
                        .unwrap_or("<unknown>");
                    let references_column = foreign_key["references_column"]
                        .as_str()
                        .unwrap_or("<unknown>");
                    // Only prefix the schema when one was reported.
                    let qualified_target = if references_schema.is_empty() {
                        references_table.to_string()
                    } else {
                        format!("{references_schema}.{references_table}")
                    };
                    println!(
                        " - foreign key {} -> {}.{}",
                        foreign_key["column"].as_str().unwrap_or("<unknown>"),
                        qualified_target,
                        references_column
                    );
                }
            }
            if let Some(indexes) = entry["indexes"].as_array() {
                for index in indexes {
                    println!(
                        " - index {}",
                        index["name"].as_str().unwrap_or("<unknown>")
                    );
                }
            }
        }
    }
}
/// Print a human-readable report for a codebase discovery JSON document.
///
/// Expects the shape emitted by the codebase discovery client: `root`,
/// per-category counts, `project_kinds`, `entrypoints`, and
/// `recommended_commands`. Missing fields degrade to placeholders/zero.
fn print_codebase_discovery_report(value: &Value) {
    println!(
        "Codebase: {}",
        value["root"].as_str().unwrap_or("<unknown>")
    );
    println!(
        "Discovered {} manifests, {} task runners, {} entrypoints, {} configs",
        value["manifest_count"].as_u64().unwrap_or(0),
        value["task_runner_count"].as_u64().unwrap_or(0),
        value["entrypoint_count"].as_u64().unwrap_or(0),
        value["config_count"].as_u64().unwrap_or(0)
    );
    if let Some(project_kinds) = value["project_kinds"].as_array() {
        if !project_kinds.is_empty() {
            // Non-string entries are silently skipped by filter_map.
            let kinds = project_kinds
                .iter()
                .filter_map(Value::as_str)
                .collect::<Vec<_>>()
                .join(", ");
            println!("Project kinds: {kinds}");
        }
    }
    if let Some(entrypoints) = value["entrypoints"].as_array() {
        for entry in entrypoints {
            println!(
                "- {} ({})",
                entry["name"].as_str().unwrap_or("<unknown>"),
                entry["kind"].as_str().unwrap_or("entrypoint")
            );
        }
    }
    if let Some(recommended_commands) = value["recommended_commands"].as_array() {
        if !recommended_commands.is_empty() {
            println!("Recommended commands:");
            // Cap at five to keep the summary short.
            for command in recommended_commands.iter().take(5) {
                println!(" - {}", command["command"].as_str().unwrap_or("<unknown>"));
            }
        }
    }
}
/// Print a human-readable report for a codebase diff JSON document.
///
/// Counts how many diff sections changed (boolean count-change flags plus
/// non-empty added/removed arrays), then lists a subset of the
/// added/removed entries. Missing fields are treated as "unchanged".
fn print_codebase_diff_report(value: &Value) {
    println!(
        "Codebase diff: {} -> {}",
        value["before_root"].as_str().unwrap_or("<unknown>"),
        value["after_root"].as_str().unwrap_or("<unknown>")
    );
    let mut changed_sections = 0u64;
    // Boolean flags: each true flag counts as one changed section.
    for field in [
        "manifest_count_changed",
        "task_runner_count_changed",
        "entrypoint_count_changed",
        "config_count_changed",
    ] {
        if value[field].as_bool().unwrap_or(false) {
            changed_sections += 1;
        }
    }
    // Array fields: each non-empty added/removed list counts as one.
    for field in [
        "project_kinds_added",
        "project_kinds_removed",
        "manifests_added",
        "manifests_removed",
        "task_runners_added",
        "task_runners_removed",
        "entrypoints_added",
        "entrypoints_removed",
        "configs_added",
        "configs_removed",
        "recommended_commands_added",
        "recommended_commands_removed",
    ] {
        if value[field]
            .as_array()
            .map(|items| !items.is_empty())
            .unwrap_or(false)
        {
            changed_sections += 1;
        }
    }
    println!("Changed sections: {changed_sections}");
    // Helper: print a labelled list only when the array is non-empty.
    let print_list = |label: &str, field: &str| {
        if let Some(items) = value[field].as_array() {
            if !items.is_empty() {
                println!("{label}:");
                for item in items {
                    println!(" - {}", item.as_str().unwrap_or("<unknown>"));
                }
            }
        }
    };
    print_list("Project kinds added", "project_kinds_added");
    print_list("Project kinds removed", "project_kinds_removed");
    print_list("Entrypoints added", "entrypoints_added");
    print_list("Entrypoints removed", "entrypoints_removed");
    print_list("Recommended commands added", "recommended_commands_added");
    print_list(
        "Recommended commands removed",
        "recommended_commands_removed",
    );
}
/// Print a human-readable summary of a traffic-capture discovery document:
/// capture kind and source, aggregate counts, and at most the first ten
/// grouped endpoints. Missing fields degrade to placeholders/zero.
fn print_traffic_discovery_report(value: &Value) {
    let capture_kind = value["capture_kind"].as_str().unwrap_or("unknown");
    let source = value["source"].as_str().unwrap_or("<unknown>");
    println!("Traffic capture ({}): {}", capture_kind, source);
    let endpoint_count = value["endpoint_count"].as_u64().unwrap_or(0);
    let request_count = value["request_count"].as_u64().unwrap_or(0);
    println!(
        "Discovered {} grouped endpoints across {} requests",
        endpoint_count, request_count
    );
    let endpoints = value["endpoints"].as_array();
    for endpoint in endpoints.into_iter().flatten().take(10) {
        let key = endpoint["key"].as_str().unwrap_or("<unknown>");
        let count = endpoint["count"].as_u64().unwrap_or(0);
        println!("- {} [{} request(s)]", key, count);
    }
}
/// Print a human-readable report for a traffic diff JSON document.
///
/// Counts changed sections (boolean count-change flags plus non-empty
/// added/removed arrays), then lists the added/removed items per section.
fn print_traffic_diff_report(value: &Value) {
    println!(
        "Traffic diff: {} -> {}",
        value["before_source"].as_str().unwrap_or("<unknown>"),
        value["after_source"].as_str().unwrap_or("<unknown>")
    );
    let mut changed_sections = 0u64;
    // Boolean flags: each true flag counts as one changed section.
    for field in ["request_count_changed", "endpoint_count_changed"] {
        if value[field].as_bool().unwrap_or(false) {
            changed_sections += 1;
        }
    }
    // Array fields: each non-empty added/removed list counts as one.
    for field in [
        "endpoints_added",
        "endpoints_removed",
        "status_codes_added",
        "status_codes_removed",
        "content_types_added",
        "content_types_removed",
    ] {
        if value[field]
            .as_array()
            .map(|items| !items.is_empty())
            .unwrap_or(false)
        {
            changed_sections += 1;
        }
    }
    println!("Changed sections: {changed_sections}");
    // Helper: print a labelled list only when the array is non-empty.
    let print_list = |label: &str, field: &str| {
        if let Some(items) = value[field].as_array() {
            if !items.is_empty() {
                println!("{label}:");
                for item in items {
                    println!(" - {}", item.as_str().unwrap_or("<unknown>"));
                }
            }
        }
    };
    print_list("Endpoints added", "endpoints_added");
    print_list("Endpoints removed", "endpoints_removed");
    print_list("Status codes added", "status_codes_added");
    print_list("Status codes removed", "status_codes_removed");
    print_list("Content types added", "content_types_added");
    print_list("Content types removed", "content_types_removed");
}
/// Print a human-readable report for a GraphQL schema diff JSON document.
///
/// Counts changed sections (boolean change flags plus non-empty
/// added/removed arrays), then lists added/removed operations and types.
fn print_graphql_diff_report(value: &Value) {
    println!(
        "GraphQL diff: {} -> {}",
        value["before_url"].as_str().unwrap_or("<unknown>"),
        value["after_url"].as_str().unwrap_or("<unknown>")
    );
    let mut changed_sections = 0u64;
    // Boolean flags: each true flag counts as one changed section.
    for field in [
        "query_type_changed",
        "mutation_type_changed",
        "operation_count_changed",
        "type_count_changed",
    ] {
        if value[field].as_bool().unwrap_or(false) {
            changed_sections += 1;
        }
    }
    // Array fields: each non-empty added/removed list counts as one.
    for field in [
        "operations_added",
        "operations_removed",
        "types_added",
        "types_removed",
    ] {
        if value[field]
            .as_array()
            .map(|items| !items.is_empty())
            .unwrap_or(false)
        {
            changed_sections += 1;
        }
    }
    println!("Changed sections: {changed_sections}");
    // Helper: print a labelled list only when the array is non-empty.
    let print_list = |label: &str, field: &str| {
        if let Some(items) = value[field].as_array() {
            if !items.is_empty() {
                println!("{label}:");
                for item in items {
                    println!(" - {}", item.as_str().unwrap_or("<unknown>"));
                }
            }
        }
    };
    print_list("Operations added", "operations_added");
    print_list("Operations removed", "operations_removed");
    print_list("Types added", "types_added");
    print_list("Types removed", "types_removed");
}
/// A live MCP client connection over one of the supported transports.
enum ConnectedMcpClient {
    /// Child-process transport speaking MCP over stdin/stdout.
    Stdio(mcp_stdio::StdioClient),
    /// HTTP transport.
    Http(mcp_http::HttpClient),
}
impl ConnectedMcpClient {
    /// Connect to the MCP server described by a bake config.
    ///
    /// Only `Stdio` and `Http` source types are MCP connections; any other
    /// source type is rejected with an explanatory error.
    async fn connect(config: &BakeConfig) -> Result<Self> {
        match config.source_type {
            SourceType::Stdio => {
                let env = parse_env_vars(&config.env_vars);
                Ok(Self::Stdio(
                    mcp_stdio::StdioClient::connect(
                        &config.source,
                        &env,
                        config.base_dir.as_deref(),
                    )
                    .await?,
                ))
            }
            SourceType::Http => {
                let headers = parse_headers(&config.auth_headers)?;
                Ok(Self::Http(
                    mcp_http::HttpClient::connect(
                        &config.source,
                        &headers,
                        parse_timeout(config.timeout_seconds),
                    )
                    .await?,
                ))
            }
            _ => Err(sxmc::error::SxmcError::Other(format!(
                "Bake '{}' is not an MCP connection. Only stdio/http bakes are supported.",
                config.name
            ))),
        }
    }
    /// List the server's tools, delegating to the underlying transport.
    async fn list_tools(&self) -> Result<Vec<Tool>> {
        match self {
            Self::Stdio(client) => client.list_tools().await,
            Self::Http(client) => client.list_tools().await,
        }
    }
    /// Server info captured during initialization, if any.
    fn server_info(&self) -> Option<ServerInfo> {
        match self {
            Self::Stdio(client) => client.server_info(),
            Self::Http(client) => client.server_info(),
        }
    }
    /// Invoke a tool by name with a JSON-object argument map.
    async fn call_tool(
        &self,
        name: &str,
        arguments: serde_json::Map<String, serde_json::Value>,
    ) -> Result<rmcp::model::CallToolResult> {
        match self {
            Self::Stdio(client) => client.call_tool(name, arguments).await,
            Self::Http(client) => client.call_tool(name, arguments).await,
        }
    }
    /// List the server's prompts.
    async fn list_prompts(&self) -> Result<Vec<Prompt>> {
        match self {
            Self::Stdio(client) => client.list_prompts().await,
            Self::Http(client) => client.list_prompts().await,
        }
    }
    /// Fetch a prompt by name, optionally passing arguments.
    async fn get_prompt(
        &self,
        name: &str,
        arguments: Option<serde_json::Map<String, serde_json::Value>>,
    ) -> Result<rmcp::model::GetPromptResult> {
        match self {
            Self::Stdio(client) => client.get_prompt(name, arguments).await,
            Self::Http(client) => client.get_prompt(name, arguments).await,
        }
    }
    /// List the server's resources.
    async fn list_resources(&self) -> Result<Vec<Resource>> {
        match self {
            Self::Stdio(client) => client.list_resources().await,
            Self::Http(client) => client.list_resources().await,
        }
    }
    /// Read a resource by URI.
    async fn read_resource(&self, uri: &str) -> Result<rmcp::model::ReadResourceResult> {
        match self {
            Self::Stdio(client) => client.read_resource(uri).await,
            Self::Http(client) => client.read_resource(uri).await,
        }
    }
    /// Shut down the connection, consuming the client.
    async fn close(self) -> Result<()> {
        match self {
            Self::Stdio(client) => client.close().await,
            Self::Http(client) => client.close().await,
        }
    }
}
/// Return the bake configs that represent MCP connections (stdio or http),
/// preserving the store's listing order.
fn baked_mcp_servers(store: &BakeStore) -> Vec<&BakeConfig> {
    let mut servers = Vec::new();
    for config in store.list() {
        if matches!(config.source_type, SourceType::Stdio | SourceType::Http) {
            servers.push(config);
        }
    }
    servers
}
fn get_baked_mcp_server(store: &BakeStore, name: &str) -> Result<BakeConfig> {
let config = store.get(name).cloned().ok_or_else(|| {
sxmc::error::SxmcError::Other(format!(
"Bake '{}' not found. Use `sxmc mcp servers` to see available MCP connections.",
name
))
})?;
if !matches!(config.source_type, SourceType::Stdio | SourceType::Http) {
return Err(sxmc::error::SxmcError::Other(format!(
"Bake '{}' uses {:?}, not stdio/http MCP.",
name, config.source_type
)));
}
Ok(config)
}
/// Close the client and combine the close outcome with an earlier
/// operation result. The operation's own error always wins; a close
/// failure only surfaces when the operation itself succeeded.
async fn finish_connected_mcp_client<T>(
    client: ConnectedMcpClient,
    result: Result<T>,
) -> Result<T> {
    // Always attempt the close, even when the operation failed.
    let close_result = client.close().await;
    let value = result?;
    close_result?;
    Ok(value)
}
/// Load the bake store, resolve `name` to an MCP bake, and connect to it.
async fn connect_named_baked_mcp_client(name: &str) -> Result<ConnectedMcpClient> {
    let store = BakeStore::load()?;
    let config = get_baked_mcp_server(&store, name)?;
    let client = ConnectedMcpClient::connect(&config).await?;
    Ok(client)
}
fn split_server_target(target: &str) -> Result<(&str, &str)> {
target.split_once('/').ok_or_else(|| {
sxmc::error::SxmcError::Other(format!(
"Invalid target '{}'. Expected SERVER/NAME.",
target
))
})
}
fn parse_json_object_arg(
payload: Option<String>,
) -> Result<serde_json::Map<String, serde_json::Value>> {
let Some(payload) = payload else {
return Ok(serde_json::Map::new());
};
let raw = if payload == "-" {
use std::io::Read;
let mut buffer = String::new();
std::io::stdin()
.read_to_string(&mut buffer)
.map_err(|e| sxmc::error::SxmcError::Other(format!("Failed to read stdin: {}", e)))?;
buffer
} else {
payload
};
if raw.trim().is_empty() {
return Ok(serde_json::Map::new());
}
let value: Value = serde_json::from_str(&raw).map_err(|e| {
sxmc::error::SxmcError::Other(format!("MCP tool payload must be a JSON object: {}", e))
})?;
value.as_object().cloned().ok_or_else(|| {
sxmc::error::SxmcError::Other("MCP tool payload must be a JSON object.".into())
})
}
/// Heuristic: does an MCP error message look like the tool arguments had
/// the wrong shape (schema/validation failure), as opposed to a transport
/// or server-side failure? Matching is case-insensitive.
fn looks_like_argument_shape_error(message: &str) -> bool {
    const NEEDLES: [&str; 6] = [
        "invalid params",
        "validation",
        "expected object",
        "missing required",
        "required property",
        "schema",
    ];
    let lower = message.to_ascii_lowercase();
    NEEDLES.iter().any(|needle| lower.contains(needle))
}
fn annotate_mcp_tool_call_error(
error: sxmc::error::SxmcError,
inspect_hint: &str,
session_hint: Option<&str>,
) -> sxmc::error::SxmcError {
let message = match error {
sxmc::error::SxmcError::McpError(message) => message,
sxmc::error::SxmcError::Other(message) => message,
other => return other,
};
let mut notes = Vec::new();
if looks_like_argument_shape_error(&message) {
notes.push(format!(
"Inspect the tool schema first with `{}`.",
inspect_hint
));
}
if let Some(session_hint) = session_hint {
notes.push(format!(
"If the tool expects multi-step state, use `{}` instead of repeated one-shot calls.",
session_hint
));
}
notes.push(
"When machine-parsing structured output, consume stdout only; informational `[sxmc]` lines are written to stderr."
.into(),
);
sxmc::error::SxmcError::Other(format!(
"{}\n\nRecovery hints:\n- {}",
message,
notes.join("\n- ")
))
}
/// Parse `key=value` args into a JSON object, returning `None` when no
/// pairs were parsed (so callers can omit the arguments field entirely).
fn parse_optional_kv_args(args: &[String]) -> Option<serde_json::Map<String, serde_json::Value>> {
    Some(parse_kv_args(args)).filter(|arguments| !arguments.is_empty())
}
/// Render grep-style MCP tool matches as a text block.
///
/// With no matches, returns a one-line "no match" message. Otherwise a
/// header states how many matches are shown (noting truncation when
/// `limit` cuts the list), followed by `server/tool` lines with optional
/// description lines underneath.
fn format_mcp_grep_results(
    results: &[(String, Tool)],
    pattern: &str,
    limit: Option<usize>,
) -> String {
    let total = results.len();
    if total == 0 {
        return format!("No MCP tools matched '{}'.", pattern);
    }
    let shown = limit.unwrap_or(total).min(total);
    let header = if shown < total {
        format!("Matches for '{}' ({} shown of {}):", pattern, shown, total)
    } else {
        format!("Matches for '{}' ({}):", pattern, total)
    };
    let mut body = Vec::with_capacity(shown);
    for (server, tool) in &results[..shown] {
        body.push(format!(" {}/{}", server, tool.name.as_ref()));
        if let Some(description) = &tool.description {
            body.push(format!(" {}", description));
        }
    }
    format!("{}\n{}", header, body.join("\n"))
}
/// Which MCP capability surface an operation targets.
#[derive(Clone, Copy)]
enum McpSurface {
    Tools,
    Prompts,
    Resources,
}
impl McpSurface {
    /// Singular noun for user-facing messages.
    fn label(self) -> &'static str {
        match self {
            McpSurface::Tools => "tool",
            McpSurface::Prompts => "prompt",
            McpSurface::Resources => "resource",
        }
    }
    /// Plural noun for user-facing messages.
    fn plural_label(self) -> &'static str {
        match self {
            McpSurface::Tools => "tools",
            McpSurface::Prompts => "prompts",
            McpSurface::Resources => "resources",
        }
    }
}
/// Capability flags advertised by an MCP server during initialization.
/// `None` means "unknown" (no server info was available), `Some(false)`
/// means the capability was explicitly not advertised.
#[derive(Clone, Debug, Default)]
struct McpCapabilities {
    tools: Option<bool>,
    prompts: Option<bool>,
    resources: Option<bool>,
}
impl McpCapabilities {
    /// Derive capability flags from initialization-time server info;
    /// absent server info yields all-unknown.
    fn from_server_info(server_info: Option<&ServerInfo>) -> Self {
        let Some(info) = server_info else {
            return Self::default();
        };
        Self {
            tools: Some(info.capabilities.tools.is_some()),
            prompts: Some(info.capabilities.prompts.is_some()),
            resources: Some(info.capabilities.resources.is_some()),
        }
    }
    /// Flag for a single surface.
    fn supports(&self, surface: McpSurface) -> Option<bool> {
        match surface {
            McpSurface::Tools => self.tools,
            McpSurface::Prompts => self.prompts,
            McpSurface::Resources => self.resources,
        }
    }
}
fn is_capability_not_supported(error: &sxmc::error::SxmcError) -> bool {
match error {
sxmc::error::SxmcError::McpError(message) => {
let lower = message.to_ascii_lowercase();
lower.contains("-32601")
|| lower.contains("method not found")
|| lower.contains("not supported")
}
_ => false,
}
}
/// Run a listing future for an optional MCP surface, degrading to an
/// empty list (with a stderr notice) when the capability is unsupported.
///
/// Skips the call entirely when `advertised` is `Some(false)`; otherwise
/// awaits `list_future` and converts "method not supported" errors into
/// an empty result. Other errors propagate unchanged.
async fn list_optional_surface<T, F>(
    surface: McpSurface,
    advertised: Option<bool>,
    list_future: F,
) -> Result<Vec<T>>
where
    F: std::future::Future<Output = Result<Vec<T>>>,
{
    // The server explicitly declined this capability at init time.
    if let Some(false) = advertised {
        eprintln!(
            "[sxmc] Skipping {} listing because the MCP server did not advertise that capability during initialization.",
            surface.label()
        );
        return Ok(Vec::new());
    }
    match list_future.await {
        Ok(items) => Ok(items),
        Err(error) if is_capability_not_supported(&error) => {
            eprintln!(
                "[sxmc] Skipping {} listing because the MCP server does not advertise that capability.",
                surface.label()
            );
            Ok(Vec::new())
        }
        Err(error) => Err(error),
    }
}
fn print_empty_surface_notice(surface: McpSurface, advertised: Option<bool>) {
if advertised == Some(false) {
println!(
"No {} available. The MCP server did not advertise {} support.",
surface.plural_label(),
surface.label()
);
} else {
match surface {
McpSurface::Tools => println!("No tools available."),
McpSurface::Prompts => println!("No prompts available."),
McpSurface::Resources => println!("No resources available."),
}
}
}
/// Assemble a structured JSON summary of an MCP server: server info,
/// per-surface counts, how many items are shown (after applying `limit`),
/// truncation flags, and brief per-item summaries.
///
/// `limit`, when set, caps each of the tools/prompts/resources lists
/// independently and is echoed back in the output.
fn build_mcp_description(
    server_info: Option<&ServerInfo>,
    tools: &[Tool],
    prompts: &[Prompt],
    resources: &[Resource],
    limit: Option<usize>,
) -> Value {
    // Each shown-count is clamped to the corresponding list length.
    let tool_limit = limit.unwrap_or(tools.len()).min(tools.len());
    let prompt_limit = limit.unwrap_or(prompts.len()).min(prompts.len());
    let resource_limit = limit.unwrap_or(resources.len()).min(resources.len());
    let mut description = output::summarize_server_info(server_info);
    description["detail_mode"] = json!("summary");
    description["counts"] = json!({
        "tools": tools.len(),
        "prompts": prompts.len(),
        "resources": resources.len(),
    });
    description["shown"] = json!({
        "tools": tool_limit,
        "prompts": prompt_limit,
        "resources": resource_limit,
    });
    description["truncated"] = json!({
        "tools": tool_limit < tools.len(),
        "prompts": prompt_limit < prompts.len(),
        "resources": resource_limit < resources.len(),
    });
    if let Some(limit) = limit {
        description["limit"] = json!(limit);
    }
    description["tools"] = Value::Array(
        tools
            .iter()
            .take(tool_limit)
            .map(output::summarize_tool_brief)
            .collect(),
    );
    description["prompts"] = Value::Array(
        prompts
            .iter()
            .take(prompt_limit)
            .map(output::summarize_prompt)
            .collect(),
    );
    description["resources"] = Value::Array(
        resources
            .iter()
            .take(resource_limit)
            .map(output::summarize_resource)
            .collect(),
    );
    description
}
/// Flattened flags and arguments for a one-shot MCP bridge invocation.
/// At most one of prompt / resource / tool-call / introspection modes is
/// acted upon by the bridge runner.
#[derive(Clone, Copy)]
struct McpBridgeRequest<'a> {
    prompt: Option<&'a str>,
    resource_uri: Option<&'a str>,
    args: &'a [String],
    list: bool,
    list_tools: bool,
    list_prompts: bool,
    list_resources: bool,
    search: Option<&'a str>,
    describe: bool,
    describe_tool: Option<&'a str>,
    format: Option<output::StructuredOutputFormat>,
    limit: Option<usize>,
    pretty: bool,
}
impl McpBridgeRequest<'_> {
    /// True when any listing, search, or describe flag was set — i.e. the
    /// caller wants introspection output rather than invoking something.
    fn introspection_requested(self) -> bool {
        let any_list = self.list || self.list_tools || self.list_prompts || self.list_resources;
        any_list || self.search.is_some() || self.describe || self.describe_tool.is_some()
    }
}
/// Execute a one-shot MCP bridge request against a connected client.
///
/// Dispatch order: introspection (list/search/describe) takes precedence;
/// then prompt fetch, then resource read, then a tool call using the
/// first positional arg as the tool name. With none of those, prints a
/// usage hint to stderr and exits the process with status 1.
async fn run_mcp_bridge_command(
    client: &ConnectedMcpClient,
    request: McpBridgeRequest<'_>,
) -> Result<()> {
    let server_info = client.server_info();
    let capabilities = McpCapabilities::from_server_info(server_info.as_ref());
    // First positional arg is the tool name; the rest are its key=value args.
    let (tool_name, tool_args) = request
        .args
        .split_first()
        .map(|(name, rest)| (Some(name.as_str()), rest))
        .unwrap_or((None, &[]));
    if request.introspection_requested() {
        // Only fetch each surface when some requested output needs it.
        let needs_tools = request.list
            || request.list_tools
            || request.search.is_some()
            || request.describe
            || request.describe_tool.is_some();
        let needs_prompts = request.list || request.list_prompts || request.describe;
        let needs_resources = request.list || request.list_resources || request.describe;
        let tools = if needs_tools {
            list_optional_surface(
                McpSurface::Tools,
                capabilities.supports(McpSurface::Tools),
                client.list_tools(),
            )
            .await?
        } else {
            Vec::new()
        };
        // --describe-tool: print one tool's detail and stop.
        if let Some(name) = request.describe_tool {
            let tool = tools
                .iter()
                .find(|tool| tool.name.as_ref() == name)
                .ok_or_else(|| {
                    sxmc::error::SxmcError::Other(format!("Tool not found: {}", name))
                })?;
            println!(
                "{}",
                output::format_tool_detail(tool, request.pretty, request.format)
            );
            return Ok(());
        }
        // --describe: structured summary of all surfaces, then stop.
        if request.describe {
            let prompts = if needs_prompts {
                list_optional_surface(
                    McpSurface::Prompts,
                    capabilities.supports(McpSurface::Prompts),
                    client.list_prompts(),
                )
                .await?
            } else {
                Vec::new()
            };
            let resources = if needs_resources {
                list_optional_surface(
                    McpSurface::Resources,
                    capabilities.supports(McpSurface::Resources),
                    client.list_resources(),
                )
                .await?
            } else {
                Vec::new()
            };
            let description = build_mcp_description(
                server_info.as_ref(),
                &tools,
                &prompts,
                &resources,
                request.limit,
            );
            let format = output::resolve_structured_format(request.format, request.pretty);
            println!("{}", output::format_structured_value(&description, format));
            return Ok(());
        }
        // Plain listings; blank line between consecutive sections.
        let mut printed_any = false;
        if request.list || request.list_tools || request.search.is_some() {
            println!(
                "{}",
                output::format_tool_list(&tools, request.search, request.limit)
            );
            printed_any = true;
        }
        if request.list || request.list_prompts {
            let prompts = list_optional_surface(
                McpSurface::Prompts,
                capabilities.supports(McpSurface::Prompts),
                client.list_prompts(),
            )
            .await?;
            if printed_any {
                println!();
            }
            if prompts.is_empty() {
                print_empty_surface_notice(
                    McpSurface::Prompts,
                    capabilities.supports(McpSurface::Prompts),
                );
            } else {
                println!("{}", output::format_prompt_list(&prompts, request.limit));
            }
            printed_any = true;
        }
        if request.list || request.list_resources {
            let resources = list_optional_surface(
                McpSurface::Resources,
                capabilities.supports(McpSurface::Resources),
                client.list_resources(),
            )
            .await?;
            if printed_any {
                println!();
            }
            if resources.is_empty() {
                print_empty_surface_notice(
                    McpSurface::Resources,
                    capabilities.supports(McpSurface::Resources),
                );
            } else {
                println!(
                    "{}",
                    output::format_resource_list(&resources, request.limit)
                );
            }
        }
    } else if let Some(name) = request.prompt {
        let result = client
            .get_prompt(name, parse_optional_kv_args(request.args))
            .await?;
        println!("{}", output::format_prompt_result(&result, request.pretty));
    } else if let Some(uri) = request.resource_uri {
        let result = client.read_resource(uri).await?;
        println!(
            "{}",
            output::format_resource_result(&result, request.pretty)
        );
    } else if let Some(name) = tool_name {
        // Tool calls get recovery hints attached to shape-related errors.
        let result = client
            .call_tool(name, parse_kv_args(tool_args))
            .await
            .map_err(|error| {
                annotate_mcp_tool_call_error(
                    error,
                    &format!("sxmc ... --describe-tool {}", name),
                    None,
                )
            })?;
        println!("{}", output::format_tool_result(&result, request.pretty));
    } else {
        eprintln!("Specify a tool name, --prompt, --resource, or use --list");
        std::process::exit(1);
    }
    Ok(())
}
/// Static help text for the interactive MCP session REPL.
/// NOTE(review): this literal is user-facing output — any whitespace
/// change alters what the session prints; confirm the literal's original
/// indentation before editing.
fn mcp_session_help() -> &'static str {
    r#"Stateful MCP session commands:
tools [--search PATTERN] [--limit N]
prompts [--limit N]
resources [--limit N]
describe [--pretty] [--format json|json-pretty|toon] [--limit N]
info TOOL [--pretty] [--format json|json-pretty|toon]
call TOOL [JSON_OBJECT|-] [--pretty]
prompt NAME [key=value ...] [--pretty]
read RESOURCE_URI [--pretty]
help
exit
Examples:
info sequentialthinking --format toon
call sequentialthinking '{"thought":"Step A","thoughtNumber":1,"totalThoughts":2,"nextThoughtNeeded":true}' --pretty
call sequentialthinking '{"thought":"Step B","thoughtNumber":2,"totalThoughts":2,"nextThoughtNeeded":false}' --pretty
"#
}
/// Outcome of parsing one line of interactive MCP session input.
enum ParsedMcpSessionInput {
    /// A structured session command to execute.
    Action(McpSessionAction),
    /// The bare `help` keyword.
    Help,
    /// The bare `exit` or `quit` keyword.
    Exit,
}
/// Parse one REPL line into a session action.
///
/// Blank lines and `#` comments yield `Ok(None)`. Bare `help` and
/// `exit`/`quit` map to their keywords. Anything else is tokenized
/// shell-style (via shlex) and fed to the clap session parser with a
/// synthetic argv[0]; tokenization or parse failures become errors.
fn parse_mcp_session_input(line: &str) -> Result<Option<ParsedMcpSessionInput>> {
    let trimmed = line.trim();
    if trimmed.is_empty() || trimmed.starts_with('#') {
        return Ok(None);
    }
    if trimmed == "help" {
        return Ok(Some(ParsedMcpSessionInput::Help));
    }
    if trimmed == "exit" || trimmed == "quit" {
        return Ok(Some(ParsedMcpSessionInput::Exit));
    }
    let tokens = shlex::split(trimmed).ok_or_else(|| {
        sxmc::error::SxmcError::Other("Failed to parse session command line.".into())
    })?;
    let argv = std::iter::once("sxmc-session".to_string()).chain(tokens);
    let parsed = McpSessionCli::try_parse_from(argv)
        .map_err(|e| sxmc::error::SxmcError::Other(format!("Invalid session command:\n{}", e)))?;
    Ok(Some(ParsedMcpSessionInput::Action(parsed.action)))
}
/// Fetch and print the server's tool list, optionally filtered/limited.
async fn print_mcp_tools(
    client: &ConnectedMcpClient,
    search: Option<&str>,
    limit: Option<usize>,
) -> Result<()> {
    let available = client.list_tools().await?;
    let rendered = output::format_tool_list(&available, search, limit);
    println!("{}", rendered);
    Ok(())
}
/// Fetch and print the server's prompts, with an empty-surface notice
/// when none are available.
async fn print_mcp_prompts(client: &ConnectedMcpClient, limit: Option<usize>) -> Result<()> {
    let prompts = list_optional_surface(McpSurface::Prompts, None, client.list_prompts()).await?;
    if prompts.is_empty() {
        print_empty_surface_notice(McpSurface::Prompts, None);
        return Ok(());
    }
    println!("{}", output::format_prompt_list(&prompts, limit));
    Ok(())
}
/// Fetch and print the server's resources, with an empty-surface notice
/// when none are available.
async fn print_mcp_resources(client: &ConnectedMcpClient, limit: Option<usize>) -> Result<()> {
    let resources =
        list_optional_surface(McpSurface::Resources, None, client.list_resources()).await?;
    if resources.is_empty() {
        print_empty_surface_notice(McpSurface::Resources, None);
        return Ok(());
    }
    println!("{}", output::format_resource_list(&resources, limit));
    Ok(())
}
async fn print_mcp_tool_info(
client: &ConnectedMcpClient,
tool_name: &str,
pretty: bool,
format: Option<output::StructuredOutputFormat>,
) -> Result<()> {
let tools = client.list_tools().await?;
let tool = tools
.iter()
.find(|tool| tool.name.as_ref() == tool_name)
.ok_or_else(|| sxmc::error::SxmcError::Other(format!("Tool not found: {}", tool_name)))?;
println!("{}", output::format_tool_detail(tool, pretty, format));
Ok(())
}
async fn call_mcp_tool(
client: &ConnectedMcpClient,
tool_name: &str,
payload: Option<String>,
pretty: bool,
inspect_hint: &str,
session_hint: Option<&str>,
) -> Result<()> {
let arguments = parse_json_object_arg(payload)?;
let result = client
.call_tool(tool_name, arguments)
.await
.map_err(|error| annotate_mcp_tool_call_error(error, inspect_hint, session_hint))?;
println!("{}", output::format_tool_result(&result, pretty));
Ok(())
}
/// Read a resource by URI and print its formatted contents.
async fn read_mcp_resource(
    client: &ConnectedMcpClient,
    resource_uri: &str,
    pretty: bool,
) -> Result<()> {
    let contents = client.read_resource(resource_uri).await?;
    let rendered = output::format_resource_result(&contents, pretty);
    println!("{}", rendered);
    Ok(())
}
/// Fetch a prompt by name with optional key=value arguments and print
/// the formatted result.
async fn fetch_mcp_prompt(
    client: &ConnectedMcpClient,
    prompt_name: &str,
    args: &[String],
    pretty: bool,
) -> Result<()> {
    let arguments = parse_optional_kv_args(args);
    let result = client.get_prompt(prompt_name, arguments).await?;
    let rendered = output::format_prompt_result(&result, pretty);
    println!("{}", rendered);
    Ok(())
}
/// Print a structured description of the connected server: capabilities,
/// tools, prompts, and resources (each surface degrades gracefully when
/// not advertised or not supported).
async fn describe_mcp_server(
    client: &ConnectedMcpClient,
    pretty: bool,
    format: Option<output::StructuredOutputFormat>,
    limit: Option<usize>,
) -> Result<()> {
    let info = client.server_info();
    let caps = McpCapabilities::from_server_info(info.as_ref());
    // Gather each surface in turn, skipping those the server declined.
    let tools = list_optional_surface(
        McpSurface::Tools,
        caps.supports(McpSurface::Tools),
        client.list_tools(),
    )
    .await?;
    let prompts = list_optional_surface(
        McpSurface::Prompts,
        caps.supports(McpSurface::Prompts),
        client.list_prompts(),
    )
    .await?;
    let resources = list_optional_surface(
        McpSurface::Resources,
        caps.supports(McpSurface::Resources),
        client.list_resources(),
    )
    .await?;
    let description = build_mcp_description(info.as_ref(), &tools, &prompts, &resources, limit);
    let resolved_format = output::resolve_structured_format(format, pretty);
    println!(
        "{}",
        output::format_structured_value(&description, resolved_format)
    );
    Ok(())
}
/// Dispatch a parsed session action to the matching helper against the
/// connected client. Pure delegation — each arm forwards its fields.
async fn execute_mcp_session_action(
    client: &ConnectedMcpClient,
    action: McpSessionAction,
) -> Result<()> {
    match action {
        McpSessionAction::Tools { search, limit } => {
            print_mcp_tools(client, search.as_deref(), limit).await
        }
        McpSessionAction::Prompts { limit } => print_mcp_prompts(client, limit).await,
        McpSessionAction::Resources { limit } => print_mcp_resources(client, limit).await,
        McpSessionAction::Describe {
            pretty,
            format,
            limit,
        } => describe_mcp_server(client, pretty, format, limit).await,
        McpSessionAction::Info {
            tool,
            pretty,
            format,
        } => print_mcp_tool_info(client, &tool, pretty, format).await,
        McpSessionAction::Call {
            tool,
            payload,
            pretty,
        } => {
            // Hints point at the in-session `info` command and the
            // session entry point for multi-step tools.
            call_mcp_tool(
                client,
                &tool,
                payload,
                pretty,
                &format!("info {} --format toon", tool),
                Some("sxmc mcp session <server>"),
            )
            .await
        }
        McpSessionAction::Read { resource, pretty } => {
            read_mcp_resource(client, &resource, pretty).await
        }
        McpSessionAction::Prompt {
            prompt,
            args,
            pretty,
        } => fetch_mcp_prompt(client, &prompt, &args, pretty).await,
    }
}
/// Drive the interactive MCP session REPL over a line reader.
///
/// Prints the help banner to stderr unless `quiet`. Each input line is
/// parsed; blanks/comments are skipped, `help` reprints the banner,
/// `exit`/`quit` ends the loop, and any other command is executed
/// against the client. Read or command errors abort the session.
async fn run_mcp_session<R: BufRead>(
    client: &ConnectedMcpClient,
    reader: R,
    quiet: bool,
) -> Result<()> {
    if !quiet {
        eprintln!("{}", mcp_session_help().trim_end());
    }
    for line_result in reader.lines() {
        let line = line_result.map_err(|e| {
            sxmc::error::SxmcError::Other(format!("Failed to read session input: {}", e))
        })?;
        let Some(parsed) = parse_mcp_session_input(&line)? else {
            continue;
        };
        match parsed {
            ParsedMcpSessionInput::Help => println!("{}", mcp_session_help().trim_end()),
            ParsedMcpSessionInput::Exit => break,
            ParsedMcpSessionInput::Action(action) => {
                execute_mcp_session_action(client, action).await?;
            }
        }
    }
    Ok(())
}
/// Translate a CLI source-type string into a `SourceType` variant.
///
/// Accepts exactly: `stdio`, `http`, `api`, `spec`, `graphql`. Anything else
/// prints an error to stderr and terminates the process with exit code 1
/// (this is a CLI entry-point helper, not a library parser).
fn parse_source_type(source_type: &str) -> SourceType {
    let recognized = match source_type {
        "stdio" => Some(SourceType::Stdio),
        "http" => Some(SourceType::Http),
        "api" => Some(SourceType::Api),
        "spec" => Some(SourceType::Spec),
        "graphql" => Some(SourceType::Graphql),
        _ => None,
    };
    match recognized {
        Some(kind) => kind,
        None => {
            eprintln!(
                "Unknown source type: {}. Use: stdio, http, api, spec, graphql",
                source_type
            );
            std::process::exit(1);
        }
    }
}
/// Resolve the root directory for generation: an explicit `root` wins,
/// otherwise fall back to the process's current working directory.
fn resolve_generation_root(root: Option<PathBuf>) -> Result<PathBuf> {
    if let Some(explicit) = root {
        return Ok(explicit);
    }
    // `?` converts the io::Error through the crate error's From impl.
    Ok(std::env::current_dir()?)
}
/// Build `InstallPaths` for either the global or local scope.
///
/// `_local` is accepted for CLI-flag symmetry but unused: anything that is
/// not `--global` is treated as local.
fn resolve_install_paths(
    root: Option<PathBuf>,
    global: bool,
    _local: bool,
) -> Result<InstallPaths> {
    let project_root = resolve_generation_root(root)?;
    if global {
        Ok(InstallPaths::global(project_root))
    } else {
        Ok(InstallPaths::local(project_root))
    }
}
/// Render the CLI flag a user should pass to re-target this install scope:
/// `--root <dir>` for local installs, `--global` otherwise.
fn scope_command_hint(install_paths: &InstallPaths) -> String {
    match install_paths.scope() {
        InstallScope::Global => String::from("--global"),
        InstallScope::Local => {
            format!("--root {}", install_paths.project_root().display())
        }
    }
}
/// Map a host profile + artifact kind to the stable key used in doctor/status JSON.
///
/// `config == false` selects the agent-doc artifact key for the host;
/// `config == true` selects the native config/MCP artifact key. The strings
/// are part of the machine-readable doctor output, so they must stay stable.
fn doctor_target_key_for_host(client: AiClientProfile, config: bool) -> &'static str {
    match (client, config) {
        (AiClientProfile::ClaudeCode, false) => "claude_code",
        (AiClientProfile::ClaudeCode, true) => "claude_code_mcp",
        (AiClientProfile::Cursor, false) => "cursor_rules",
        (AiClientProfile::Cursor, true) => "cursor_mcp",
        (AiClientProfile::GeminiCli, false) => "gemini_cli",
        (AiClientProfile::GeminiCli, true) => "gemini_mcp",
        (AiClientProfile::GithubCopilot, false) => "github_copilot",
        (AiClientProfile::GithubCopilot, true) => "github_copilot_config",
        (AiClientProfile::ContinueDev, false) => "continue_dev",
        (AiClientProfile::ContinueDev, true) => "continue_dev_config",
        (AiClientProfile::OpenCode, false) => "open_code_agent_doc",
        (AiClientProfile::OpenCode, true) => "open_code",
        (AiClientProfile::JetbrainsAiAssistant, false) => "jetbrains_ai_assistant",
        (AiClientProfile::JetbrainsAiAssistant, true) => "jetbrains_ai_assistant_config",
        (AiClientProfile::Junie, false) => "junie",
        (AiClientProfile::Junie, true) => "junie_config",
        (AiClientProfile::Windsurf, false) => "windsurf",
        (AiClientProfile::Windsurf, true) => "windsurf_config",
        (AiClientProfile::OpenaiCodex, false) => "openai_codex_agent_doc",
        (AiClientProfile::OpenaiCodex, true) => "openai_codex_mcp",
        (AiClientProfile::GenericStdioMcp, false) => "generic_stdio_agent_doc",
        (AiClientProfile::GenericStdioMcp, true) => "generic_stdio_mcp",
        (AiClientProfile::GenericHttpMcp, false) => "generic_http_agent_doc",
        (AiClientProfile::GenericHttpMcp, true) => "generic_http_mcp",
    }
}
/// Collect the (key, path) pairs doctor/status should check for startup artifacts.
///
/// Two modes:
/// - `only_hosts` empty: use the default host set, list the shared portable
///   agent doc once up front, and skip per-host doc paths that alias it.
/// - `only_hosts` non-empty: list only natively supported doc/config targets
///   for exactly those hosts (no portable-doc entry).
fn doctor_startup_targets(
    install_paths: &InstallPaths,
    only_hosts: &[AiClientProfile],
) -> Vec<(String, PathBuf)> {
    if only_hosts.is_empty() {
        let mut targets = vec![(
            "portable_agent_doc".into(),
            install_paths.portable_agent_doc_path(),
        )];
        for host in resolved_hosts(only_hosts) {
            let spec = cli_surfaces::host_profile_spec(host);
            // NOTE(review): unlike the explicit-host branch below, this does
            // not gate on spec.native_doc_target — presumably host_doc_path
            // returns None for hosts without a doc target; confirm.
            if let Some(path) = install_paths.host_doc_path(host) {
                // These hosts may share the portable agent doc; avoid listing
                // the same file twice.
                let is_shared_portable = matches!(
                    host,
                    AiClientProfile::OpenCode
                        | AiClientProfile::OpenaiCodex
                        | AiClientProfile::GenericStdioMcp
                        | AiClientProfile::GenericHttpMcp
                ) && path == install_paths.portable_agent_doc_path();
                if !is_shared_portable {
                    targets.push((doctor_target_key_for_host(host, false).into(), path));
                }
            }
            if spec.native_config_target.is_some() {
                if let Some(path) = install_paths.host_config_path(host) {
                    targets.push((doctor_target_key_for_host(host, true).into(), path));
                }
            }
        }
        return targets;
    }
    let mut targets = Vec::new();
    for host in only_hosts {
        let spec = cli_surfaces::host_profile_spec(*host);
        if spec.native_doc_target.is_some() {
            if let Some(path) = install_paths.host_doc_path(*host) {
                targets.push((doctor_target_key_for_host(*host, false).into(), path));
            }
        }
        if spec.native_config_target.is_some() {
            if let Some(path) = install_paths.host_config_path(*host) {
                targets.push((doctor_target_key_for_host(*host, true).into(), path));
            }
        }
    }
    targets
}
/// Build the machine-readable `doctor` report for an install scope.
///
/// Gathers bake-store size, cache stats (14-day default TTL), presence of
/// startup artifacts, and a static list of recommended first commands.
/// Returns an error if the bake store or cache cannot be loaded.
fn doctor_value(install_paths: &InstallPaths, only_hosts: &[AiClientProfile]) -> Result<Value> {
    let bake_store = BakeStore::load()?;
    let cache_stats = sxmc::cache::Cache::new(60 * 60 * 24 * 14)?.stats()?;
    let startup_targets = doctor_startup_targets(install_paths, only_hosts);
    let startup_files = startup_targets
        .into_iter()
        // `name` is already an owned String; move it into the map key
        // directly instead of cloning it via to_string().
        .map(|(name, path)| {
            (
                name,
                json!({
                    "path": path.display().to_string(),
                    "present": path.exists(),
                }),
            )
        })
        .collect::<serde_json::Map<_, _>>();
    Ok(json!({
        "root": install_paths.project_root().display().to_string(),
        "install_scope": install_paths.scope().as_str(),
        "state_root": install_paths.state_root().display().to_string(),
        "checked_hosts": only_hosts
            .iter()
            .map(|host| cli_surfaces::host_profile_spec(*host).sidecar_scope)
            .collect::<Vec<_>>(),
        "baked_mcp_servers": bake_store.list().len(),
        "portable_profile_dir": {
            "path": install_paths.profile_dir().display().to_string(),
            "present": install_paths.profile_dir().exists(),
        },
        "cache": {
            "path": cache_stats.path.display().to_string(),
            "entry_count": cache_stats.entry_count,
            "total_bytes": cache_stats.total_bytes,
            "default_ttl_secs": cache_stats.default_ttl_secs,
        },
        "startup_files": startup_files,
        "recommended_first_moves": [
            {
                "surface": "unknown_cli",
                "command": "sxmc inspect cli <tool> --depth 1 --format json-pretty",
                "why": "Get a structured profile instead of pasting raw help text into context."
            },
            {
                "surface": "unknown_mcp_server",
                "command": "sxmc stdio \"<cmd>\" --list",
                "why": "Discover tools, prompts, and resources before guessing JSON-RPC calls."
            },
            {
                "surface": "known_baked_mcp",
                "command": "sxmc mcp grep <pattern>",
                "why": "Search across baked MCP servers before opening every schema."
            },
            {
                "surface": "unknown_api",
                "command": "sxmc api <url-or-spec> --list",
                "why": "List real operations from the live spec instead of hand-constructing URLs."
            },
            {
                "surface": "startup_install",
                "command": "sxmc init ai --from-cli <tool> --coverage full --mode preview",
                "why": "Generate reviewable startup docs and host configs before applying them."
            },
            {
                "surface": "local_skills_or_prompts",
                "command": "sxmc serve --paths <dir>",
                "why": "Expose a local skills directory as an MCP server when you want prompts and tools to show up in AI hosts."
            },
            {
                "surface": "suspicious_skill_or_repo",
                "command": "sxmc scan --paths <dir>",
                "why": "Check for prompt injection, secrets, Unicode tricks, and dangerous script patterns."
            }
        ]
    }))
}
/// Abstraction over "a place where saved profiles and sync state live",
/// implemented for bare paths (using the `.sxmc` layout convention) and for
/// `InstallPaths` (which resolves through its own accessors).
trait InstallRootLike {
    // Directory holding saved CLI surface profile JSON files.
    fn saved_profiles_dir(&self) -> PathBuf;
    // Location of the JSON sync-state file.
    fn sync_state_path(&self) -> PathBuf;
}
/// Bare paths use the conventional `.sxmc` layout under the root.
impl InstallRootLike for Path {
    fn saved_profiles_dir(&self) -> PathBuf {
        // <root>/.sxmc/ai/profiles
        [".sxmc", "ai", "profiles"]
            .iter()
            .fold(self.to_path_buf(), |dir, segment| dir.join(segment))
    }
    fn sync_state_path(&self) -> PathBuf {
        // <root>/.sxmc/state.json
        [".sxmc", "state.json"]
            .iter()
            .fold(self.to_path_buf(), |dir, segment| dir.join(segment))
    }
}
/// `PathBuf` simply delegates to the `Path` implementation via `as_path()`.
impl InstallRootLike for PathBuf {
    fn saved_profiles_dir(&self) -> PathBuf {
        self.as_path().saved_profiles_dir()
    }
    fn sync_state_path(&self) -> PathBuf {
        self.as_path().sync_state_path()
    }
}
/// `InstallPaths` resolves these locations through its own accessors instead
/// of the `.sxmc` convention used for bare paths.
impl InstallRootLike for InstallPaths {
    fn saved_profiles_dir(&self) -> PathBuf {
        self.profile_dir()
    }
    fn sync_state_path(&self) -> PathBuf {
        // Not infinite recursion: Rust resolves inherent methods before trait
        // methods, so this calls InstallPaths's own sync_state_path().
        // NOTE(review): assumes an inherent sync_state_path exists on
        // InstallPaths — confirm in sxmc::paths.
        self.sync_state_path()
    }
}
/// Default location of saved CLI profiles for any install-root-like value.
fn default_saved_profiles_dir<R: InstallRootLike>(root: &R) -> PathBuf {
    root.saved_profiles_dir()
}
/// Default location of the sync-state file for any install-root-like value.
fn default_sync_state_path<R: InstallRootLike>(root: &R) -> PathBuf {
    root.sync_state_path()
}
/// Turn an arbitrary command name into a filesystem-safe slug.
///
/// ASCII alphanumerics are lowercased and kept; every run of other characters
/// collapses to a single `-`. Leading/trailing dashes are trimmed, and an
/// empty result falls back to `"profile"`.
fn bundle_slug(input: &str) -> String {
    let mut slug = String::with_capacity(input.len());
    for ch in input.chars() {
        if ch.is_ascii_alphanumeric() {
            slug.push(ch.to_ascii_lowercase());
        } else if !slug.ends_with('-') {
            // First separator of a run (including at the very start; the
            // leading dash is trimmed below).
            slug.push('-');
        }
    }
    let trimmed = slug.trim_matches('-');
    if trimmed.is_empty() {
        "profile".into()
    } else {
        trimmed.to_string()
    }
}
/// True when `target` looks like an HTTP(S) URL rather than a file path/URI.
fn is_http_target(target: &str) -> bool {
    ["http://", "https://"]
        .iter()
        .any(|scheme| target.starts_with(scheme))
}
/// Strip a leading `file://` scheme (repeated occurrences included, matching
/// `trim_start_matches` semantics) and treat the remainder as a path.
fn file_uri_to_path(uri: &str) -> PathBuf {
    let without_scheme = uri.trim_start_matches("file://");
    PathBuf::from(without_scheme)
}
/// Render a byte slice as lowercase hex, two digits per byte.
fn bytes_to_hex(bytes: &[u8]) -> String {
    bytes.iter().map(|byte| format!("{byte:02x}")).collect()
}
/// Decode a (whitespace-trimmed) hex string into raw bytes.
///
/// Rejects odd-length input up front; each two-character pair is parsed as a
/// base-16 byte. Mixed-case hex is accepted via `from_str_radix`.
fn hex_to_bytes(input: &str) -> Result<Vec<u8>> {
    let trimmed = input.trim();
    if trimmed.len() % 2 != 0 {
        return Err(sxmc::error::SxmcError::Other(format!(
            "Expected an even-length hex string, got {} characters",
            trimmed.len()
        )));
    }
    let mut decoded = Vec::with_capacity(trimmed.len() / 2);
    // Length is even, so chunks_exact(2) covers every byte of the input.
    for chunk in trimmed.as_bytes().chunks_exact(2) {
        let pair = std::str::from_utf8(chunk).map_err(|error| {
            sxmc::error::SxmcError::Other(format!("Invalid UTF-8 in hex string: {}", error))
        })?;
        let byte = u8::from_str_radix(pair, 16).map_err(|error| {
            sxmc::error::SxmcError::Other(format!("Invalid hex byte '{}': {}", pair, error))
        })?;
        decoded.push(byte);
    }
    Ok(decoded)
}
/// SHA-256 of `bytes`, rendered as lowercase hex.
fn sha256_hex(bytes: &[u8]) -> String {
    bytes_to_hex(&Sha256::digest(bytes))
}
/// Expand an empty host filter into the full default host set; otherwise
/// return the explicit selection unchanged.
fn resolved_hosts(only_hosts: &[AiClientProfile]) -> Vec<AiClientProfile> {
    if !only_hosts.is_empty() {
        return only_hosts.to_vec();
    }
    // Default host coverage, in the order reports display them.
    vec![
        AiClientProfile::ClaudeCode,
        AiClientProfile::Cursor,
        AiClientProfile::GeminiCli,
        AiClientProfile::GithubCopilot,
        AiClientProfile::ContinueDev,
        AiClientProfile::OpenCode,
        AiClientProfile::JetbrainsAiAssistant,
        AiClientProfile::Junie,
        AiClientProfile::Windsurf,
        AiClientProfile::OpenaiCodex,
    ]
}
/// Gather candidate profile JSON files from a mix of files and directories.
///
/// Directories are scanned (recursively when `recursive` is set) for files
/// with a case-insensitive `.json` extension; explicit file paths are taken
/// as-is. The result is sorted and de-duplicated.
fn collect_profile_paths(paths: &[PathBuf], recursive: bool) -> Result<Vec<PathBuf>> {
    // Scan a single directory, descending into subdirectories when asked.
    fn walk(dir: &Path, recursive: bool, found: &mut Vec<PathBuf>) -> Result<()> {
        for entry in fs::read_dir(dir)? {
            let path = entry?.path();
            if path.is_dir() {
                if recursive {
                    walk(&path, recursive, found)?;
                }
                continue;
            }
            let is_json = path
                .extension()
                .and_then(|ext| ext.to_str())
                .is_some_and(|ext| ext.eq_ignore_ascii_case("json"));
            if is_json {
                found.push(path);
            }
        }
        Ok(())
    }
    let mut found = Vec::new();
    for candidate in paths {
        if candidate.is_dir() {
            walk(candidate, recursive, &mut found)?;
        } else if candidate.is_file() {
            found.push(candidate.clone());
        }
    }
    found.sort();
    found.dedup();
    Ok(found)
}
/// Read a profile bundle from disk and validate its schema marker.
///
/// The file must parse as JSON and carry `bundle_schema` equal to
/// `PROFILE_BUNDLE_SCHEMA`; anything else is rejected with a descriptive error.
fn load_bundle_value(path: &Path) -> Result<Value> {
    let raw = fs::read_to_string(path)?;
    let value: Value = serde_json::from_str(&raw)?;
    let schema_ok =
        value.get("bundle_schema").and_then(Value::as_str) == Some(PROFILE_BUNDLE_SCHEMA);
    if !schema_ok {
        return Err(sxmc::error::SxmcError::Other(format!(
            "Bundle file '{}' is not a valid sxmc profile bundle. Expected `bundle_schema: {}`.",
            path.display(),
            PROFILE_BUNDLE_SCHEMA
        )));
    }
    Ok(value)
}
/// Validate an already-parsed bundle JSON value against the expected schema
/// marker, passing the value through unchanged on success. `source_label`
/// only affects the error message.
fn validate_bundle_value(value: Value, source_label: &str) -> Result<Value> {
    let schema_ok =
        value.get("bundle_schema").and_then(Value::as_str) == Some(PROFILE_BUNDLE_SCHEMA);
    if !schema_ok {
        return Err(sxmc::error::SxmcError::Other(format!(
            "Bundle source '{}' is not a valid sxmc profile bundle. Expected `bundle_schema: {}`.",
            source_label, PROFILE_BUNDLE_SCHEMA
        )));
    }
    Ok(value)
}
/// SHA-256 hex digest of the bundle's compact JSON serialization.
fn bundle_sha256_from_value(value: &Value) -> Result<String> {
    let serialized = serde_json::to_vec(value)?;
    Ok(sha256_hex(&serialized))
}
fn unsigned_bundle_value(value: &Value) -> Value {
let mut unsigned = value.clone();
if let Some(object) = unsigned.as_object_mut() {
object.remove("signature");
}
unsigned
}
/// Compute the HMAC-SHA256 signature (lowercase hex) of a bundle.
///
/// The signed payload is the compact JSON serialization of the bundle with
/// its `signature` field removed, so signing and verification agree on the
/// exact bytes.
fn bundle_signature_from_value(value: &Value, secret: &str) -> Result<String> {
    let payload = serde_json::to_vec(&unsigned_bundle_value(value))?;
    let mut mac = Hmac::<Sha256>::new_from_slice(secret.as_bytes()).map_err(|error| {
        sxmc::error::SxmcError::Other(format!(
            "Failed to initialize bundle signature generator: {}",
            error
        ))
    })?;
    mac.update(&payload);
    let bytes = mac.finalize().into_bytes();
    Ok(bytes_to_hex(bytes.as_slice()))
}
/// Load an Ed25519 signing key from a JSON file with a hex `seed` field.
///
/// The seed must decode to exactly 32 bytes. Returns the signing key together
/// with the hex-encoded public key derived from it.
fn load_signing_key(path: &Path) -> Result<(SigningKey, String)> {
    let value: Value = serde_json::from_str(&fs::read_to_string(path)?)?;
    let seed_hex = value.get("seed").and_then(Value::as_str).ok_or_else(|| {
        sxmc::error::SxmcError::Other(format!(
            "Signing key '{}' is missing a `seed` field.",
            path.display()
        ))
    })?;
    let seed = hex_to_bytes(seed_hex)?;
    // Ed25519 seeds are exactly 32 bytes.
    let seed: [u8; 32] = seed.try_into().map_err(|_| {
        sxmc::error::SxmcError::Other(format!(
            "Signing key '{}' must contain a 32-byte Ed25519 seed.",
            path.display()
        ))
    })?;
    let signing_key = SigningKey::from_bytes(&seed);
    let public_key = bytes_to_hex(signing_key.verifying_key().as_bytes());
    Ok((signing_key, public_key))
}
/// Load an Ed25519 verifying (public) key from a JSON file with a hex
/// `public_key` field. The key must decode to exactly 32 bytes and be a
/// valid curve point.
fn load_verifying_key(path: &Path) -> Result<VerifyingKey> {
    let value: Value = serde_json::from_str(&fs::read_to_string(path)?)?;
    let public_hex = value
        .get("public_key")
        .and_then(Value::as_str)
        .ok_or_else(|| {
            sxmc::error::SxmcError::Other(format!(
                "Public key '{}' is missing a `public_key` field.",
                path.display()
            ))
        })?;
    let public = hex_to_bytes(public_hex)?;
    // Ed25519 public keys are exactly 32 bytes.
    let public: [u8; 32] = public.try_into().map_err(|_| {
        sxmc::error::SxmcError::Other(format!(
            "Public key '{}' must contain a 32-byte Ed25519 public key.",
            path.display()
        ))
    })?;
    // from_bytes can still fail if the bytes are not a valid curve point.
    VerifyingKey::from_bytes(&public).map_err(|error| {
        sxmc::error::SxmcError::Other(format!(
            "Failed to decode Ed25519 public key '{}': {}",
            path.display(),
            error
        ))
    })
}
/// Ed25519-sign the canonical (signature-stripped) bundle payload and return
/// the signature as lowercase hex.
fn ed25519_signature_from_value(value: &Value, signing_key: &SigningKey) -> Result<String> {
    let payload = serde_json::to_vec(&unsigned_bundle_value(value))?;
    let signature = signing_key.sign(&payload);
    Ok(bytes_to_hex(&signature.to_bytes()))
}
/// Embed a signature object into a bundle, if signing material was provided.
///
/// Precedence: an HMAC `signature_secret` wins over an Ed25519 `signing_key`
/// file; with neither, the bundle is returned unsigned. The embedded object
/// records the algorithm (constants defined elsewhere in this file) and, for
/// Ed25519, the hex public key so receivers can verify without extra files.
fn sign_bundle_value(
    mut value: Value,
    signature_secret: Option<&str>,
    signing_key: Option<&Path>,
) -> Result<Value> {
    if let Some(secret) = signature_secret {
        let signature = bundle_signature_from_value(&value, secret)?;
        if let Some(object) = value.as_object_mut() {
            object.insert(
                "signature".into(),
                json!({
                    "algorithm": PROFILE_BUNDLE_SIGNATURE_ALGORITHM,
                    "value": signature,
                }),
            );
        }
    } else if let Some(signing_key_path) = signing_key {
        let (signing_key, public_key) = load_signing_key(signing_key_path)?;
        let signature = ed25519_signature_from_value(&value, &signing_key)?;
        if let Some(object) = value.as_object_mut() {
            object.insert(
                "signature".into(),
                json!({
                    "algorithm": PROFILE_BUNDLE_SIGNATURE_ALGORITHM_ED25519,
                    "value": signature,
                    "public_key": public_key,
                }),
            );
        }
    }
    Ok(value)
}
/// Summarize a bundle's embedded signature metadata without verifying it.
/// Missing or non-object `signature` fields yield an all-null "absent" report.
fn bundle_signature_report(value: &Value) -> Value {
    if let Some(Value::Object(signature)) = value.get("signature") {
        json!({
            "present": true,
            "algorithm": signature.get("algorithm").and_then(Value::as_str),
            "value": signature.get("value").and_then(Value::as_str),
            "public_key": signature.get("public_key").and_then(Value::as_str),
        })
    } else {
        json!({
            "present": false,
            "algorithm": Value::Null,
            "value": Value::Null,
            "public_key": Value::Null,
        })
    }
}
/// Verify a bundle's embedded signature when verification material is supplied.
///
/// With neither `signature_secret` nor `public_key`, this only reports the
/// signature metadata without verifying. Otherwise the embedded algorithm
/// selects the scheme: HMAC-SHA256 (requires the secret) or Ed25519 (uses the
/// embedded public key, cross-checked against `public_key` if given). On
/// success the report gains `"verified": true`.
fn verify_bundle_signature(
    value: &Value,
    signature_secret: Option<&str>,
    public_key: Option<&Path>,
    source_label: &str,
) -> Result<Value> {
    let base = bundle_signature_report(value);
    // No verification material: report-only mode.
    if signature_secret.is_none() && public_key.is_none() {
        return Ok(base);
    }
    let signature = value
        .get("signature")
        .and_then(Value::as_object)
        .ok_or_else(|| {
            sxmc::error::SxmcError::Other(format!(
                "Bundle source '{}' is missing embedded signature metadata. Re-export it with --signature-secret before verifying.",
                source_label
            ))
        })?;
    let algorithm = signature
        .get("algorithm")
        .and_then(Value::as_str)
        .unwrap_or_default();
    let expected = signature
        .get("value")
        .and_then(Value::as_str)
        .filter(|value| !value.is_empty())
        .ok_or_else(|| {
            sxmc::error::SxmcError::Other(format!(
                "Bundle source '{}' is missing an embedded signature value.",
                source_label
            ))
        })?;
    match algorithm {
        PROFILE_BUNDLE_SIGNATURE_ALGORITHM => {
            let secret = signature_secret.ok_or_else(|| {
                sxmc::error::SxmcError::Other(format!(
                    "Bundle source '{}' uses HMAC signature verification. Re-run with --signature-secret.",
                    source_label
                ))
            })?;
            // Recompute over the signature-stripped payload and compare hex.
            // NOTE(review): eq_ignore_ascii_case is not a constant-time
            // comparison; a timing-safe compare (e.g. hmac's
            // Mac::verify_slice) would be preferable for HMAC verification.
            let actual = bundle_signature_from_value(value, secret)?;
            if !actual.eq_ignore_ascii_case(expected) {
                return Err(sxmc::error::SxmcError::Other(format!(
                    "Bundle source '{}' did not match the expected embedded signature.\nExpected: {}\nActual: {}",
                    source_label, expected, actual
                )));
            }
        }
        PROFILE_BUNDLE_SIGNATURE_ALGORITHM_ED25519 => {
            let embedded_public = signature
                .get("public_key")
                .and_then(Value::as_str)
                .filter(|value| !value.is_empty())
                .ok_or_else(|| {
                    sxmc::error::SxmcError::Other(format!(
                        "Bundle source '{}' is missing an embedded Ed25519 public key.",
                        source_label
                    ))
                })?;
            // Prefer the caller-supplied key file, but require it to match the
            // embedded key so a tampered bundle can't silently swap keys.
            let verifying_key = if let Some(path) = public_key {
                let key = load_verifying_key(path)?;
                let expected_key = bytes_to_hex(key.as_bytes());
                if !expected_key.eq_ignore_ascii_case(embedded_public) {
                    return Err(sxmc::error::SxmcError::Other(format!(
                        "Bundle source '{}' was signed by a different public key than '{}'.",
                        source_label,
                        path.display()
                    )));
                }
                key
            } else {
                let bytes = hex_to_bytes(embedded_public)?;
                let bytes: [u8; 32] = bytes.try_into().map_err(|_| {
                    sxmc::error::SxmcError::Other(format!(
                        "Bundle source '{}' embeds an invalid Ed25519 public key.",
                        source_label
                    ))
                })?;
                VerifyingKey::from_bytes(&bytes).map_err(|error| {
                    sxmc::error::SxmcError::Other(format!(
                        "Failed to decode embedded Ed25519 public key for '{}': {}",
                        source_label, error
                    ))
                })?
            };
            let signature_bytes = hex_to_bytes(expected)?;
            // Ed25519 signatures are exactly 64 bytes.
            let signature_bytes: [u8; 64] = signature_bytes.try_into().map_err(|_| {
                sxmc::error::SxmcError::Other(format!(
                    "Bundle source '{}' embeds an invalid Ed25519 signature.",
                    source_label
                ))
            })?;
            let signature = Signature::from_bytes(&signature_bytes);
            let payload = serde_json::to_vec(&unsigned_bundle_value(value))?;
            verifying_key
                .verify(&payload, &signature)
                .map_err(|error| {
                    sxmc::error::SxmcError::Other(format!(
                        "Bundle source '{}' did not match the embedded Ed25519 signature: {}",
                        source_label, error
                    ))
                })?;
        }
        other => {
            return Err(sxmc::error::SxmcError::Other(format!(
                "Bundle source '{}' uses unsupported signature algorithm '{}'. Expected '{}' or '{}'.",
                source_label,
                other,
                PROFILE_BUNDLE_SIGNATURE_ALGORITHM,
                PROFILE_BUNDLE_SIGNATURE_ALGORITHM_ED25519
            )));
        }
    }
    let mut verified = base;
    if let Some(object) = verified.as_object_mut() {
        object.insert("verified".into(), Value::Bool(true));
    }
    Ok(verified)
}
fn verify_bundle_digest(
value: &Value,
expected_sha256: Option<&str>,
source_label: &str,
) -> Result<String> {
let actual = bundle_sha256_from_value(value)?;
if let Some(expected) = expected_sha256 {
if !actual.eq_ignore_ascii_case(expected) {
return Err(sxmc::error::SxmcError::Other(format!(
"Bundle source '{}' did not match the expected SHA-256.\nExpected: {}\nActual: {}",
source_label, expected, actual
)));
}
}
Ok(actual)
}
fn bundle_metadata_value(
bundle_name: Option<&str>,
description: Option<&str>,
role: Option<&str>,
hosts: &[AiClientProfile],
) -> Value {
json!({
"name": bundle_name,
"description": description,
"role": role,
"hosts": hosts
.iter()
.map(|host| cli_surfaces::host_profile_spec(*host).sidecar_scope)
.collect::<Vec<_>>(),
})
}
/// Build a complete profile bundle JSON value from saved profile files.
///
/// Each profile is loaded and embedded in full under `profiles`, with a
/// lightweight `entries` index (command + source path) alongside. Provenance
/// fields (generator version, timestamp) are stamped at export time.
/// Fails on the first profile that cannot be loaded or serialized.
fn export_profile_bundle_value(
    profile_paths: &[PathBuf],
    bundle_name: Option<&str>,
    description: Option<&str>,
    role: Option<&str>,
    hosts: &[AiClientProfile],
) -> Result<Value> {
    let mut profiles = Vec::new();
    let mut entries = Vec::new();
    for path in profile_paths {
        let profile = cli_surfaces::load_profile(path)?;
        entries.push(json!({
            "command": profile.command,
            "path": path.display().to_string(),
        }));
        profiles.push(serde_json::to_value(profile)?);
    }
    Ok(json!({
        "bundle_schema": PROFILE_BUNDLE_SCHEMA,
        "generated_by": "sxmc",
        "generator_version": env!("CARGO_PKG_VERSION"),
        "generated_at": Utc::now().to_rfc3339(),
        "profile_count": profiles.len(),
        "metadata": bundle_metadata_value(bundle_name, description, role, hosts),
        "entries": entries,
        "profiles": profiles,
    }))
}
/// How to resolve filename collisions when importing bundle profiles.
#[derive(Copy, Clone)]
enum BundleImportMode {
    // Generate a fresh suffixed filename when the target already exists.
    Unique,
    // Replace any existing file with the same slug.
    Overwrite,
    // Leave existing files untouched and record the profile as skipped.
    SkipExisting,
}
/// Write the profiles contained in a validated bundle value into `output_dir`.
///
/// Each profile is deserialized, slugged by its command name, and written as
/// pretty-printed JSON according to `mode` (overwrite / skip-existing /
/// unique-suffix). Returns a summary with written and skipped entries.
fn import_profile_bundle_from_value(
    source_label: &str,
    bundle_value: Value,
    output_dir: &Path,
    mode: BundleImportMode,
) -> Result<Value> {
    let profiles: Vec<cli_surfaces::CliSurfaceProfile> = bundle_value
        .get("profiles")
        .and_then(Value::as_array)
        .ok_or_else(|| {
            sxmc::error::SxmcError::Other(format!(
                "Bundle file '{}' is missing a `profiles` array.",
                source_label
            ))
        })?
        .iter()
        .cloned()
        .map(serde_json::from_value)
        .collect::<std::result::Result<Vec<_>, _>>()
        .map_err(sxmc::error::SxmcError::from)?;
    fs::create_dir_all(output_dir)?;
    let mut written = Vec::new();
    let mut skipped = Vec::new();
    // Per-slug collision counters, shared across profiles in this bundle.
    let mut slug_counts: HashMap<String, usize> = HashMap::new();
    for profile in profiles {
        let base_slug = bundle_slug(&profile.command);
        let target = match mode {
            BundleImportMode::Overwrite => output_dir.join(format!("{base_slug}.json")),
            BundleImportMode::SkipExisting => {
                let path = output_dir.join(format!("{base_slug}.json"));
                if path.exists() {
                    skipped.push(json!({
                        "command": profile.command,
                        "path": path.display().to_string(),
                        "reason": "existing file preserved",
                    }));
                    continue;
                }
                path
            }
            BundleImportMode::Unique => {
                let count = slug_counts.entry(base_slug.clone()).or_insert(0);
                let mut path = output_dir.join(format!("{base_slug}.json"));
                // NOTE(review): the first collision produces suffix "-2"
                // (count goes 0 -> 1 and the name uses *count + 1), so a
                // "-1" file is never generated — confirm this numbering is
                // intentional before changing it; existing installs may
                // already contain "-2"-style files.
                while path.exists() {
                    *count += 1;
                    path = output_dir.join(format!("{base_slug}-{}.json", *count + 1));
                }
                path
            }
        };
        fs::write(
            &target,
            serde_json::to_string_pretty(&cli_surfaces::profile_value(&profile))?,
        )?;
        written.push(json!({
            "command": profile.command,
            "path": target.display().to_string(),
        }));
    }
    Ok(json!({
        "bundle_schema": PROFILE_BUNDLE_SCHEMA,
        "input": source_label,
        "output_dir": output_dir.display().to_string(),
        "metadata": bundle_value.get("metadata").cloned().unwrap_or(Value::Null),
        "imported_count": written.len(),
        "skipped_count": skipped.len(),
        "written": written,
        "skipped": skipped,
    }))
}
/// Load a bundle file from disk and import its profiles into `output_dir`.
/// Thin wrapper that labels errors/summaries with the input path.
fn import_profile_bundle_value(
    input: &Path,
    output_dir: &Path,
    mode: BundleImportMode,
) -> Result<Value> {
    let label = input.display().to_string();
    let bundle = load_bundle_value(input)?;
    import_profile_bundle_from_value(&label, bundle, output_dir, mode)
}
/// Convert (name, value) string pairs into a reqwest `HeaderMap`.
///
/// Invalid names or values produce descriptive errors; a repeated header name
/// replaces the earlier entry (HeaderMap::insert semantics).
fn request_header_map(headers: &[(String, String)]) -> Result<reqwest::header::HeaderMap> {
    use reqwest::header::{HeaderMap, HeaderName, HeaderValue};
    let mut header_map = HeaderMap::new();
    for (raw_name, raw_value) in headers {
        let name = HeaderName::from_bytes(raw_name.as_bytes()).map_err(|error| {
            sxmc::error::SxmcError::Other(format!(
                "Invalid HTTP header name '{}': {}",
                raw_name, error
            ))
        })?;
        let value = HeaderValue::from_str(raw_value).map_err(|error| {
            sxmc::error::SxmcError::Other(format!(
                "Invalid HTTP header value for '{}': {}",
                raw_name, error
            ))
        })?;
        header_map.insert(name, value);
    }
    Ok(header_map)
}
/// Publish a bundle to either an HTTP(S) endpoint (PUT) or a filesystem path.
///
/// HTTP targets get the extra headers, a JSON content type, a pretty-printed
/// body, and a default 30s timeout; non-2xx responses are errors. File
/// targets (plain paths or `file://` URIs) get parent directories created and
/// the bundle written as pretty JSON. The returned JSON records the transport
/// used and, for HTTP, the status code.
async fn publish_bundle_target(
    target: &str,
    bundle_value: &Value,
    headers: &[(String, String)],
    timeout: Option<Duration>,
) -> Result<Value> {
    if is_http_target(target) {
        let client = reqwest::Client::builder()
            .timeout(timeout.unwrap_or(Duration::from_secs(30)))
            .build()
            .map_err(|error| {
                sxmc::error::SxmcError::Other(format!(
                    "Failed to create HTTP client for bundle publish: {}",
                    error
                ))
            })?;
        let response = client
            .put(target)
            .headers(request_header_map(headers)?)
            .header(reqwest::header::CONTENT_TYPE, "application/json")
            .body(serde_json::to_vec_pretty(bundle_value)?)
            .send()
            .await
            .map_err(|error| {
                sxmc::error::SxmcError::Other(format!(
                    "Failed to publish profile bundle to '{}': {}",
                    target, error
                ))
            })?;
        // Capture the status before error_for_status consumes the response.
        let status = response.status();
        response.error_for_status().map_err(|error| {
            sxmc::error::SxmcError::Other(format!(
                "Failed to publish profile bundle to '{}': {}",
                target, error
            ))
        })?;
        Ok(json!({
            "target": target,
            "transport": "http",
            "http_status": status.as_u16(),
        }))
    } else {
        let target_path = if target.starts_with("file://") {
            file_uri_to_path(target)
        } else {
            PathBuf::from(target)
        };
        if let Some(parent) = target_path.parent() {
            fs::create_dir_all(parent)?;
        }
        fs::write(&target_path, serde_json::to_string_pretty(bundle_value)?)?;
        Ok(json!({
            "target": target_path.display().to_string(),
            "transport": "file",
        }))
    }
}
/// Fetch and validate a bundle from an HTTP(S) URL or a filesystem path.
///
/// HTTP sources use the extra headers and a default 30s timeout; non-2xx
/// responses and non-JSON bodies are errors. File sources accept plain paths
/// or `file://` URIs. In both cases the result is schema-validated via
/// `validate_bundle_value` before being returned.
async fn read_bundle_source(
    source: &str,
    headers: &[(String, String)],
    timeout: Option<Duration>,
) -> Result<Value> {
    if is_http_target(source) {
        let client = reqwest::Client::builder()
            .timeout(timeout.unwrap_or(Duration::from_secs(30)))
            .build()
            .map_err(|error| {
                sxmc::error::SxmcError::Other(format!(
                    "Failed to create HTTP client for bundle pull: {}",
                    error
                ))
            })?;
        let response = client
            .get(source)
            .headers(request_header_map(headers)?)
            .send()
            .await
            .map_err(|error| {
                sxmc::error::SxmcError::Other(format!(
                    "Failed to pull profile bundle from '{}': {}",
                    source, error
                ))
            })?
            .error_for_status()
            .map_err(|error| {
                sxmc::error::SxmcError::Other(format!(
                    "Failed to pull profile bundle from '{}': {}",
                    source, error
                ))
            })?;
        let value: Value = response.json().await.map_err(|error| {
            sxmc::error::SxmcError::Other(format!(
                "Profile bundle response from '{}' was not valid JSON: {}",
                source, error
            ))
        })?;
        validate_bundle_value(value, source)
    } else {
        let path = if source.starts_with("file://") {
            file_uri_to_path(source)
        } else {
            PathBuf::from(source)
        };
        validate_bundle_value(load_bundle_value(&path)?, source)
    }
}
/// Build one drift-report entry for a saved profile file.
///
/// Loads the saved profile, re-inspects the live CLI at the same generation
/// depth, and diffs the two. Failures never propagate: load or re-inspection
/// errors are recorded in the entry with `changed: false` so one bad profile
/// does not abort a whole drift scan.
fn drift_entry_for_profile(path: &Path, allow_self: bool) -> Value {
    match cli_surfaces::load_profile(path) {
        Ok(saved) => match cli_surfaces::inspect_cli_with_depth(
            &saved.command,
            allow_self,
            profile_generation_depth(&saved),
        ) {
            Ok(live) => {
                let diff = cli_surfaces::diff_profile_value(&saved, &live);
                json!({
                    "path": path.display().to_string(),
                    "command": saved.command,
                    "changed": diff_value_has_changes(&diff),
                    "error": Value::Null,
                    "diff": diff,
                })
            }
            // Live inspection failed — report the error, no diff available.
            Err(error) => json!({
                "path": path.display().to_string(),
                "command": saved.command,
                "changed": false,
                "error": error.to_string(),
            }),
        },
        // The saved profile itself could not be loaded.
        Err(error) => json!({
            "path": path.display().to_string(),
            "command": Value::Null,
            "changed": false,
            "error": error.to_string(),
        }),
    }
}
/// Report how fresh a saved profile is based on its provenance timestamp.
///
/// An empty `generated_at` yields an all-null "unknown" report; an
/// unparseable timestamp is flagged with `parse_error`. Otherwise the age in
/// whole days (clamped at zero) is compared against `PROFILE_STALE_DAYS`.
fn profile_freshness_value(profile: &cli_surfaces::CliSurfaceProfile) -> Value {
    let generated_at = profile.provenance.generated_at.trim();
    if generated_at.is_empty() {
        return json!({
            "known": false,
            "generated_at": Value::Null,
            "age_days": Value::Null,
            "stale": Value::Null,
        });
    }
    let Ok(parsed) = chrono::DateTime::parse_from_rfc3339(generated_at) else {
        return json!({
            "known": false,
            "generated_at": generated_at,
            "age_days": Value::Null,
            "stale": Value::Null,
            "parse_error": true,
        });
    };
    // Clamp at zero so clock skew never reports a negative age.
    let age_days = Utc::now()
        .signed_duration_since(parsed.with_timezone(&Utc))
        .num_days()
        .max(0);
    json!({
        "known": true,
        "generated_at": generated_at,
        "age_days": age_days,
        "stale": age_days > PROFILE_STALE_DAYS,
    })
}
/// Summarize a set of saved profile files: per-file quality/freshness entries
/// plus aggregate counts.
///
/// Load failures become error entries rather than aborting, and the derived
/// counts (`not_ready_count`, `fresh_count`, `unknown_freshness_count`) use
/// saturating subtraction so they never underflow.
fn saved_profile_inventory_value(profile_paths: &[PathBuf]) -> Value {
    let mut entries = Vec::new();
    let mut ready_count = 0usize;
    let mut stale_count = 0usize;
    let mut freshness_known_count = 0usize;
    let mut error_count = 0usize;
    for path in profile_paths {
        match cli_surfaces::load_profile(path) {
            Ok(profile) => {
                let quality = profile.quality_report();
                let freshness = profile_freshness_value(&profile);
                if quality.ready_for_agent_docs {
                    ready_count += 1;
                }
                if freshness["known"].as_bool().unwrap_or(false) {
                    freshness_known_count += 1;
                }
                if freshness["stale"].as_bool().unwrap_or(false) {
                    stale_count += 1;
                }
                entries.push(json!({
                    "path": path.display().to_string(),
                    "command": profile.command,
                    "summary": profile.summary,
                    "subcommand_count": profile.subcommands.len(),
                    "option_count": profile.options.len(),
                    "quality": {
                        "ready_for_agent_docs": quality.ready_for_agent_docs,
                        "score": quality.score,
                        "level": quality.level,
                        "reasons": quality.reasons,
                    },
                    "freshness": freshness,
                    "provenance": {
                        "generated_at": profile.provenance.generated_at,
                        "generator_version": profile.provenance.generator_version,
                        "source_kind": profile.provenance.source_kind,
                    }
                }));
            }
            Err(error) => {
                // Unloadable profile: keep a minimal error entry in the list.
                error_count += 1;
                entries.push(json!({
                    "path": path.display().to_string(),
                    "error": error.to_string(),
                }));
            }
        }
    }
    let total = entries.len();
    json!({
        "count": total,
        "ready_count": ready_count,
        "not_ready_count": total.saturating_sub(ready_count + error_count),
        "freshness_known_count": freshness_known_count,
        "stale_count": stale_count,
        "fresh_count": freshness_known_count.saturating_sub(stale_count),
        "unknown_freshness_count": total.saturating_sub(freshness_known_count + error_count),
        "error_count": error_count,
        "stale_after_days": PROFILE_STALE_DAYS,
        "entries": entries,
    })
}
/// Run a drift scan over saved profiles and aggregate the results.
///
/// One entry per path (errors included as entries), with counts of changed
/// and errored profiles; `unchanged_count` never underflows.
fn drift_value(profile_paths: &[PathBuf], allow_self: bool) -> Value {
    let mut entries = Vec::with_capacity(profile_paths.len());
    let mut changed_count = 0usize;
    let mut error_count = 0usize;
    for path in profile_paths {
        let entry = drift_entry_for_profile(path, allow_self);
        if entry["changed"].as_bool().unwrap_or(false) {
            changed_count += 1;
        }
        if !entry["error"].is_null() {
            error_count += 1;
        }
        entries.push(entry);
    }
    json!({
        "count": entries.len(),
        "changed_count": changed_count,
        "unchanged_count": entries.len().saturating_sub(changed_count + error_count),
        "error_count": error_count,
        "entries": entries,
    })
}
/// Build the full `status` report: the doctor report extended with saved
/// profile inventory, drift results, and sync-state summary.
///
/// When the saved-profiles directory is missing, zeroed drift/inventory
/// placeholders are emitted so the output shape stays stable for consumers.
fn status_value(install_paths: &InstallPaths, only_hosts: &[AiClientProfile]) -> Result<Value> {
    let mut value = doctor_value(install_paths, only_hosts)?;
    let profile_dir = default_saved_profiles_dir(install_paths);
    let (drift, inventory) = if profile_dir.exists() {
        // Recursive scan: saved profiles may be nested under the profile dir.
        let paths = collect_profile_paths(std::slice::from_ref(&profile_dir), true)?;
        let inventory = saved_profile_inventory_value(&paths);
        let drift = drift_value(&paths, true);
        (drift, inventory)
    } else {
        (
            json!({
                "count": 0,
                "changed_count": 0,
                "unchanged_count": 0,
                "error_count": 0,
                "entries": [],
            }),
            json!({
                "count": 0,
                "ready_count": 0,
                "not_ready_count": 0,
                "freshness_known_count": 0,
                "stale_count": 0,
                "fresh_count": 0,
                "unknown_freshness_count": 0,
                "error_count": 0,
                "stale_after_days": PROFILE_STALE_DAYS,
                "entries": [],
            }),
        )
    };
    if let Some(object) = value.as_object_mut() {
        object.insert(
            "saved_profiles".into(),
            json!({
                "path": profile_dir.display().to_string(),
                "present": profile_dir.exists(),
                "drift": drift,
                "inventory": inventory,
            }),
        );
        object.insert(
            "sync_state".into(),
            sync_state_summary_value(install_paths, &drift),
        );
    }
    Ok(value)
}
/// Export saved profiles as a self-contained corpus JSON document.
///
/// Mirrors `saved_profile_inventory_value`'s entries and counts, but embeds
/// the full profile payload per entry and stamps the corpus schema marker and
/// generation timestamp. Load failures become `"type": "error"` entries.
fn export_profile_corpus_value(profile_paths: &[PathBuf]) -> Value {
    let mut entries = Vec::new();
    let mut error_count = 0usize;
    let mut ready_count = 0usize;
    let mut stale_count = 0usize;
    let mut freshness_known_count = 0usize;
    for path in profile_paths {
        match cli_surfaces::load_profile(path) {
            Ok(profile) => {
                let quality = profile.quality_report();
                let freshness = profile_freshness_value(&profile);
                if quality.ready_for_agent_docs {
                    ready_count += 1;
                }
                if freshness["known"].as_bool().unwrap_or(false) {
                    freshness_known_count += 1;
                }
                if freshness["stale"].as_bool().unwrap_or(false) {
                    stale_count += 1;
                }
                entries.push(json!({
                    "type": "profile",
                    "path": path.display().to_string(),
                    "command": profile.command,
                    "summary": profile.summary,
                    "quality": {
                        "ready_for_agent_docs": quality.ready_for_agent_docs,
                        "score": quality.score,
                        "level": quality.level,
                        "reasons": quality.reasons,
                    },
                    "freshness": freshness,
                    "profile": cli_surfaces::profile_value(&profile),
                }));
            }
            Err(error) => {
                error_count += 1;
                entries.push(json!({
                    "type": "error",
                    "path": path.display().to_string(),
                    "error": error.to_string(),
                }));
            }
        }
    }
    let total = entries.len();
    json!({
        "corpus_schema": PROFILE_CORPUS_SCHEMA,
        "generated_at": Utc::now().to_rfc3339(),
        "count": total,
        "ready_count": ready_count,
        "not_ready_count": total.saturating_sub(ready_count + error_count),
        "freshness_known_count": freshness_known_count,
        "stale_count": stale_count,
        "fresh_count": freshness_known_count.saturating_sub(stale_count),
        "unknown_freshness_count": total.saturating_sub(freshness_known_count + error_count),
        "error_count": error_count,
        "stale_after_days": PROFILE_STALE_DAYS,
        "entries": entries,
    })
}
/// Read a profile corpus from disk and validate its schema marker.
/// The file must parse as JSON and carry `corpus_schema` equal to
/// `PROFILE_CORPUS_SCHEMA`.
fn load_corpus_value(path: &Path) -> Result<Value> {
    let raw = fs::read(path)?;
    let value: Value = serde_json::from_slice(&raw)?;
    let schema_ok =
        value.get("corpus_schema").and_then(Value::as_str) == Some(PROFILE_CORPUS_SCHEMA);
    if !schema_ok {
        return Err(sxmc::error::SxmcError::Other(format!(
            "Corpus file '{}' is not a valid sxmc profile corpus. Expected `corpus_schema: {}`.",
            path.display(),
            PROFILE_CORPUS_SCHEMA
        )));
    }
    Ok(value)
}
fn corpus_stats_value(value: &Value) -> Value {
let entries = value["entries"].as_array().cloned().unwrap_or_default();
let profile_entries = entries
.iter()
.filter(|entry| entry["type"] == "profile")
.cloned()
.collect::<Vec<_>>();
let command_count = profile_entries
.iter()
.filter_map(|entry| entry["command"].as_str())
.collect::<std::collections::HashSet<_>>()
.len();
let ready_count = profile_entries
.iter()
.filter(|entry| {
entry["quality"]["ready_for_agent_docs"]
.as_bool()
.unwrap_or(false)
})
.count();
let stale_count = profile_entries
.iter()
.filter(|entry| entry["freshness"]["stale"].as_bool().unwrap_or(false))
.count();
let average_quality_score = if profile_entries.is_empty() {
0.0
} else {
profile_entries
.iter()
.map(|entry| entry["quality"]["score"].as_u64().unwrap_or(0) as f64)
.sum::<f64>()
/ profile_entries.len() as f64
};
json!({
"corpus_schema": value["corpus_schema"],
"generated_at": value["generated_at"],
"count": value["count"],
"profile_count": profile_entries.len(),
"error_count": value["error_count"],
"command_count": command_count,
"ready_count": ready_count,
"stale_count": stale_count,
"average_quality_score": average_quality_score,
})
}
/// Filters a corpus's profile entries by exact command and/or a
/// case-insensitive substring over command and summary, sorts them by
/// quality score (descending), and truncates to `limit`.
fn corpus_query_value(
    value: &Value,
    command: Option<&str>,
    search: Option<&str>,
    limit: usize,
) -> Value {
    // Search is matched case-insensitively; the lowercased needle is also
    // echoed back in the `query` object of the result.
    let needle = search.map(str::to_lowercase);
    let matches_filters = |entry: &Value| -> bool {
        let command_ok = match command {
            Some(wanted) => entry["command"].as_str() == Some(wanted),
            None => true,
        };
        let search_ok = match needle.as_ref() {
            Some(needle) => {
                let command_text = entry["command"].as_str().unwrap_or_default().to_lowercase();
                let summary_text = entry["summary"].as_str().unwrap_or_default().to_lowercase();
                command_text.contains(needle) || summary_text.contains(needle)
            }
            None => true,
        };
        command_ok && search_ok
    };
    let mut hits = value["entries"]
        .as_array()
        .cloned()
        .unwrap_or_default()
        .into_iter()
        .filter(|entry| entry["type"] == "profile" && matches_filters(entry))
        .collect::<Vec<_>>();
    // Highest quality score first; stable sort preserves corpus order on ties.
    hits.sort_by_key(|entry| std::cmp::Reverse(entry["quality"]["score"].as_u64().unwrap_or(0)));
    let total_matches = hits.len();
    hits.truncate(limit);
    json!({
        "corpus_schema": value["corpus_schema"],
        "query": {
            "command": command,
            "search": needle,
            "limit": limit,
        },
        "match_count": total_matches,
        "entries": hits,
    })
}
/// Generates a fresh ed25519 bundle-signing keypair and writes both halves
/// to `output_dir` as JSON files.
///
/// The private file contains the raw signing seed, so on Unix it is
/// restricted to owner-only permissions (0o600) after writing; the public
/// file stays world-readable. Returns the file locations plus a SHA-256
/// fingerprint of the public key.
fn generate_bundle_keypair_value(output_dir: &Path) -> Result<Value> {
    fs::create_dir_all(output_dir)?;
    let private_path = output_dir.join("bundle-signing.ed25519.key.json");
    let public_path = output_dir.join("bundle-signing.ed25519.pub.json");
    let signing_key = SigningKey::generate(&mut OsRng);
    let verifying_key = signing_key.verifying_key();
    let private_value = json!({
        "algorithm": PROFILE_BUNDLE_SIGNATURE_ALGORITHM_ED25519,
        "seed": bytes_to_hex(&signing_key.to_bytes()),
        "public_key": bytes_to_hex(verifying_key.as_bytes()),
    });
    let public_value = json!({
        "algorithm": PROFILE_BUNDLE_SIGNATURE_ALGORITHM_ED25519,
        "public_key": bytes_to_hex(verifying_key.as_bytes()),
    });
    fs::write(&private_path, serde_json::to_string_pretty(&private_value)?)?;
    // The seed must not be readable by other local users: tighten the file
    // mode immediately after writing (no-op on non-Unix platforms).
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        fs::set_permissions(&private_path, fs::Permissions::from_mode(0o600))?;
    }
    fs::write(&public_path, serde_json::to_string_pretty(&public_value)?)?;
    Ok(json!({
        "algorithm": PROFILE_BUNDLE_SIGNATURE_ALGORITHM_ED25519,
        "private_key": private_path.display().to_string(),
        "public_key": public_path.display().to_string(),
        "fingerprint": sha256_hex(verifying_key.as_bytes()),
    }))
}
/// Loads CLI surface profiles from `input`, which may be:
/// - a directory of saved profile files,
/// - a profile bundle JSON (`bundle_schema` == `PROFILE_BUNDLE_SCHEMA`), or
/// - a single profile file.
///
/// Returns `(label, profile)` pairs where the label identifies the origin:
/// the file path, or `path#N` (1-based) for bundle members.
fn load_profiles_from_intelligence_input(
    input: &Path,
) -> Result<Vec<(String, cli_surfaces::CliSurfaceProfile)>> {
    // Directory input: gather profile file paths and load each one.
    // NOTE(review): the `true` flag to collect_profile_paths presumably
    // enables recursion — confirm against its definition.
    if input.is_dir() {
        return collect_profile_paths(&[input.to_path_buf()], true)?
            .into_iter()
            .map(|path| {
                let label = path.display().to_string();
                cli_surfaces::load_profile(&path).map(|profile| (label, profile))
            })
            .collect();
    }
    let text = fs::read_to_string(input)?;
    let value: Value = serde_json::from_str(&text)?;
    // Bundle file: deserialize every embedded profile; collect::<Result<..>>
    // short-circuits on the first malformed member.
    if value.get("bundle_schema").and_then(Value::as_str) == Some(PROFILE_BUNDLE_SCHEMA) {
        let profiles = value["profiles"].as_array().cloned().unwrap_or_default();
        return profiles
            .into_iter()
            .enumerate()
            .map(|(index, value)| {
                let profile: cli_surfaces::CliSurfaceProfile = serde_json::from_value(value)?;
                Ok((format!("{}#{}", input.display(), index + 1), profile))
            })
            .collect();
    }
    // Anything else is treated as a single profile file.
    Ok(vec![(
        input.display().to_string(),
        cli_surfaces::load_profile(input)?,
    )])
}
/// Picks the best saved profile for `command` from the profiles found in
/// `input` (see `load_profiles_from_intelligence_input` for accepted forms).
///
/// Candidates are ranked by `quality.score` plus two bonuses:
/// - freshness: +15 if known-fresh, +0 if known-stale, +5 if unknown;
/// - provenance: +5 if generated by this binary's version.
///
/// Returns the top-ranked candidate under `selected` with the rest under
/// `alternatives`, or an error when no profile matches the command.
fn known_good_value(input: &Path, command: &str) -> Result<Value> {
    let mut candidates = load_profiles_from_intelligence_input(input)?
        .into_iter()
        .filter(|(_, profile)| profile.command == command)
        .map(|(source, profile)| {
            let quality = profile.quality_report();
            let freshness = profile_freshness_value(&profile);
            // stale == false  -> known fresh (+15)
            // known == true   -> known but stale (+0)
            // otherwise       -> freshness unknown (+5)
            let freshness_bonus = if freshness["stale"].as_bool() == Some(false) {
                15i64
            } else if freshness["known"].as_bool() == Some(true) {
                0
            } else {
                5
            };
            // Prefer profiles produced by the currently-running generator.
            let provenance_bonus =
                if profile.provenance.generator_version == env!("CARGO_PKG_VERSION") {
                    5i64
                } else {
                    0
                };
            let rank_score = quality.score as i64 + freshness_bonus + provenance_bonus;
            json!({
                "source": source,
                "command": profile.command,
                "summary": profile.summary,
                "quality": {
                    "score": quality.score,
                    "level": quality.level,
                    "ready_for_agent_docs": quality.ready_for_agent_docs,
                },
                "freshness": freshness,
                "provenance": profile.provenance,
                "rank_score": rank_score,
                "profile": cli_surfaces::profile_value(&profile),
            })
        })
        .collect::<Vec<_>>();
    // Highest rank_score first; stable sort keeps input order on ties.
    candidates.sort_by(|a, b| {
        b["rank_score"]
            .as_i64()
            .unwrap_or(0)
            .cmp(&a["rank_score"].as_i64().unwrap_or(0))
    });
    if candidates.is_empty() {
        return Err(sxmc::error::SxmcError::Other(format!(
            "No saved profiles for '{}' were found in '{}'.",
            command,
            input.display()
        )));
    }
    Ok(json!({
        "command": command,
        "candidate_count": candidates.len(),
        "selected": candidates.first().cloned().unwrap_or(Value::Null),
        "alternatives": candidates.into_iter().skip(1).collect::<Vec<_>>(),
    }))
}
/// Aggregates quality statistics over a bundle's embedded profiles:
/// how many parsed, how many are agent-doc ready, how many are stale,
/// and the mean quality score. Entries that fail to deserialize are
/// silently ignored.
fn bundle_profile_quality_summary(bundle_value: &Value) -> Value {
    let mut profile_count = 0u64;
    let mut ready_count = 0u64;
    let mut stale_count = 0u64;
    let mut score_total = 0u64;
    let parsed = bundle_value["profiles"]
        .as_array()
        .cloned()
        .unwrap_or_default()
        .into_iter()
        .filter_map(|item| serde_json::from_value::<cli_surfaces::CliSurfaceProfile>(item).ok());
    for profile in parsed {
        profile_count += 1;
        let quality = profile.quality_report();
        score_total += quality.score as u64;
        if quality.ready_for_agent_docs {
            ready_count += 1;
        }
        if profile_freshness_value(&profile)["stale"].as_bool() == Some(true) {
            stale_count += 1;
        }
    }
    json!({
        "profile_count": profile_count,
        "ready_count": ready_count,
        "stale_count": stale_count,
        "average_quality_score": if profile_count == 0 { 0.0 } else { score_total as f64 / profile_count as f64 },
    })
}
fn trust_report_value(
input: &str,
bundle_value: &Value,
sha256: String,
signature: Value,
expected_sha256: Option<&str>,
) -> Value {
json!({
"bundle_schema": PROFILE_BUNDLE_SCHEMA,
"input": input,
"verified": true,
"sha256": sha256,
"expected_sha256": expected_sha256,
"signature": signature,
"metadata": bundle_value.get("metadata").cloned().unwrap_or(Value::Null),
"generated_by": bundle_value.get("generated_by").cloned().unwrap_or(Value::Null),
"generator_version": bundle_value.get("generator_version").cloned().unwrap_or(Value::Null),
"generated_at": bundle_value.get("generated_at").cloned().unwrap_or(Value::Null),
"quality": bundle_profile_quality_summary(bundle_value),
})
}
/// A registry index loaded from disk or HTTP, together with the base
/// location used to resolve relative bundle references inside it.
struct RegistrySourceValue {
    // Human-readable origin: the raw `source` string the user supplied.
    label: String,
    // The parsed registry index document.
    registry: Value,
    // Base directory for resolving relative entry paths (file-based sources).
    base_dir: Option<PathBuf>,
    // Base URL for resolving relative entry paths (HTTP sources).
    base_url: Option<reqwest::Url>,
}
/// Shared state handed to the registry HTTP server's axum handlers.
#[derive(Clone)]
struct RegistryServerState {
    // Root directory holding `index.json` and the `bundles/` subdirectory.
    registry_dir: PathBuf,
}
/// Requirements evaluated by `trust_policy_value` against a trust report.
/// `Option`/empty-slice fields disable the corresponding check.
struct TrustPolicyConfig<'a> {
    // Require the bundle to carry a signature at all.
    require_signature: bool,
    // Require the embedded signature to have verified successfully.
    require_verified_signature: bool,
    // Minimum acceptable mean quality score across the bundle's profiles.
    min_average_quality: Option<f64>,
    // Maximum number of stale profiles tolerated.
    max_stale_count: Option<u64>,
    // Minimum number of agent-doc-ready profiles required.
    min_ready_count: Option<u64>,
    // Exact `metadata.role` value required, if any.
    require_role: Option<&'a str>,
    // Hosts that must all appear in `metadata.hosts` (empty = no check).
    require_hosts: &'a [String],
}
/// Loads and validates a registry index from `source`, which may be an
/// HTTP(S) URL, a `file://` URI, a directory containing `index.json`, or a
/// direct path to an index file.
///
/// Besides the parsed index, the result records the base location
/// (`base_url` for HTTP, `base_dir` for files) used later to resolve
/// relative bundle references listed in the index.
async fn read_registry_source(
    source: &str,
    headers: &[(String, String)],
    timeout: Option<Duration>,
) -> Result<RegistrySourceValue> {
    if is_http_target(source) {
        // HTTP source: fetch the index with optional custom headers and a
        // default 30s timeout.
        let client = reqwest::Client::builder()
            .timeout(timeout.unwrap_or(Duration::from_secs(30)))
            .build()
            .map_err(|error| {
                sxmc::error::SxmcError::Other(format!(
                    "Failed to create HTTP client for registry pull: {}",
                    error
                ))
            })?;
        let response = client
            .get(source)
            .headers(request_header_map(headers)?)
            .send()
            .await
            .map_err(|error| {
                sxmc::error::SxmcError::Other(format!(
                    "Failed to pull registry from '{}': {}",
                    source, error
                ))
            })?
            .error_for_status()
            .map_err(|error| {
                sxmc::error::SxmcError::Other(format!(
                    "Failed to pull registry from '{}': {}",
                    source, error
                ))
            })?;
        let registry: Value = response.json().await.map_err(|error| {
            sxmc::error::SxmcError::Other(format!(
                "Registry response from '{}' was not valid JSON: {}",
                source, error
            ))
        })?;
        // Reject documents that do not carry the expected schema marker.
        let schema = registry["registry_schema"].as_str().unwrap_or_default();
        if schema != PROFILE_REGISTRY_SCHEMA {
            return Err(sxmc::error::SxmcError::Other(format!(
                "Registry '{}' is not a valid sxmc registry. Expected `registry_schema: {}`.",
                source, PROFILE_REGISTRY_SCHEMA
            )));
        }
        // Derive a base URL whose path ends in '/', so relative entry
        // sources resolve next to the index file (drop the final path
        // segment, which is the index filename).
        let mut base_url = reqwest::Url::parse(source).ok();
        if let Some(url) = base_url.as_mut() {
            if !url.path().ends_with('/') {
                let path = url.path().to_string();
                if let Some((prefix, _)) = path.rsplit_once('/') {
                    url.set_path(&format!("{}/", prefix));
                } else {
                    url.set_path("/");
                }
            }
        }
        Ok(RegistrySourceValue {
            label: source.to_string(),
            registry,
            base_dir: None,
            base_url,
        })
    } else {
        // File source: accept either a `file://` URI or a plain path.
        let path = if source.starts_with("file://") {
            file_uri_to_path(source)
        } else {
            PathBuf::from(source)
        };
        // A directory is a registry root (load its index.json); a file is
        // the index itself, validated here and resolved against its parent.
        let (registry, base_dir) = if path.is_dir() {
            (load_registry_value(&path)?, path)
        } else {
            let value: Value = serde_json::from_str(&fs::read_to_string(&path)?)?;
            let schema = value["registry_schema"].as_str().unwrap_or_default();
            if schema != PROFILE_REGISTRY_SCHEMA {
                return Err(sxmc::error::SxmcError::Other(format!(
                    "Registry '{}' is not a valid sxmc registry. Expected `registry_schema: {}`.",
                    path.display(),
                    PROFILE_REGISTRY_SCHEMA
                )));
            }
            (
                value,
                path.parent()
                    .map(Path::to_path_buf)
                    .unwrap_or_else(|| PathBuf::from(".")),
            )
        };
        Ok(RegistrySourceValue {
            label: source.to_string(),
            registry,
            base_dir: Some(base_dir),
            base_url: None,
        })
    }
}
/// Resolves a registry entry's raw location against the registry's base.
///
/// Absolute URLs, `file://` URIs, and absolute paths pass through; relative
/// values are joined onto the source's base URL (HTTP registries) or base
/// directory (file registries). Returns `None` for empty or unresolvable
/// input.
fn resolve_registry_location(raw: &str, source: &RegistrySourceValue) -> Option<String> {
    if raw.is_empty() {
        return None;
    }
    // Already fully qualified: keep as-is.
    if is_http_target(raw) || raw.starts_with("file://") {
        return Some(raw.to_string());
    }
    let candidate = PathBuf::from(raw);
    if candidate.is_absolute() {
        return Some(candidate.display().to_string());
    }
    // Relative: try the HTTP base first, then fall back to the base dir.
    if let Some(joined) = source.base_url.as_ref().and_then(|base| base.join(raw).ok()) {
        return Some(joined.to_string());
    }
    source
        .base_dir
        .as_ref()
        .map(|dir| dir.join(raw).display().to_string())
}
/// Finds the first resolvable location for a registry entry, preferring its
/// `source` field over `path`. Returns `None` when neither resolves.
fn resolve_registry_entry_source(entry: &Value, source: &RegistrySourceValue) -> Option<String> {
    ["source", "path"].iter().find_map(|key| {
        entry
            .get(*key)
            .and_then(Value::as_str)
            .and_then(|raw| resolve_registry_location(raw, source))
    })
}
/// Mirrors bundles listed in a remote/local registry `source` into the
/// local registry at `registry_dir`, initializing it first if needed.
///
/// Each source entry is fetched, digest-verified, and imported unless a
/// bundle with the same sha256 already exists locally. Returns a report
/// with `imported`, `skipped`, and `errors` buckets; individual entry
/// failures are recorded rather than aborting the sync.
async fn registry_sync_value(
    source: &str,
    registry_dir: &Path,
    headers: &[(String, String)],
    timeout: Option<Duration>,
) -> Result<Value> {
    let source_registry = read_registry_source(source, headers, timeout).await?;
    let entries = source_registry.registry["entries"]
        .as_array()
        .cloned()
        .unwrap_or_default();
    // Ensure a local registry exists, then load its current index.
    let existing = if registry_index_path(registry_dir).exists() {
        load_registry_value(registry_dir)?
    } else {
        registry_init_value(registry_dir)?;
        load_registry_value(registry_dir)?
    };
    // Digests already present locally — used to dedupe imports.
    let mut known_sha256 = existing["entries"]
        .as_array()
        .cloned()
        .unwrap_or_default()
        .into_iter()
        .filter_map(|entry| entry["sha256"].as_str().map(str::to_string))
        .collect::<HashSet<_>>();
    let mut imported = Vec::new();
    let mut skipped = Vec::new();
    let mut errors = Vec::new();
    for entry in entries {
        // Prefer `name`, fall back to `slug`, then a placeholder.
        let name = entry["name"]
            .as_str()
            .or_else(|| entry["slug"].as_str())
            .unwrap_or("<unnamed>")
            .to_string();
        let Some(bundle_source) = resolve_registry_entry_source(&entry, &source_registry) else {
            errors.push(json!({
                "name": name,
                "error": "registry entry did not contain a resolvable `source` or `path`",
            }));
            continue;
        };
        match read_bundle_source(&bundle_source, headers, timeout).await {
            Ok(bundle_value) => {
                // Verify the bundle against the digest advertised by the
                // source registry (when present).
                let expected_sha256 = entry["sha256"].as_str();
                match verify_bundle_digest(&bundle_value, expected_sha256, &bundle_source) {
                    Ok(sha256) => {
                        if known_sha256.contains(&sha256) {
                            skipped.push(json!({
                                "name": name,
                                "source": bundle_source,
                                "sha256": sha256,
                                "reason": "already present",
                            }));
                            continue;
                        }
                        let added = registry_add_bundle_value(
                            registry_dir,
                            &bundle_source,
                            &bundle_value,
                            &sha256,
                        )?;
                        // Remember the digest so duplicates later in the
                        // same sync are skipped too.
                        known_sha256.insert(sha256.clone());
                        imported.push(json!({
                            "name": name,
                            "source": bundle_source,
                            "sha256": sha256,
                            "entry": added["entry"],
                        }));
                    }
                    Err(error) => errors.push(json!({
                        "name": name,
                        "source": bundle_source,
                        "error": error.to_string(),
                    })),
                }
            }
            Err(error) => errors.push(json!({
                "name": name,
                "source": bundle_source,
                "error": error.to_string(),
            })),
        }
    }
    Ok(json!({
        "registry_schema": PROFILE_REGISTRY_SCHEMA,
        "source": source_registry.label,
        "registry": registry_dir.display().to_string(),
        "entry_count": imported.len() + skipped.len() + errors.len(),
        "imported_count": imported.len(),
        "skipped_count": skipped.len(),
        "error_count": errors.len(),
        "imported": imported,
        "skipped": skipped,
        "errors": errors,
    }))
}
/// Appends `suffix` (e.g. "/bundles") to a registry base URL unless the
/// base already ends with it, collapsing any trailing slashes on the base
/// so the result contains exactly one separator.
fn normalize_registry_http_target(base: &str, suffix: &str) -> String {
    if base.ends_with(suffix) {
        return base.to_string();
    }
    // `trim_end_matches` is a no-op when there is no trailing slash, so
    // both the slash and no-slash cases reduce to one concatenation.
    format!("{}{}", base.trim_end_matches('/'), suffix)
}
/// Publishes a profile bundle to `registry`, which is either an HTTP
/// registry (PUT to its `/bundles` endpoint) or a local directory /
/// `file://` registry (written via `registry_add_bundle_value`).
///
/// Returns a report including the bundle digest, the transport used, and
/// the registry's own result payload.
async fn registry_push_target(
    registry: &str,
    source_label: &str,
    bundle_value: &Value,
    headers: &[(String, String)],
    timeout: Option<Duration>,
) -> Result<Value> {
    let sha256 = bundle_sha256_from_value(bundle_value)?;
    if is_http_target(registry) {
        // Ensure the URL targets the registry's /bundles endpoint.
        let target = normalize_registry_http_target(registry, "/bundles");
        let client = reqwest::Client::builder()
            .timeout(timeout.unwrap_or(Duration::from_secs(30)))
            .build()
            .map_err(|error| {
                sxmc::error::SxmcError::Other(format!(
                    "Failed to create HTTP client for registry push: {}",
                    error
                ))
            })?;
        let response = client
            .put(&target)
            .headers(request_header_map(headers)?)
            .header(reqwest::header::CONTENT_TYPE, "application/json")
            .body(serde_json::to_vec_pretty(bundle_value)?)
            .send()
            .await
            .map_err(|error| {
                sxmc::error::SxmcError::Other(format!(
                    "Failed to push profile bundle to registry '{}': {}",
                    registry, error
                ))
            })?;
        // Capture the status before error_for_status consumes the response.
        let status = response.status();
        let value: Value = response
            .error_for_status()
            .map_err(|error| {
                sxmc::error::SxmcError::Other(format!(
                    "Failed to push profile bundle to registry '{}': {}",
                    registry, error
                ))
            })?
            .json()
            .await
            .map_err(|error| {
                sxmc::error::SxmcError::Other(format!(
                    "Registry push response from '{}' was not valid JSON: {}",
                    registry, error
                ))
            })?;
        Ok(json!({
            "registry": registry,
            "transport": "http",
            "http_status": status.as_u16(),
            "sha256": sha256,
            "source": source_label,
            "result": value,
        }))
    } else {
        // File transport: accept either a `file://` URI or a plain path.
        let registry_dir = if registry.starts_with("file://") {
            file_uri_to_path(registry)
        } else {
            PathBuf::from(registry)
        };
        let value = registry_add_bundle_value(&registry_dir, source_label, bundle_value, &sha256)?;
        Ok(json!({
            "registry": registry_dir.display().to_string(),
            "transport": "file",
            "sha256": sha256,
            "source": source_label,
            "result": value,
        }))
    }
}
/// GET `/` — plain-text banner listing the registry server's endpoints.
async fn registry_root_handler() -> &'static str {
    "sxmc profile registry server\nIndex: /index.json\nPush bundles: PUT /bundles\nHealth: /healthz\n"
}
/// GET `/healthz` — liveness probe reporting the server version, registry
/// location, and entry count (0 when the index cannot be loaded).
async fn registry_health_handler(State(state): State<RegistryServerState>) -> Json<Value> {
    // An unreadable index is not fatal for a health probe; report 0 entries.
    let entry_count = match load_registry_value(&state.registry_dir) {
        Ok(index) => index["entries"].as_array().map_or(0, |items| items.len()),
        Err(_) => 0,
    };
    Json(json!({
        "name": "sxmc-registry",
        "version": env!("CARGO_PKG_VERSION"),
        "status": "ok",
        "registry": state.registry_dir.display().to_string(),
        "entry_count": entry_count,
    }))
}
async fn registry_index_handler(
State(state): State<RegistryServerState>,
) -> std::result::Result<Json<Value>, (StatusCode, String)> {
load_registry_value(&state.registry_dir)
.map(Json)
.map_err(|error| (StatusCode::INTERNAL_SERVER_ERROR, error.to_string()))
}
/// GET `/bundles/{name}` — serves a stored bundle file as JSON, or 404 when
/// it does not exist.
///
/// The path segment is user-controlled and is joined into the registry's
/// `bundles/` directory, so names containing separators or `..` are
/// rejected up front to prevent path traversal outside that directory.
async fn registry_bundle_handler(
    AxumPath(name): AxumPath<String>,
    State(state): State<RegistryServerState>,
) -> Response {
    // Defense-in-depth: a percent-decoded segment could contain `..` or a
    // separator; never let such a name reach the filesystem join below.
    if name.contains('/') || name.contains('\\') || name.contains("..") {
        return (StatusCode::BAD_REQUEST, "Invalid bundle name\n").into_response();
    }
    let path = state.registry_dir.join("bundles").join(&name);
    match fs::read_to_string(&path) {
        Ok(body) => (
            StatusCode::OK,
            [(
                header::CONTENT_TYPE,
                HeaderValue::from_static("application/json"),
            )],
            body,
        )
            .into_response(),
        Err(_) => (StatusCode::NOT_FOUND, "Bundle not found\n").into_response(),
    }
}
/// PUT `/bundles` — accepts an uploaded profile bundle, validates it,
/// computes its digest, and stores it in the registry.
///
/// The public `source` recorded in the index is a relative
/// `bundles/<slug>-<digest12>.json` location so HTTP clients can fetch the
/// bundle back via `/bundles/{name}`. Validation/digest failures return
/// 400; storage failures return 500.
async fn registry_put_bundle_handler(
    State(state): State<RegistryServerState>,
    Json(bundle): Json<Value>,
) -> std::result::Result<Json<Value>, (StatusCode, String)> {
    let bundle = validate_bundle_value(bundle, "registry upload")
        .map_err(|error| (StatusCode::BAD_REQUEST, error.to_string()))?;
    let sha256 = bundle_sha256_from_value(&bundle)
        .map_err(|error| (StatusCode::BAD_REQUEST, error.to_string()))?;
    // Derive a URL-safe slug from the bundle name, defaulting to "bundle".
    let slug = bundle["metadata"]["name"]
        .as_str()
        .map(bundle_slug)
        .filter(|value| !value.is_empty())
        .unwrap_or_else(|| "bundle".to_string());
    let public_source = format!("bundles/{}-{}.json", slug, &sha256[..12]);
    let added = registry_add_bundle_value_with_public_source(
        &state.registry_dir,
        "registry-upload",
        Some(&public_source),
        &bundle,
        &sha256,
    )
    .map_err(|error| (StatusCode::INTERNAL_SERVER_ERROR, error.to_string()))?;
    Ok(Json(json!({
        "registry": state.registry_dir.display().to_string(),
        "sha256": sha256,
        "result": added,
    })))
}
/// Runs the profile registry HTTP server on `host:port`, initializing the
/// registry directory if it has no index yet.
///
/// Applies the provided body-size and concurrency limits, logs the bound
/// address to stderr, and shuts down gracefully on Ctrl-C.
async fn serve_registry_http(
    registry_dir: &Path,
    host: &str,
    port: u16,
    limits: HttpServeLimits,
) -> Result<()> {
    // First run against an empty directory: create index.json + bundles/.
    if !registry_index_path(registry_dir).exists() {
        registry_init_value(registry_dir)?;
    }
    let bind_addr = format!("{host}:{port}");
    let listener = tokio::net::TcpListener::bind(&bind_addr)
        .await
        .map_err(|e| sxmc::error::SxmcError::Other(format!("Failed to bind {bind_addr}: {e}")))?;
    // Read back the actual address (port 0 binds an ephemeral port).
    let local_addr = listener
        .local_addr()
        .map_err(|e| sxmc::error::SxmcError::Other(format!("Failed to read local addr: {e}")))?;
    let state = RegistryServerState {
        registry_dir: registry_dir.to_path_buf(),
    };
    let router = Router::new()
        .route("/", get(registry_root_handler))
        .route("/healthz", get(registry_health_handler))
        .route("/index.json", get(registry_index_handler))
        .route("/bundles", put(registry_put_bundle_handler))
        .route("/bundles/{name}", get(registry_bundle_handler))
        .with_state(state)
        .layer(DefaultBodyLimit::max(limits.max_request_body_bytes))
        .layer(RequestBodyLimitLayer::new(limits.max_request_body_bytes))
        .layer(ConcurrencyLimitLayer::new(limits.max_concurrency));
    eprintln!(
        "[sxmc] Profile registry server listening at http://{}/index.json",
        local_addr
    );
    // Resolve the shutdown future when Ctrl-C is received (errors ignored).
    let shutdown = async {
        let _ = tokio::signal::ctrl_c().await;
    };
    axum::serve(listener, router)
        .with_graceful_shutdown(shutdown)
        .await
        .map_err(|e| sxmc::error::SxmcError::Other(format!("Registry HTTP server failed: {e}")))?;
    Ok(())
}
/// Evaluates a trust policy against a previously produced trust report.
///
/// Each enabled check appends an entry to `checks`; the overall `passed`
/// flag is true only when every check passed. Disabled checks (None /
/// empty) contribute nothing. The full input report is echoed back under
/// `report`.
fn trust_policy_value(report: &Value, config: TrustPolicyConfig<'_>) -> Value {
    // Hosts are compared case-insensitively with `_` treated as `-`.
    let normalize_host = |value: &str| value.trim().replace('_', "-").to_lowercase();
    let signature = &report["signature"];
    let quality = &report["quality"];
    let metadata = &report["metadata"];
    let mut checks = Vec::new();
    if config.require_signature {
        let passed = signature["present"].as_bool() == Some(true);
        checks.push(json!({
            "name": "require_signature",
            "passed": passed,
            "detail": if passed { "bundle is signed" } else { "bundle is not signed" },
        }));
    }
    if config.require_verified_signature {
        let passed = signature["verified"].as_bool() == Some(true);
        checks.push(json!({
            "name": "require_verified_signature",
            "passed": passed,
            "detail": if passed { "embedded signature verified successfully" } else { "signature verification was not satisfied" },
        }));
    }
    // Missing numeric fields default to 0 / 0.0 before threshold comparison.
    if let Some(min_average_quality) = config.min_average_quality {
        let actual = quality["average_quality_score"].as_f64().unwrap_or(0.0);
        let passed = actual >= min_average_quality;
        checks.push(json!({
            "name": "min_average_quality",
            "passed": passed,
            "expected": min_average_quality,
            "actual": actual,
        }));
    }
    if let Some(max_stale_count) = config.max_stale_count {
        let actual = quality["stale_count"].as_u64().unwrap_or(0);
        let passed = actual <= max_stale_count;
        checks.push(json!({
            "name": "max_stale_count",
            "passed": passed,
            "expected": max_stale_count,
            "actual": actual,
        }));
    }
    if let Some(min_ready_count) = config.min_ready_count {
        let actual = quality["ready_count"].as_u64().unwrap_or(0);
        let passed = actual >= min_ready_count;
        checks.push(json!({
            "name": "min_ready_count",
            "passed": passed,
            "expected": min_ready_count,
            "actual": actual,
        }));
    }
    if let Some(require_role) = config.require_role {
        let actual = metadata["role"].as_str().unwrap_or_default();
        let passed = actual == require_role;
        checks.push(json!({
            "name": "require_role",
            "passed": passed,
            "expected": require_role,
            "actual": actual,
        }));
    }
    if !config.require_hosts.is_empty() {
        // Every required host must appear in metadata.hosts (normalized).
        let actual_hosts = metadata["hosts"]
            .as_array()
            .cloned()
            .unwrap_or_default()
            .into_iter()
            .filter_map(|value| value.as_str().map(normalize_host))
            .collect::<HashSet<_>>();
        let expected_hosts = config
            .require_hosts
            .iter()
            .map(|host| normalize_host(host))
            .collect::<Vec<_>>();
        let missing = expected_hosts
            .iter()
            .filter(|host| !actual_hosts.contains(host.as_str()))
            .cloned()
            .collect::<Vec<_>>();
        // Sort for deterministic output (HashSet iteration order varies).
        let mut actual_hosts = actual_hosts.into_iter().collect::<Vec<_>>();
        actual_hosts.sort();
        checks.push(json!({
            "name": "require_hosts",
            "passed": missing.is_empty(),
            "expected": expected_hosts,
            "actual": actual_hosts,
            "missing": missing,
        }));
    }
    let passed = checks
        .iter()
        .all(|check| check["passed"].as_bool() == Some(true));
    json!({
        "bundle_schema": PROFILE_BUNDLE_SCHEMA,
        "input": report["input"],
        "passed": passed,
        "policy": {
            "require_signature": config.require_signature,
            "require_verified_signature": config.require_verified_signature,
            "min_average_quality": config.min_average_quality,
            "max_stale_count": config.max_stale_count,
            "min_ready_count": config.min_ready_count,
            "require_role": config.require_role,
            "require_hosts": config.require_hosts,
        },
        "checks": checks,
        "report": report,
    })
}
/// Location of the `index.json` file inside a registry directory.
fn registry_index_path(dir: &Path) -> PathBuf {
    let mut index = dir.to_path_buf();
    index.push("index.json");
    index
}
/// Reads and parses `index.json` from a registry directory, validating its
/// `registry_schema` marker before returning the document.
fn load_registry_value(dir: &Path) -> Result<Value> {
    let raw = fs::read_to_string(registry_index_path(dir))?;
    let index: Value = serde_json::from_str(&raw)?;
    // A missing or mismatched schema marker means this is not our registry.
    if index["registry_schema"].as_str() != Some(PROFILE_REGISTRY_SCHEMA) {
        return Err(sxmc::error::SxmcError::Other(format!(
            "Registry '{}' is not a valid sxmc registry. Expected `registry_schema: {}`.",
            dir.display(),
            PROFILE_REGISTRY_SCHEMA
        )));
    }
    Ok(index)
}
/// Initializes an empty registry at `dir`: creates the `bundles/`
/// subdirectory and writes a fresh `index.json` with no entries.
/// Returns a small report describing what was created.
fn registry_init_value(dir: &Path) -> Result<Value> {
    fs::create_dir_all(dir.join("bundles"))?;
    let index_path = registry_index_path(dir);
    let index = json!({
        "registry_schema": PROFILE_REGISTRY_SCHEMA,
        "generated_by": "sxmc",
        "generator_version": env!("CARGO_PKG_VERSION"),
        "generated_at": Utc::now().to_rfc3339(),
        "entries": [],
    });
    fs::write(&index_path, serde_json::to_string_pretty(&index)?)?;
    Ok(json!({
        "registry_schema": PROFILE_REGISTRY_SCHEMA,
        "path": dir.display().to_string(),
        "index": index_path.display().to_string(),
        "initialized": true,
    }))
}
/// Stores a bundle in the registry at `dir` and appends an index entry.
///
/// The bundle is written to `bundles/<slug>-<digest12>.json`. The entry's
/// `source` field records `public_source` when given (e.g. a relative URL
/// for HTTP clients) and the raw `source` label otherwise. The registry is
/// initialized first if no index exists.
fn registry_add_bundle_value_with_public_source(
    dir: &Path,
    source: &str,
    public_source: Option<&str>,
    bundle_value: &Value,
    sha256: &str,
) -> Result<Value> {
    let mut registry = if registry_index_path(dir).exists() {
        load_registry_value(dir)?
    } else {
        registry_init_value(dir)?;
        load_registry_value(dir)?
    };
    fs::create_dir_all(dir.join("bundles"))?;
    let metadata = bundle_value.get("metadata").cloned().unwrap_or(Value::Null);
    // Entry name: metadata.name, else the first profile's command, else
    // the literal "bundle".
    let name = metadata
        .get("name")
        .and_then(Value::as_str)
        .filter(|value| !value.is_empty())
        .map(str::to_string)
        .or_else(|| {
            bundle_value["entries"]
                .as_array()
                .and_then(|items| items.first())
                .and_then(|item| item["command"].as_str())
                .map(str::to_string)
        })
        .unwrap_or_else(|| "bundle".to_string());
    let slug = bundle_slug(&name);
    // NOTE(review): assumes `sha256` is a full hex digest (>= 12 chars);
    // a shorter string would panic on this slice — confirm all callers.
    let bundle_path = dir
        .join("bundles")
        .join(format!("{slug}-{}.json", &sha256[..12]));
    fs::write(&bundle_path, serde_json::to_string_pretty(bundle_value)?)?;
    let entry = json!({
        "name": name,
        "slug": slug,
        "source": public_source.unwrap_or(source),
        "path": bundle_path.display().to_string(),
        "sha256": sha256,
        "profile_count": bundle_value["profile_count"],
        "metadata": metadata,
        "signature": bundle_signature_report(bundle_value),
        "published_at": Utc::now().to_rfc3339(),
    });
    // Append to the index; entries are not deduplicated here (sync-level
    // dedupe happens in registry_sync_value).
    if let Some(entries) = registry.get_mut("entries").and_then(Value::as_array_mut) {
        entries.push(entry.clone());
    }
    fs::write(
        registry_index_path(dir),
        serde_json::to_string_pretty(&registry)?,
    )?;
    Ok(json!({
        "registry_schema": PROFILE_REGISTRY_SCHEMA,
        "registry": dir.display().to_string(),
        "entry": entry,
    }))
}
/// Convenience wrapper: store a bundle using the raw `source` label as the
/// index entry's public source (no override).
fn registry_add_bundle_value(
    dir: &Path,
    source: &str,
    bundle_value: &Value,
    sha256: &str,
) -> Result<Value> {
    registry_add_bundle_value_with_public_source(dir, source, None, bundle_value, sha256)
}
/// Imports the newest registry bundle matching `name` (by entry `name` or
/// `slug`) into `output_dir` using the given import mode.
///
/// "Newest" is determined by descending `published_at`; RFC 3339 strings
/// compare correctly lexicographically. Errors when no entry matches.
fn registry_pull_value(
    dir: &Path,
    name: &str,
    output_dir: &Path,
    mode: BundleImportMode,
) -> Result<Value> {
    let registry = load_registry_value(dir)?;
    let mut matches = registry["entries"]
        .as_array()
        .cloned()
        .unwrap_or_default()
        .into_iter()
        .filter(|entry| {
            entry["name"].as_str() == Some(name) || entry["slug"].as_str() == Some(name)
        })
        .collect::<Vec<_>>();
    // Most recently published first.
    matches.sort_by(|a, b| b["published_at"].as_str().cmp(&a["published_at"].as_str()));
    let selected = matches.first().cloned().ok_or_else(|| {
        sxmc::error::SxmcError::Other(format!(
            "Registry '{}' does not contain a bundle named '{}'.",
            dir.display(),
            name
        ))
    })?;
    // The entry's `path` points at the stored bundle file on disk.
    let bundle_path = PathBuf::from(selected["path"].as_str().unwrap_or_default());
    let imported = import_profile_bundle_value(&bundle_path, output_dir, mode)?;
    Ok(json!({
        "registry_schema": PROFILE_REGISTRY_SCHEMA,
        "name": name,
        "registry": dir.display().to_string(),
        "selected": selected,
        "import": imported,
    }))
}
/// Builds a map keyed by each host's sidecar scope describing whether its
/// managed doc/config files exist on disk. A host counts as "ready" when
/// either file is present.
fn host_capability_map(
    install_paths: &InstallPaths,
    only_hosts: &[AiClientProfile],
) -> serde_json::Map<String, Value> {
    let mut capabilities = serde_json::Map::new();
    for host in resolved_hosts(only_hosts) {
        let spec = cli_surfaces::host_profile_spec(host);
        // A host with no known path for a target counts as not present.
        let has_doc = matches!(install_paths.host_doc_path(host), Some(path) if path.exists());
        let has_config =
            matches!(install_paths.host_config_path(host), Some(path) if path.exists());
        capabilities.insert(
            spec.sidecar_scope.into(),
            json!({
                "label": spec.label,
                "doc_present": has_doc,
                "config_present": has_config,
                "ready": has_doc || has_config,
            }),
        );
    }
    capabilities
}
/// JSON-object view of `host_capability_map` for embedding in reports.
fn host_capability_value(install_paths: &InstallPaths, only_hosts: &[AiClientProfile]) -> Value {
    Value::Object(host_capability_map(install_paths, only_hosts))
}
/// Returns the first non-empty `command` among an inventory's entries,
/// or `None` when there is no entries array or no usable command.
fn first_profile_command(inventory: &Value) -> Option<String> {
    let entries = inventory["entries"].as_array()?;
    for entry in entries {
        if let Some(command) = entry["command"].as_str() {
            // Skip blank commands and keep scanning subsequent entries.
            if !command.is_empty() {
                return Some(command.to_string());
            }
        }
    }
    None
}
/// Classifies each AI host into a readiness state by combining on-disk host
/// capability (doc/config files) with the saved-profile inventory and drift
/// report, producing a per-host summary plus configured/stale/unconfigured
/// totals.
///
/// Host states (see the branch ladder below):
/// - configured / configured_partially / configured_but_stale
/// - configured_without_profiles / configured_with_low_confidence_profiles
/// - profiles_present_but_host_not_configured / not_configured
fn ai_knowledge_value(
    install_paths: &InstallPaths,
    only_hosts: &[AiClientProfile],
    inventory: &Value,
    drift: &Value,
) -> Value {
    let hosts = resolved_hosts(only_hosts);
    let capability_map = host_capability_map(install_paths, &hosts);
    // Inventory/drift totals shared across all hosts.
    let profile_count = inventory["count"].as_u64().unwrap_or(0);
    let ready_profile_count = inventory["ready_count"].as_u64().unwrap_or(0);
    let stale_profile_count = inventory["stale_count"].as_u64().unwrap_or(0);
    let changed_profile_count = drift["changed_count"].as_u64().unwrap_or(0);
    let first_command = first_profile_command(inventory);
    let mut entries = serde_json::Map::new();
    let mut configured_count = 0u64;
    let mut stale_host_count = 0u64;
    let mut unconfigured_count = 0u64;
    for host in hosts {
        let spec = cli_surfaces::host_profile_spec(host);
        let key = spec.sidecar_scope;
        let details = capability_map
            .get(key)
            .cloned()
            .unwrap_or_else(|| json!({}));
        let doc_present = details["doc_present"].as_bool().unwrap_or(false);
        let config_present = details["config_present"].as_bool().unwrap_or(false);
        let ready = details["ready"].as_bool().unwrap_or(false);
        // Targets the host natively supports but which are absent on disk.
        let missing_targets = [
            (!doc_present && spec.native_doc_target.is_some()).then_some("doc"),
            (!config_present && spec.native_config_target.is_some()).then_some("config"),
        ]
        .into_iter()
        .flatten()
        .collect::<Vec<_>>();
        // Branch ladder: order matters — staleness outranks low confidence,
        // which outranks partial configuration.
        let (state, summary) = if ready {
            if profile_count == 0 {
                configured_count += 1;
                (
                    "configured_without_profiles",
                    format!(
                        "{} is configured, but Sumac has not saved any CLI profiles yet.",
                        spec.label
                    ),
                )
            } else if stale_profile_count > 0 || changed_profile_count > 0 {
                configured_count += 1;
                stale_host_count += 1;
                (
                    "configured_but_stale",
                    format!(
                        "{} is configured, but {} saved profile(s) are stale and {} drifted from the installed tools.",
                        spec.label, stale_profile_count, changed_profile_count
                    ),
                )
            } else if ready_profile_count == 0 {
                configured_count += 1;
                (
                    "configured_with_low_confidence_profiles",
                    format!(
                        "{} is configured, but the saved profiles are not yet ready for startup-facing agent docs.",
                        spec.label
                    ),
                )
            } else if !missing_targets.is_empty() {
                configured_count += 1;
                (
                    "configured_partially",
                    format!(
                        "{} has some managed host files, but it is still missing {} target(s): {}.",
                        spec.label,
                        missing_targets.len(),
                        missing_targets.join(", ")
                    ),
                )
            } else {
                configured_count += 1;
                (
                    "configured",
                    format!(
                        "{} is configured and has {} ready saved profile(s) backing its AI context.",
                        spec.label, ready_profile_count
                    ),
                )
            }
        } else if profile_count > 0 {
            unconfigured_count += 1;
            (
                "profiles_present_but_host_not_configured",
                format!(
                    "{} has {} saved profile(s), but its startup files are not configured yet.",
                    spec.label, profile_count
                ),
            )
        } else {
            unconfigured_count += 1;
            (
                "not_configured",
                format!(
                    "{} is not configured and there are no saved profiles yet.",
                    spec.label
                ),
            )
        };
        // Suggested commands use a real saved command when available.
        let command_hint = first_command
            .clone()
            .unwrap_or_else(|| "<tool>".to_string());
        let recommended_commands = host_recommended_commands(
            install_paths,
            key,
            state,
            &command_hint,
            doc_present,
            config_present,
            &missing_targets,
        );
        // The first recommendation doubles as the single headline command.
        let recommended_command = recommended_commands
            .first()
            .and_then(|item| item["command"].as_str())
            .map(str::to_string);
        entries.insert(
            key.into(),
            json!({
                "label": spec.label,
                "doc_present": doc_present,
                "config_present": config_present,
                "ready": ready,
                "state": state,
                "saved_profile_count": profile_count,
                "ready_profile_count": ready_profile_count,
                "stale_profile_count": stale_profile_count,
                "changed_profile_count": changed_profile_count,
                "missing_targets": missing_targets,
                "summary": summary,
                "recommended_command": recommended_command,
                "recommended_commands": recommended_commands,
            }),
        );
    }
    json!({
        "configured_host_count": configured_count,
        "stale_host_count": stale_host_count,
        "unconfigured_host_count": unconfigured_count,
        "hosts": entries,
    })
}
/// Derives an ordered recovery plan from the AI-knowledge report: one item
/// per host that is not fully `configured`, plus a sync item when saved
/// profiles have drifted.
///
/// Hosts are visited in sorted key order for deterministic output; each
/// item carries priority/severity/category attributes and a suggested
/// command (falling back to a generic `sxmc add` invocation).
fn status_recovery_plan_value(
    install_paths: &InstallPaths,
    ai_knowledge: &Value,
    sync_state: &Value,
) -> Value {
    let mut items = Vec::new();
    if let Some(hosts) = ai_knowledge["hosts"].as_object() {
        // Sort by host key so the plan ordering is stable across runs.
        let mut entries = hosts.iter().collect::<Vec<_>>();
        entries.sort_by(|a, b| a.0.cmp(b.0));
        for (key, details) in entries {
            let state = details["state"].as_str().unwrap_or("unknown");
            // Fully configured hosts need no recovery step.
            if state == "configured" {
                continue;
            }
            let command = details["recommended_command"]
                .as_str()
                .map(str::to_string)
                .unwrap_or_else(|| {
                    format!(
                        "sxmc add <tool> {} --host {}",
                        scope_command_hint(install_paths),
                        key
                    )
                });
            // Everything after the headline recommendation becomes an
            // alternative.
            let alternatives = details["recommended_commands"]
                .as_array()
                .map(|items| items.iter().skip(1).cloned().collect::<Vec<_>>())
                .unwrap_or_default();
            let (priority, severity, category) = recovery_plan_attributes(state);
            items.push(json!({
                "host": key,
                "label": details["label"],
                "state": state,
                "priority": priority,
                "severity": severity,
                "category": category,
                "summary": details["summary"],
                "command": command,
                "alternatives": alternatives,
            }));
        }
    }
    // Append a profile-sync item when any saved profile has drifted.
    if sync_state["current_drift_count"].as_u64().unwrap_or(0) > 0 {
        items.push(json!({
            "host": Value::Null,
            "label": "Saved CLI profiles",
            "state": "sync_needed",
            "priority": 1,
            "severity": "warning",
            "category": "sync",
            "summary": format!(
                "{} saved profile(s) drifted from the installed tools.",
                sync_state["current_drift_count"].as_u64().unwrap_or(0)
            ),
            "command": format!("sxmc sync {} --apply", scope_command_hint(install_paths)),
        }));
    }
    json!({
        "count": items.len(),
        "items": items,
    })
}
/// Maps a host readiness state to recovery-plan attributes:
/// `(priority, severity, category)`. Unknown states get the lowest-priority
/// generic "repair" entry.
fn recovery_plan_attributes(state: &str) -> (u64, &'static str, &'static str) {
    if state == "configured_but_stale" {
        (1, "warning", "refresh")
    } else if state == "configured_partially" {
        (2, "warning", "repair")
    } else if state == "profiles_present_but_host_not_configured" {
        (2, "warning", "configure")
    } else if state == "not_configured" {
        (3, "info", "onboard")
    } else {
        (3, "info", "repair")
    }
}
/// Build the ordered list of recommended remediation commands for one host
/// in a given configuration state. The first entry is the primary
/// recommendation; later entries are alternatives.
///
/// `command_hint` is the tool name substituted into `sxmc setup`/`sxmc add`
/// invocations. `doc_present`/`config_present`/`missing_targets` describe how
/// much of the host's on-disk configuration already exists and select the
/// repair-first path for partially configured hosts.
fn host_recommended_commands(
    install_paths: &InstallPaths,
    host_key: &str,
    state: &str,
    command_hint: &str,
    doc_present: bool,
    config_present: bool,
    missing_targets: &[&str],
) -> Vec<Value> {
    // Pre-render every candidate command once; each state arm below picks
    // the relevant subset.
    let scope_hint = scope_command_hint(install_paths);
    let setup_command = format!(
        "sxmc setup --tool {} --host {} {}",
        command_hint, host_key, scope_hint
    );
    let add_command = format!(
        "sxmc add {} --host {} {}",
        command_hint, host_key, scope_hint
    );
    let sync_command = format!("sxmc sync {} --apply", scope_hint);
    let drift_command = format!(
        "sxmc inspect drift {} --recursive --format json-pretty",
        default_saved_profiles_dir(install_paths).display()
    );
    let doctor_command = format!("sxmc doctor --fix {} --only {}", scope_hint, host_key);
    let allow_low_confidence_command = format!(
        "sxmc add {} --host {} {} --allow-low-confidence",
        command_hint, host_key, scope_hint
    );
    // "Partially configured": some artifact exists but host targets are
    // still missing — prefer an in-place repair over a full onboarding.
    let partially_configured = (doc_present || config_present) && !missing_targets.is_empty();
    match state {
        // Host artifacts exist but no saved profile: capture one first.
        "configured_without_profiles" => vec![
            json!({
                "kind": "add",
                "summary": "Inspect and save a tool profile for this configured host.",
                "command": add_command,
            }),
            json!({
                "kind": "setup",
                "summary": "Onboard one or more tools for this host in one pass.",
                "command": setup_command,
            }),
        ],
        // Profiles exist but are out of date: sync, or review drift first.
        "configured_but_stale" => vec![
            json!({
                "kind": "sync",
                "summary": "Refresh saved profiles and rewrite affected host artifacts.",
                "command": sync_command,
            }),
            json!({
                "kind": "inspect-drift",
                "summary": "Review profile drift before applying changes.",
                "command": drift_command,
            }),
        ],
        // Profiles were saved with low confidence: rebuild explicitly.
        "configured_with_low_confidence_profiles" => vec![
            json!({
                "kind": "add",
                "summary": "Rebuild the host profile with low-confidence output explicitly allowed.",
                "command": allow_low_confidence_command,
            }),
            json!({
                "kind": "setup",
                "summary": "Re-run the guided onboarding flow for this host and tool.",
                "command": setup_command,
            }),
        ],
        // Some host targets are missing: repair in place first.
        "configured_partially" => vec![
            json!({
                "kind": "doctor-fix",
                "summary": "Repair the partially configured host files in place.",
                "command": doctor_command,
            }),
            json!({
                "kind": "add",
                "summary": "Reapply the saved tool profile to fill in the missing host targets.",
                "command": add_command,
            }),
        ],
        // Profiles exist and some host files exist: repair, then reapply.
        "profiles_present_but_host_not_configured" if partially_configured => vec![
            json!({
                "kind": "doctor-fix",
                "summary": "Repair the partially configured host files in place.",
                "command": doctor_command,
            }),
            json!({
                "kind": "add",
                "summary": "Reapply the saved tool profile to this host.",
                "command": add_command,
            }),
        ],
        // Profiles exist but nothing on disk for this host: apply them.
        "profiles_present_but_host_not_configured" => vec![
            json!({
                "kind": "add",
                "summary": "Apply an existing saved profile to this host.",
                "command": add_command,
            }),
            json!({
                "kind": "setup",
                "summary": "Onboard this host and tool from scratch.",
                "command": setup_command,
            }),
        ],
        // Nothing exists yet: full onboarding is the primary path.
        "not_configured" => vec![
            json!({
                "kind": "setup",
                "summary": "Onboard this host and tool with the stable setup flow.",
                "command": setup_command,
            }),
            json!({
                "kind": "add",
                "summary": "Apply a single tool directly to this host.",
                "command": add_command,
            }),
        ],
        // Unknown state: fall back to the generic doctor repair path.
        _ => vec![json!({
            "kind": "doctor-fix",
            "summary": "Repair generated host files using Sumac's inferred fix path.",
            "command": doctor_command,
        })],
    }
}
fn watch_event_value(install_paths: &InstallPaths, reason: &str, value: &Value) -> Value {
json!({
"event_schema": "sxmc_watch_event_v1",
"reason": reason,
"root": install_paths.project_root().display().to_string(),
"install_scope": install_paths.scope().as_str(),
"observed_at": Utc::now().to_rfc3339(),
"status": value,
})
}
/// Build the outgoing notification payload for a watch event in the
/// requested template shape: the raw event (standard), a compact summary,
/// or a Slack Block Kit message.
fn watch_notification_payload(template: WatchNotificationTemplate, event: &Value) -> Value {
    // The standard template is the raw event itself — return early instead
    // of building the compact summary that would be thrown away.
    if matches!(template, WatchNotificationTemplate::Standard) {
        return event.clone();
    }
    let status = &event["status"];
    let sync_state = &status["sync_state"];
    let recovery_plan = &status["recovery_plan"];
    let ai_hosts = status["ai_knowledge"]["hosts"]
        .as_object()
        .map(|hosts| hosts.len())
        .unwrap_or(0);
    let compact = json!({
        "event_schema": "sxmc_watch_notification_v1",
        "template": match template {
            WatchNotificationTemplate::Standard => "standard",
            WatchNotificationTemplate::Compact => "compact",
            WatchNotificationTemplate::Slack => "slack",
        },
        "reason": event["reason"],
        "root": event["root"],
        "observed_at": event["observed_at"],
        "summary": {
            "host_count": ai_hosts,
            "drift_count": sync_state["current_drift_count"].as_u64().unwrap_or(0),
            "recovery_count": recovery_plan["count"].as_u64().unwrap_or(0),
            "unhealthy_baked_count": status["baked_health"]["unhealthy_count"].as_u64().unwrap_or(0),
        },
        "commands_needing_sync": sync_state["commands_needing_sync"].clone(),
        // Cap the embedded recovery items so notifications stay small.
        "top_recovery_items": recovery_plan["items"]
            .as_array()
            .map(|items| items.iter().take(3).cloned().collect::<Vec<_>>())
            .unwrap_or_default(),
    });
    if !matches!(template, WatchNotificationTemplate::Slack) {
        return compact;
    }
    // Slack template: a short fallback text line plus Block Kit sections,
    // with the compact payload attached under `sxmc_event` for machines.
    let drift_count = compact["summary"]["drift_count"].as_u64().unwrap_or(0);
    let recovery_count = compact["summary"]["recovery_count"].as_u64().unwrap_or(0);
    let unhealthy_count = compact["summary"]["unhealthy_baked_count"]
        .as_u64()
        .unwrap_or(0);
    let root = event["root"].as_str().unwrap_or(".");
    let reason = event["reason"].as_str().unwrap_or("change");
    let text = format!(
        "sxmc watch {reason} for {root} — drift: {drift_count}, recovery: {recovery_count}, unhealthy: {unhealthy_count}"
    );
    json!({
        "text": text,
        "blocks": [
            {
                "type": "section",
                "text": {
                    "type": "mrkdwn",
                    "text": format!("*sxmc watch {}*\n`{}`", reason, root),
                }
            },
            {
                "type": "section",
                "fields": [
                    { "type": "mrkdwn", "text": format!("*Drift*\n{}", drift_count) },
                    { "type": "mrkdwn", "text": format!("*Recovery*\n{}", recovery_count) },
                    { "type": "mrkdwn", "text": format!("*Unhealthy*\n{}", unhealthy_count) },
                    { "type": "mrkdwn", "text": format!("*Observed*\n{}", event["observed_at"].as_str().unwrap_or("")) }
                ]
            }
        ],
        "sxmc_event": compact,
    })
}
/// Append one notification payload as a single JSON line (JSONL) to `path`,
/// creating parent directories and the file itself on demand.
fn append_watch_notification(path: &Path, payload: &Value) -> Result<()> {
    match path.parent() {
        // An empty parent means the path is bare (e.g. "log.jsonl"): nothing to create.
        Some(parent) if !parent.as_os_str().is_empty() => fs::create_dir_all(parent)?,
        _ => {}
    }
    let line = serde_json::to_string(payload)?;
    let mut file = fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(path)?;
    writeln!(file, "{}", line)?;
    Ok(())
}
/// Run a user-configured shell hook for a watch event.
///
/// The event and payload are written to temp JSON files and exposed via the
/// `SXMC_WATCH_*` environment variables; the hook runs through the platform
/// shell so it may use pipes/redirects. A non-zero exit is reported to
/// stderr but is not an error.
///
/// Fix: the temp files are now removed even when spawning/waiting on the
/// command fails (previously `?` propagated before cleanup, leaking them).
fn run_watch_notify_command(
    command: &str,
    event: &Value,
    payload: &Value,
    template: WatchNotificationTemplate,
) -> Result<()> {
    // Unique-enough names: pid + microsecond timestamp.
    let temp_event_path = std::env::temp_dir().join(format!(
        "sxmc-watch-event-{}-{}.json",
        std::process::id(),
        Utc::now().timestamp_micros()
    ));
    let temp_payload_path = std::env::temp_dir().join(format!(
        "sxmc-watch-payload-{}-{}.json",
        std::process::id(),
        Utc::now().timestamp_micros()
    ));
    fs::write(&temp_event_path, serde_json::to_string_pretty(event)?)?;
    fs::write(&temp_payload_path, serde_json::to_string_pretty(payload)?)?;
    let mut child = if cfg!(windows) {
        let mut cmd = StdCommand::new("cmd");
        cmd.arg("/C").arg(command);
        cmd
    } else {
        let mut cmd = StdCommand::new("sh");
        cmd.arg("-lc").arg(command);
        cmd
    };
    child
        .env(
            "SXMC_WATCH_REASON",
            event["reason"].as_str().unwrap_or("change"),
        )
        .env("SXMC_WATCH_EVENT_PATH", temp_event_path.as_os_str())
        .env("SXMC_WATCH_PAYLOAD_PATH", temp_payload_path.as_os_str())
        .env("SXMC_WATCH_ROOT", event["root"].as_str().unwrap_or("."))
        .env(
            "SXMC_WATCH_NOTIFY_TEMPLATE",
            match template {
                WatchNotificationTemplate::Standard => "standard",
                WatchNotificationTemplate::Compact => "compact",
                WatchNotificationTemplate::Slack => "slack",
            },
        );
    // Capture the result first so cleanup always runs, then propagate.
    let status_result = child.status();
    let _ = fs::remove_file(&temp_event_path);
    let _ = fs::remove_file(&temp_payload_path);
    let status = status_result?;
    if !status.success() {
        eprintln!("[sxmc] Watch notify command exited with status {}", status);
    }
    Ok(())
}
async fn send_watch_webhook(url: &str, headers: &[(String, String)], event: &Value) -> Result<()> {
let mut request = reqwest::Client::new().post(url).json(event);
for (key, value) in headers {
request = request.header(key, value);
}
let response = request.send().await.map_err(|error| {
sxmc::error::SxmcError::Other(format!(
"Failed to POST watch notification to '{}': {}",
url, error
))
})?;
if !response.status().is_success() {
return Err(sxmc::error::SxmcError::Other(format!(
"Watch webhook '{}' returned HTTP {}",
url,
response.status()
)));
}
Ok(())
}
/// Compare boolean capability flags across two or more hosts and report the
/// fields on which the hosts disagree.
fn compare_host_capabilities(
    install_paths: &InstallPaths,
    compare_hosts: &[AiClientProfile],
) -> Value {
    let hosts = resolved_hosts(compare_hosts);
    let capability_map = host_capability_map(install_paths, &hosts);
    // Each host is identified by its sidecar-scope key in the output.
    let host_keys: Vec<_> = hosts
        .iter()
        .map(|host| cli_surfaces::host_profile_spec(*host).sidecar_scope)
        .collect();
    let mut differences = Vec::new();
    for field in ["ready", "doc_present", "config_present"] {
        // Split hosts by whether the flag is set (missing counts as false).
        let (hosts_true, hosts_false): (Vec<_>, Vec<_>) =
            host_keys.iter().copied().partition(|key| {
                capability_map
                    .get(*key)
                    .and_then(|entry| entry.get(field))
                    .and_then(Value::as_bool)
                    .unwrap_or(false)
            });
        // A difference exists only when both sides are non-empty.
        if !hosts_true.is_empty() && !hosts_false.is_empty() {
            differences.push(json!({
                "field": field,
                "hosts_true": hosts_true,
                "hosts_false": hosts_false,
            }));
        }
    }
    json!({
        "hosts": host_keys,
        "difference_count": differences.len(),
        "differences": differences,
    })
}
async fn baked_health_value() -> Result<Value> {
let store = BakeStore::load()?;
let mut entries = Vec::new();
let mut by_source_type = serde_json::Map::new();
let mut panels = serde_json::Map::new();
let configs = store.list();
let mut latency_sum_ms = 0u64;
let mut max_latency_ms = 0u64;
let mut slow_count = 0usize;
for config in configs {
let started = Instant::now();
let check = validate_bake_config(config).await;
let latency_ms = started.elapsed().as_millis() as u64;
let source_type = format!("{:?}", config.source_type).to_lowercase();
let healthy = check.is_ok();
let slow = latency_ms >= BAKED_HEALTH_SLOW_MS;
let panel_name = match config.source_type {
SourceType::Stdio | SourceType::Http => "mcp",
SourceType::Api => "api",
SourceType::Spec => "spec",
SourceType::Graphql => "graphql",
};
latency_sum_ms += latency_ms;
max_latency_ms = max_latency_ms.max(latency_ms);
if slow {
slow_count += 1;
}
let entry = by_source_type
.entry(source_type.clone())
.or_insert_with(|| {
json!({
"count": 0,
"healthy_count": 0,
"unhealthy_count": 0,
"slow_count": 0,
"latency_sum_ms": 0,
"avg_latency_ms": 0,
"max_latency_ms": 0,
})
});
if let Some(object) = entry.as_object_mut() {
let count = object.get("count").and_then(Value::as_u64).unwrap_or(0) + 1;
object.insert("count".into(), Value::from(count));
let key = if healthy {
"healthy_count"
} else {
"unhealthy_count"
};
let current = object.get(key).and_then(Value::as_u64).unwrap_or(0) + 1;
object.insert(key.into(), Value::from(current));
let slow_total = object
.get("slow_count")
.and_then(Value::as_u64)
.unwrap_or(0)
+ u64::from(slow);
object.insert("slow_count".into(), Value::from(slow_total));
let latency_total = object
.get("latency_sum_ms")
.and_then(Value::as_u64)
.unwrap_or(0)
+ latency_ms;
object.insert("latency_sum_ms".into(), Value::from(latency_total));
object.insert("avg_latency_ms".into(), Value::from(latency_total / count));
let previous_max = object
.get("max_latency_ms")
.and_then(Value::as_u64)
.unwrap_or(0);
object.insert(
"max_latency_ms".into(),
Value::from(previous_max.max(latency_ms)),
);
}
let panel_entry = json!({
"name": config.name,
"source_type": source_type,
"source": config.source,
"panel": panel_name,
"healthy": healthy,
"latency_ms": latency_ms,
"slow": slow,
"error": check.err().map(|error| error.to_string()),
});
if let Some(object) = panels
.entry(panel_name)
.or_insert_with(|| {
json!({
"count": 0,
"healthy_count": 0,
"unhealthy_count": 0,
"slow_count": 0,
"latency_sum_ms": 0,
"avg_latency_ms": 0,
"max_latency_ms": 0,
"entries": [],
})
})
.as_object_mut()
{
let count = object.get("count").and_then(Value::as_u64).unwrap_or(0) + 1;
object.insert("count".into(), Value::from(count));
let key = if healthy {
"healthy_count"
} else {
"unhealthy_count"
};
let current = object.get(key).and_then(Value::as_u64).unwrap_or(0) + 1;
object.insert(key.into(), Value::from(current));
let slow_total = object
.get("slow_count")
.and_then(Value::as_u64)
.unwrap_or(0)
+ u64::from(slow);
object.insert("slow_count".into(), Value::from(slow_total));
let latency_total = object
.get("latency_sum_ms")
.and_then(Value::as_u64)
.unwrap_or(0)
+ latency_ms;
object.insert("latency_sum_ms".into(), Value::from(latency_total));
object.insert("avg_latency_ms".into(), Value::from(latency_total / count));
let previous_max = object
.get("max_latency_ms")
.and_then(Value::as_u64)
.unwrap_or(0);
object.insert(
"max_latency_ms".into(),
Value::from(previous_max.max(latency_ms)),
);
object
.entry("entries")
.or_insert_with(|| Value::Array(Vec::new()));
if let Some(items) = object.get_mut("entries").and_then(Value::as_array_mut) {
items.push(panel_entry.clone());
}
}
entries.push(panel_entry);
}
let healthy_count = entries
.iter()
.filter(|entry| entry["healthy"].as_bool().unwrap_or(false))
.count();
let total_count = entries.len();
Ok(json!({
"checked_at": Utc::now().to_rfc3339(),
"count": total_count,
"healthy_count": healthy_count,
"unhealthy_count": total_count.saturating_sub(healthy_count),
"slow_count": slow_count,
"slow_threshold_ms": BAKED_HEALTH_SLOW_MS,
"latency_sum_ms": latency_sum_ms,
"avg_latency_ms": if total_count == 0 { 0 } else { latency_sum_ms / total_count as u64 },
"max_latency_ms": max_latency_ms,
"by_source_type": by_source_type,
"panels": panels,
"entries": entries,
}))
}
/// True when the status payload reports at least one unhealthy baked
/// connection (missing or non-numeric counts are treated as zero).
fn status_has_unhealthy_baked_health(value: &Value) -> bool {
    let unhealthy = value["baked_health"]["unhealthy_count"].as_u64();
    matches!(unhealthy, Some(count) if count > 0)
}
/// Assemble the full `sxmc status` payload: the base status document plus
/// host capabilities, AI knowledge, sync state, a recovery plan, an optional
/// multi-host capability diff, and (when requested) live baked health.
///
/// Improvement: `ai_knowledge` and `sync_state` are now moved into the
/// object instead of cloned; the recovery plan is computed first so the
/// original key insertion order is preserved.
async fn status_value_with_health(
    install_paths: &InstallPaths,
    only_hosts: &[AiClientProfile],
    compare_hosts: &[AiClientProfile],
    include_health: bool,
) -> Result<Value> {
    let mut value = status_value(install_paths, only_hosts)?;
    if let Some(object) = value.as_object_mut() {
        object.insert(
            "host_capabilities".into(),
            host_capability_value(install_paths, only_hosts),
        );
        // Reuse the inventory/drift data already computed in saved_profiles.
        let saved_profiles = object
            .get("saved_profiles")
            .cloned()
            .unwrap_or_else(|| json!({}));
        let inventory = saved_profiles
            .get("inventory")
            .cloned()
            .unwrap_or_else(|| json!({}));
        let drift = saved_profiles
            .get("drift")
            .cloned()
            .unwrap_or_else(|| json!({}));
        let ai_knowledge = ai_knowledge_value(install_paths, only_hosts, &inventory, &drift);
        let sync_state = sync_state_summary_value(install_paths, &drift);
        let recovery_plan = status_recovery_plan_value(install_paths, &ai_knowledge, &sync_state);
        object.insert("ai_knowledge".into(), ai_knowledge);
        object.insert("sync_state".into(), sync_state);
        object.insert("recovery_plan".into(), recovery_plan);
        // A capability diff only makes sense for two or more hosts.
        if compare_hosts.len() >= 2 {
            object.insert(
                "host_capability_diff".into(),
                compare_host_capabilities(install_paths, compare_hosts),
            );
        }
        if include_health {
            object.insert("baked_health".into(), baked_health_value().await?);
        }
    }
    Ok(value)
}
/// Decide whether doctor/status output should use the human-readable report.
///
/// An explicit `--human` always wins; otherwise human output is used only
/// when no structured format was requested, pretty-printing is off, and
/// stdout is an interactive terminal.
fn should_render_doctor_human(
    human: bool,
    format: Option<output::StructuredOutputFormat>,
    pretty: bool,
    stdout_is_tty: bool,
) -> bool {
    human || (format.is_none() && !pretty && stdout_is_tty)
}
/// Render the doctor JSON payload as a multi-line human-readable report:
/// environment summary, startup-file presence, and recommended first moves.
fn format_doctor_report(value: &Value) -> String {
    let mut lines = Vec::new();
    // Startup-file tallies: total keys vs. keys whose `present` flag is set.
    let startup_files = value["startup_files"].as_object();
    let startup_total = startup_files.map(|files| files.len()).unwrap_or(0);
    let startup_present = startup_files
        .map(|files| {
            files
                .values()
                .filter(|details| details["present"].as_bool().unwrap_or(false))
                .count()
        })
        .unwrap_or(0);
    let portable_profiles_present = value["portable_profile_dir"]["present"]
        .as_bool()
        .unwrap_or(false);
    let portable_profiles_path = value["portable_profile_dir"]["path"]
        .as_str()
        .unwrap_or_default();
    let cache_path = value["cache"]["path"].as_str().unwrap_or_default();
    let cache_entries = value["cache"]["entry_count"].as_u64().unwrap_or(0);
    let cache_total_bytes = value["cache"]["total_bytes"].as_u64().unwrap_or(0);
    // TTL is stored in seconds; the report shows whole hours.
    let cache_ttl_hours = value["cache"]["default_ttl_secs"].as_u64().unwrap_or(0) / 3600;
    let checked_hosts = value["checked_hosts"]
        .as_array()
        .map(|items| {
            items
                .iter()
                .filter_map(Value::as_str)
                .collect::<Vec<_>>()
                .join(", ")
        })
        .unwrap_or_default();
    lines.push(format!(
        "Root: {}",
        value["root"].as_str().unwrap_or("<unknown>")
    ));
    if let Some(scope) = value["install_scope"].as_str() {
        lines.push(format!("Install scope: {}", scope));
    }
    if !checked_hosts.is_empty() {
        lines.push(format!("Checked hosts: {}", checked_hosts));
    }
    lines.push(format!(
        "Baked MCP servers: {}",
        value["baked_mcp_servers"].as_u64().unwrap_or(0)
    ));
    lines.push(format!(
        "Profile cache dir: {} ({})",
        if portable_profiles_present {
            "present"
        } else {
            "missing"
        },
        portable_profiles_path
    ));
    lines.push(format!(
        "CLI profile cache: {} entries, {} bytes (TTL: {}h)",
        cache_entries, cache_total_bytes, cache_ttl_hours
    ));
    lines.push(format!("Cache path: {}", cache_path));
    lines.push(format!(
        "Startup files present: {startup_present}/{startup_total}"
    ));
    lines.push(String::new());
    lines.push("Startup files:".into());
    // List present files first, then missing ones, both sorted by name.
    if let Some(files) = startup_files {
        let mut entries: Vec<_> = files.iter().collect();
        entries.sort_by(|a, b| a.0.cmp(b.0));
        let present: Vec<_> = entries
            .iter()
            .filter(|(_, details)| details["present"].as_bool().unwrap_or(false))
            .collect();
        let missing: Vec<_> = entries
            .iter()
            .filter(|(_, details)| !details["present"].as_bool().unwrap_or(false))
            .collect();
        if !present.is_empty() {
            lines.push("  Present:".into());
            for (name, details) in present {
                let path = details["path"].as_str().unwrap_or_default();
                lines.push(format!("  - {} ({})", name, path));
            }
        }
        if !missing.is_empty() {
            lines.push("  Missing:".into());
            for (name, details) in missing {
                let path = details["path"].as_str().unwrap_or_default();
                lines.push(format!("  - {} ({})", name, path));
            }
        }
    }
    lines.push(String::new());
    lines.push("Recommended first moves:".into());
    // Numbered list of suggested commands with a one-line rationale each.
    if let Some(moves) = value["recommended_first_moves"].as_array() {
        for (index, item) in moves.iter().enumerate() {
            let surface = item["surface"].as_str().unwrap_or("surface");
            let command = item["command"].as_str().unwrap_or_default();
            let why = item["why"].as_str().unwrap_or_default();
            lines.push(format!(
                "{}. {} -> `{}`",
                index + 1,
                surface.replace('_', " "),
                command
            ));
            lines.push(format!("   {}", why));
        }
    }
    lines.join("\n")
}
/// Render the doctor report and write it to stdout.
fn print_doctor_report(value: &Value) {
    let report = format_doctor_report(value);
    println!("{}", report);
}
/// Render the status JSON payload as a human-readable report. Starts with
/// the doctor report, then appends AI knowledge, saved-profile drift and
/// freshness, sync state, host capabilities, the optional multi-host diff,
/// suggested fixes, and (when present) baked connection health.
fn format_status_report(value: &Value) -> String {
    // The status report is a superset of the doctor report.
    let mut lines = vec![format_doctor_report(value)];
    // AI knowledge section: per-host configuration state, sorted by key.
    if let Some(ai_hosts) = value["ai_knowledge"]["hosts"].as_object() {
        let configured = value["ai_knowledge"]["configured_host_count"]
            .as_u64()
            .unwrap_or(0);
        let stale = value["ai_knowledge"]["stale_host_count"]
            .as_u64()
            .unwrap_or(0);
        let unconfigured = value["ai_knowledge"]["unconfigured_host_count"]
            .as_u64()
            .unwrap_or(0);
        lines.push(String::new());
        lines.push("AI knowledge status".into());
        lines.push(format!(
            "Hosts: {} configured, {} stale, {} needing setup",
            configured, stale, unconfigured
        ));
        let mut entries = ai_hosts.iter().collect::<Vec<_>>();
        entries.sort_by(|a, b| a.0.cmp(b.0));
        for (_, details) in entries {
            lines.push(format!(
                "- {}: {}",
                details["label"].as_str().unwrap_or("<unknown>"),
                details["summary"].as_str().unwrap_or("status unavailable")
            ));
            if let Some(command) = details["recommended_command"].as_str() {
                lines.push(format!("  next: `{}`", command));
            }
        }
    }
    // Saved CLI profiles: drift counts and inventory quality/freshness.
    let saved_profiles = &value["saved_profiles"];
    lines.push(String::new());
    lines.push("Saved CLI profiles".into());
    lines.push(format!(
        "Path: {}",
        saved_profiles["path"].as_str().unwrap_or("<unknown>")
    ));
    let drift = &saved_profiles["drift"];
    lines.push(format!(
        "Profiles: {} total, {} changed, {} unchanged, {} errors",
        drift["count"].as_u64().unwrap_or(0),
        drift["changed_count"].as_u64().unwrap_or(0),
        drift["unchanged_count"].as_u64().unwrap_or(0),
        drift["error_count"].as_u64().unwrap_or(0)
    ));
    let inventory = &saved_profiles["inventory"];
    lines.push(format!(
        "Quality/Freshness: {} ready, {} not ready, {} stale, {} unknown freshness, {} inventory errors",
        inventory["ready_count"].as_u64().unwrap_or(0),
        inventory["not_ready_count"].as_u64().unwrap_or(0),
        inventory["stale_count"].as_u64().unwrap_or(0),
        inventory["unknown_freshness_count"].as_u64().unwrap_or(0),
        inventory["error_count"].as_u64().unwrap_or(0)
    ));
    // Local sync state: state-file presence, last sync time, drift summary.
    let sync_state = &value["sync_state"];
    lines.push(String::new());
    lines.push("Local sync state".into());
    lines.push(format!(
        "State file: {} ({})",
        sync_state["path"].as_str().unwrap_or("<unknown>"),
        if sync_state["present"].as_bool().unwrap_or(false) {
            "present"
        } else {
            "missing"
        }
    ));
    if let Some(last_synced_at) = sync_state["last_synced_at"].as_str() {
        lines.push(format!("Last synced at: {}", last_synced_at));
    }
    lines.push(format!(
        "Sync drift: {} profile(s) need reconciliation",
        sync_state["current_drift_count"].as_u64().unwrap_or(0)
    ));
    // Only the first five commands/profiles are listed in each detail block.
    if let Some(commands) = sync_state["commands_needing_sync"].as_array() {
        if !commands.is_empty() {
            lines.push("Commands needing sync:".into());
            for command in commands.iter().filter_map(Value::as_str).take(5) {
                lines.push(format!("- {}", command));
            }
        }
    }
    if let Some(entries) = drift["entries"].as_array() {
        let changed = entries
            .iter()
            .filter(|entry| entry["changed"].as_bool().unwrap_or(false))
            .take(5)
            .collect::<Vec<_>>();
        if !changed.is_empty() {
            lines.push("Changed profiles:".into());
            for entry in changed {
                lines.push(format!(
                    "- {} ({})",
                    entry["command"].as_str().unwrap_or("<unknown>"),
                    entry["path"].as_str().unwrap_or("<unknown>")
                ));
            }
        }
    }
    if let Some(entries) = inventory["entries"].as_array() {
        let stale = entries
            .iter()
            .filter(|entry| entry["freshness"]["stale"].as_bool().unwrap_or(false))
            .take(5)
            .collect::<Vec<_>>();
        if !stale.is_empty() {
            lines.push("Stale profiles:".into());
            for entry in stale {
                lines.push(format!(
                    "- {} ({})",
                    entry["command"].as_str().unwrap_or("<unknown>"),
                    entry["path"].as_str().unwrap_or("<unknown>")
                ));
            }
        }
    }
    // Per-host capability flags, sorted by host key.
    if let Some(hosts) = value["host_capabilities"].as_object() {
        lines.push(String::new());
        lines.push("Host capabilities".into());
        let mut entries = hosts.iter().collect::<Vec<_>>();
        entries.sort_by(|a, b| a.0.cmp(b.0));
        for (key, details) in entries {
            let label = details["label"].as_str().unwrap_or(key);
            let doc_present = details["doc_present"].as_bool().unwrap_or(false);
            let config_present = details["config_present"].as_bool().unwrap_or(false);
            let ready = details["ready"].as_bool().unwrap_or(false);
            lines.push(format!(
                "- {}: ready={} doc_present={} config_present={}",
                label, ready, doc_present, config_present
            ));
        }
    }
    // Optional section, present only when `--compare-hosts` produced a diff.
    if let Some(diff) = value.get("host_capability_diff") {
        lines.push(String::new());
        lines.push(format!(
            "Host capability comparison: {} differing field(s)",
            diff["difference_count"].as_u64().unwrap_or(0)
        ));
        if let Some(entries) = diff["differences"].as_array() {
            for entry in entries {
                let field = entry["field"].as_str().unwrap_or("field");
                let hosts_true = entry["hosts_true"]
                    .as_array()
                    .map(|items| {
                        items
                            .iter()
                            .filter_map(Value::as_str)
                            .collect::<Vec<_>>()
                            .join(", ")
                    })
                    .unwrap_or_default();
                let hosts_false = entry["hosts_false"]
                    .as_array()
                    .map(|items| {
                        items
                            .iter()
                            .filter_map(Value::as_str)
                            .collect::<Vec<_>>()
                            .join(", ")
                    })
                    .unwrap_or_default();
                lines.push(format!(
                    "- {}: true on [{}], false on [{}]",
                    field, hosts_true, hosts_false
                ));
            }
        }
    }
    // Recovery plan: primary command per item plus up to two alternatives.
    if let Some(items) = value["recovery_plan"]["items"].as_array() {
        if !items.is_empty() {
            lines.push(String::new());
            lines.push("Suggested fixes".into());
            for item in items {
                let severity = item["severity"].as_str().unwrap_or("info");
                lines.push(format!(
                    "- {} [{}]: {}",
                    item["label"].as_str().unwrap_or("<unknown>"),
                    severity,
                    item["summary"].as_str().unwrap_or("repair needed")
                ));
                lines.push(format!(
                    "  run: `{}`",
                    item["command"].as_str().unwrap_or("sxmc status")
                ));
                if let Some(alternatives) = item["alternatives"].as_array() {
                    for alternative in alternatives.iter().take(2) {
                        let command = alternative["command"].as_str().unwrap_or("sxmc status");
                        let summary = alternative["summary"]
                            .as_str()
                            .unwrap_or("alternate remediation path");
                        lines.push(format!("  also: `{}` ({})", command, summary));
                    }
                }
            }
        }
    }
    // Baked health section, present only when health checks were requested.
    if let Some(health) = value.get("baked_health") {
        lines.push(String::new());
        lines.push(format!(
            "Baked connection health: {} healthy, {} unhealthy, {} slow ({} total)",
            health["healthy_count"].as_u64().unwrap_or(0),
            health["unhealthy_count"].as_u64().unwrap_or(0),
            health["slow_count"].as_u64().unwrap_or(0),
            health["count"].as_u64().unwrap_or(0)
        ));
        if let Some(checked_at) = health["checked_at"].as_str() {
            lines.push(format!("Checked at: {}", checked_at));
        }
        lines.push(format!(
            "Latency: avg {}ms, max {}ms, slow threshold {}ms",
            health["avg_latency_ms"].as_u64().unwrap_or(0),
            health["max_latency_ms"].as_u64().unwrap_or(0),
            health["slow_threshold_ms"].as_u64().unwrap_or(0)
        ));
        // Per-source-type breakdown, sorted by type name.
        if let Some(by_type) = health["by_source_type"].as_object() {
            let mut entries = by_type.iter().collect::<Vec<_>>();
            entries.sort_by(|a, b| a.0.cmp(b.0));
            for (source_type, details) in entries {
                lines.push(format!(
                    "- {}: {} healthy, {} unhealthy, {} slow ({} total, avg {}ms, max {}ms)",
                    source_type,
                    details["healthy_count"].as_u64().unwrap_or(0),
                    details["unhealthy_count"].as_u64().unwrap_or(0),
                    details["slow_count"].as_u64().unwrap_or(0),
                    details["count"].as_u64().unwrap_or(0),
                    details["avg_latency_ms"].as_u64().unwrap_or(0),
                    details["max_latency_ms"].as_u64().unwrap_or(0)
                ));
            }
        }
        // Per-panel breakdown, sorted by panel name.
        if let Some(panels) = health["panels"].as_object() {
            let mut entries = panels.iter().collect::<Vec<_>>();
            entries.sort_by(|a, b| a.0.cmp(b.0));
            for (panel, details) in entries {
                lines.push(format!(
                    "- panel {}: {} healthy, {} unhealthy, {} slow ({} total, avg {}ms, max {}ms)",
                    panel,
                    details["healthy_count"].as_u64().unwrap_or(0),
                    details["unhealthy_count"].as_u64().unwrap_or(0),
                    details["slow_count"].as_u64().unwrap_or(0),
                    details["count"].as_u64().unwrap_or(0),
                    details["avg_latency_ms"].as_u64().unwrap_or(0),
                    details["max_latency_ms"].as_u64().unwrap_or(0)
                ));
            }
        }
        // At most five unhealthy entries are shown with their errors.
        if let Some(entries) = health["entries"].as_array() {
            for entry in entries
                .iter()
                .filter(|entry| !entry["healthy"].as_bool().unwrap_or(false))
                .take(5)
            {
                lines.push(format!(
                    "- {} [{}] {}ms: {}",
                    entry["name"].as_str().unwrap_or("<unknown>"),
                    entry["source_type"].as_str().unwrap_or("unknown"),
                    entry["latency_ms"].as_u64().unwrap_or(0),
                    entry["error"].as_str().unwrap_or("unhealthy")
                ));
            }
        }
    }
    lines.join("\n")
}
/// Render the status report and write it to stdout.
fn print_status_report(value: &Value) {
    let report = format_status_report(value);
    println!("{}", report);
}
/// Render the status payload either as the human report (for interactive
/// terminals with no explicit format) or in the resolved structured format.
fn render_status_output(
    value: &Value,
    format: Option<output::StructuredOutputFormat>,
    pretty: bool,
    stdout_is_tty: bool,
) -> String {
    // `human=false`: only the format/pretty/tty heuristics decide here.
    if should_render_doctor_human(false, format, pretty, stdout_is_tty) {
        return format_status_report(value);
    }
    let resolved = output::resolve_structured_format(format, pretty);
    output::format_structured_value(value, resolved)
}
/// Print a human-readable summary of saved CLI profile drift to stdout:
/// aggregate counts first, then one line per entry.
fn print_drift_report(value: &Value) {
    let changed = value["changed_count"].as_u64().unwrap_or(0);
    let unchanged = value["unchanged_count"].as_u64().unwrap_or(0);
    let errors = value["error_count"].as_u64().unwrap_or(0);
    let total = value["count"].as_u64().unwrap_or(0);
    println!(
        "Saved CLI profile drift: {} changed, {} unchanged, {} errors ({} total)",
        changed, unchanged, errors, total
    );
    if let Some(entries) = value["entries"].as_array() {
        for entry in entries {
            match entry["error"].as_str() {
                // Errors are keyed by path; successful entries by command.
                Some(error) => println!(
                    "- {}: error: {}",
                    entry["path"].as_str().unwrap_or("<unknown>"),
                    error
                ),
                None => {
                    let state = if entry["changed"].as_bool().unwrap_or(false) {
                        "changed"
                    } else {
                        "unchanged"
                    };
                    println!(
                        "- {}: {}",
                        entry["command"].as_str().unwrap_or("<unknown>"),
                        state
                    );
                }
            }
        }
    }
}
/// Print a human-readable summary of a batch inspect run: per-profile lines,
/// failures, skipped commands, and where profiles were written.
///
/// Fix: the compact and full branches previously duplicated an identical
/// `println!`; the counts are now computed first and printed once.
fn print_batch_inspect_report(value: &Value, compact: bool) {
    let count = value["count"].as_u64().unwrap_or(0);
    // When absent, assume every command was inspected.
    let inspected_count = value["inspected_count"].as_u64().unwrap_or(count);
    let success_count = value["success_count"].as_u64().unwrap_or(0);
    let failed_count = value["failed_count"].as_u64().unwrap_or(0);
    let skipped_count = value["skipped_count"].as_u64().unwrap_or(0);
    println!(
        "Inspected {} of {} command(s): {} succeeded, {} failed, {} skipped",
        inspected_count, count, success_count, failed_count, skipped_count
    );
    if let Some(profiles) = value["profiles"].as_array() {
        for profile in profiles {
            let command = profile["command"].as_str().unwrap_or("<unknown>");
            let summary = profile["summary"].as_str().unwrap_or_default();
            // Compact profiles carry precomputed counts; full profiles carry
            // the arrays themselves. The printed line is the same either way.
            let (subcommand_count, option_count) = if compact {
                (
                    profile["subcommand_count"].as_u64().unwrap_or(0),
                    profile["option_count"].as_u64().unwrap_or(0),
                )
            } else {
                (
                    profile["subcommands"]
                        .as_array()
                        .map(|items| items.len() as u64)
                        .unwrap_or(0),
                    profile["options"]
                        .as_array()
                        .map(|items| items.len() as u64)
                        .unwrap_or(0),
                )
            };
            println!(
                "- {}: {} ({} subcommands, {} options)",
                command, summary, subcommand_count, option_count
            );
        }
    }
    if let Some(failures) = value["failures"].as_array() {
        if !failures.is_empty() {
            println!();
            println!("Failures:");
            for failure in failures {
                println!(
                    "- {}: {}",
                    failure["command"].as_str().unwrap_or("<unknown>"),
                    failure["error"].as_str().unwrap_or("unknown error")
                );
            }
        }
    }
    if let Some(skipped) = value["skipped"].as_array() {
        if !skipped.is_empty() {
            println!();
            println!("Skipped:");
            for entry in skipped {
                println!(
                    "- {}: {}",
                    entry["command"].as_str().unwrap_or("<unknown>"),
                    entry["reason"].as_str().unwrap_or("skipped")
                );
            }
        }
    }
    if let Some(output_dir) = value["output_dir"].as_str() {
        println!();
        println!(
            "Saved {} profile file(s) to {}",
            value["written_profile_count"].as_u64().unwrap_or(0),
            output_dir
        );
    }
}
/// Render a batch inspect result in the compact "toon" text format: header
/// counters, per-profile lines, failures, skips, and the output directory.
///
/// Fix: the compact and full branches previously duplicated an identical
/// formatted line; the counts are now computed first and formatted once.
fn format_batch_toon(value: &Value, compact: bool) -> String {
    let mut lines = Vec::new();
    lines.push(format!("count: {}", value["count"].as_u64().unwrap_or(0)));
    // When absent, `inspected_count` falls back to `count`.
    lines.push(format!(
        "inspected_count: {}",
        value["inspected_count"]
            .as_u64()
            .unwrap_or_else(|| value["count"].as_u64().unwrap_or(0))
    ));
    lines.push(format!(
        "parallelism: {}",
        value["parallelism"].as_u64().unwrap_or(0)
    ));
    lines.push(format!(
        "success_count: {}",
        value["success_count"].as_u64().unwrap_or(0)
    ));
    lines.push(format!(
        "failed_count: {}",
        value["failed_count"].as_u64().unwrap_or(0)
    ));
    lines.push(format!(
        "skipped_count: {}",
        value["skipped_count"].as_u64().unwrap_or(0)
    ));
    lines.push(String::new());
    lines.push("profiles:".into());
    if let Some(profiles) = value["profiles"].as_array() {
        for profile in profiles {
            let command = profile["command"].as_str().unwrap_or("<unknown>");
            let summary = profile["summary"].as_str().unwrap_or_default();
            // Compact profiles carry precomputed counts; full profiles carry
            // the arrays. The rendered line is identical either way.
            let (subcommand_count, option_count) = if compact {
                (
                    profile["subcommand_count"].as_u64().unwrap_or(0),
                    profile["option_count"].as_u64().unwrap_or(0),
                )
            } else {
                (
                    profile["subcommands"]
                        .as_array()
                        .map(|items| items.len() as u64)
                        .unwrap_or(0),
                    profile["options"]
                        .as_array()
                        .map(|items| items.len() as u64)
                        .unwrap_or(0),
                )
            };
            lines.push(format!(
                "- {}: {} ({} subcommands, {} options)",
                command, summary, subcommand_count, option_count
            ));
        }
    }
    if let Some(failures) = value["failures"].as_array() {
        if !failures.is_empty() {
            lines.push(String::new());
            lines.push("failures:".into());
            for failure in failures {
                lines.push(format!(
                    "- {}: {}",
                    failure["command"].as_str().unwrap_or("<unknown>"),
                    failure["error"].as_str().unwrap_or("unknown error")
                ));
            }
        }
    }
    if let Some(skipped) = value["skipped"].as_array() {
        if !skipped.is_empty() {
            lines.push(String::new());
            lines.push("skipped:".into());
            for entry in skipped {
                lines.push(format!(
                    "- {}: {}",
                    entry["command"].as_str().unwrap_or("<unknown>"),
                    entry["reason"].as_str().unwrap_or("skipped")
                ));
            }
        }
    }
    if let Some(output_dir) = value["output_dir"].as_str() {
        lines.push(String::new());
        lines.push(format!(
            "saved_profiles: {} -> {}",
            value["written_profile_count"].as_u64().unwrap_or(0),
            output_dir
        ));
    }
    lines.join("\n")
}
fn format_diff_toon(value: &Value) -> String {
let mut lines = Vec::new();
lines.push(format!(
"command: {}",
value["command"].as_str().unwrap_or("<unknown>")
));
lines.push(format!(
"summary_changed: {}",
value["summary_changed"].as_bool().unwrap_or(false)
));
if let Some(before) = value["before_summary"].as_str() {
lines.push(format!("before_summary: {}", before));
}
if let Some(after) = value["after_summary"].as_str() {
lines.push(format!("after_summary: {}", after));
}
if let Some(note) = value["migration_note"].as_str() {
lines.push(format!("migration_note: {}", note));
}
let add_list = |lines: &mut Vec<String>, label: &str, field: &Value| {
if let Some(items) = field.as_array() {
if !items.is_empty() {
lines.push(String::new());
lines.push(format!("{}:", label));
for item in items {
lines.push(format!("- {}", item.as_str().unwrap_or("<unknown>")));
}
}
}
};
add_list(&mut lines, "subcommands_added", &value["subcommands_added"]);
add_list(
&mut lines,
"subcommands_removed",
&value["subcommands_removed"],
);
add_list(&mut lines, "options_added", &value["options_added"]);
add_list(&mut lines, "options_removed", &value["options_removed"]);
add_list(&mut lines, "environment_added", &value["environment_added"]);
add_list(
&mut lines,
"environment_removed",
&value["environment_removed"],
);
lines.join("\n")
}
fn slugify_loose(input: &str) -> String {
    // Loose slug: ASCII alphanumerics are lowercased and kept; every run
    // of other characters collapses to a single '-'; leading/trailing
    // dashes are trimmed at the end.
    let mut out = String::with_capacity(input.len());
    for ch in input.chars() {
        if ch.is_ascii_alphanumeric() {
            out.push(ch.to_ascii_lowercase());
        } else if !out.ends_with('-') {
            // `ends_with('-')` is false on an empty buffer, matching the
            // original "last_was_dash starts false" behavior.
            out.push('-');
        }
    }
    out.trim_matches('-').to_string()
}
fn compact_value_from_full_profile_value(profile: &Value) -> Value {
    // Try to parse the JSON into a typed profile and compact it; if the
    // value is not a valid full profile, pass it through unchanged.
    match serde_json::from_value::<cli_surfaces::CliSurfaceProfile>(profile.clone()) {
        Ok(parsed) => cli_surfaces::compact_profile_value(&parsed),
        Err(_) => profile.clone(),
    }
}
/// How batch profile writes treat files that already exist in the
/// output directory (see `write_batch_profile_file`).
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
enum BatchOutputWriteMode {
    /// Probe `<slug>.json`, `<slug>-2.json`, ... until an unused name is found.
    Unique,
    /// Always write `<slug>.json`, replacing any existing file.
    Overwrite,
    /// Write `<slug>.json` only when that file does not already exist.
    SkipExisting,
}
/// True when a CLI diff value reports any change: a set boolean flag, a
/// non-empty added/removed list, or differing generation metadata.
///
/// Missing or non-array fields count as unchanged, matching the
/// `unwrap_or` defaults the original repeated per field.
fn diff_value_has_changes(value: &Value) -> bool {
    let flag = |key: &str| value[key].as_bool().unwrap_or(false);
    let has_items = |key: &str| {
        value[key]
            .as_array()
            .map(|items| !items.is_empty())
            .unwrap_or(false)
    };
    flag("summary_changed")
        || flag("description_changed")
        || has_items("subcommands_added")
        || has_items("subcommands_removed")
        || has_items("options_added")
        || has_items("options_removed")
        || has_items("environment_added")
        || has_items("environment_removed")
        || value["before_generation_depth"] != value["after_generation_depth"]
        || value["before_nested_profile_count"] != value["after_nested_profile_count"]
}
/// True when a codebase discovery diff reports any change: a set count
/// flag or any non-empty added/removed list.
///
/// Missing or non-array fields count as unchanged, matching the
/// `unwrap_or` defaults the original repeated per field.
fn codebase_diff_has_changes(value: &Value) -> bool {
    let flag = |key: &str| value[key].as_bool().unwrap_or(false);
    let has_items = |key: &str| {
        value[key]
            .as_array()
            .map(|items| !items.is_empty())
            .unwrap_or(false)
    };
    flag("manifest_count_changed")
        || flag("task_runner_count_changed")
        || flag("entrypoint_count_changed")
        || flag("config_count_changed")
        || has_items("project_kinds_added")
        || has_items("project_kinds_removed")
        || has_items("manifests_added")
        || has_items("manifests_removed")
        || has_items("task_runners_added")
        || has_items("task_runners_removed")
        || has_items("entrypoints_added")
        || has_items("entrypoints_removed")
        || has_items("configs_added")
        || has_items("configs_removed")
        || has_items("recommended_commands_added")
        || has_items("recommended_commands_removed")
}
/// True when a traffic capture diff reports any change: a set count flag
/// or any non-empty added/removed list.
///
/// Missing or non-array fields count as unchanged, matching the
/// `unwrap_or` defaults the original repeated per field.
fn traffic_diff_has_changes(value: &Value) -> bool {
    let flag = |key: &str| value[key].as_bool().unwrap_or(false);
    let has_items = |key: &str| {
        value[key]
            .as_array()
            .map(|items| !items.is_empty())
            .unwrap_or(false)
    };
    flag("request_count_changed")
        || flag("endpoint_count_changed")
        || has_items("endpoints_added")
        || has_items("endpoints_removed")
        || has_items("status_codes_added")
        || has_items("status_codes_removed")
        || has_items("content_types_added")
        || has_items("content_types_removed")
}
/// True when a GraphQL schema diff reports any change: a set type/count
/// flag or any non-empty added/removed list.
///
/// Missing or non-array fields count as unchanged, matching the
/// `unwrap_or` defaults the original repeated per field.
fn graphql_diff_has_changes(value: &Value) -> bool {
    let flag = |key: &str| value[key].as_bool().unwrap_or(false);
    let has_items = |key: &str| {
        value[key]
            .as_array()
            .map(|items| !items.is_empty())
            .unwrap_or(false)
    };
    flag("query_type_changed")
        || flag("mutation_type_changed")
        || flag("operation_count_changed")
        || flag("type_count_changed")
        || has_items("operations_added")
        || has_items("operations_removed")
        || has_items("types_added")
        || has_items("types_removed")
}
fn resolve_batch_profile_output_path(
    output_dir: &Path,
    command: &str,
    slug_counts: &mut HashMap<String, usize>,
) -> (String, PathBuf) {
    // Derive a slug for `command`, then probe `<slug>.json`,
    // `<slug>-2.json`, ... until an unused file name is found. The
    // per-slug counter persists across calls so later profiles for the
    // same command keep advancing instead of re-probing from 1.
    let slug = {
        let candidate = slugify_loose(command);
        if candidate.is_empty() {
            "profile".to_string()
        } else {
            candidate
        }
    };
    let count = slug_counts.entry(slug.clone()).or_insert(0);
    *count += 1;
    loop {
        let file_name = match *count {
            1 => format!("{slug}.json"),
            n => format!("{slug}-{n}.json"),
        };
        let candidate = output_dir.join(&file_name);
        if !candidate.exists() {
            return (slug, candidate);
        }
        *count += 1;
    }
}
/// Write one batch profile JSON file and report what happened.
///
/// Renders the profile (compacted when `compact` is set), resolves the
/// target path per `write_mode`, and returns a small JSON record with the
/// command, path, and the action taken (`written`, `overwritten`, or
/// `skipped_existing`).
fn write_batch_profile_file(
    output_dir: &Path,
    command: &str,
    profile: &Value,
    compact: bool,
    slug_counts: &mut HashMap<String, usize>,
    write_mode: BatchOutputWriteMode,
) -> Result<Value> {
    fs::create_dir_all(output_dir)?;
    let rendered_value = if compact {
        compact_value_from_full_profile_value(profile)
    } else {
        profile.clone()
    };
    let path = match write_mode {
        BatchOutputWriteMode::Unique => {
            resolve_batch_profile_output_path(output_dir, command, slug_counts).1
        }
        BatchOutputWriteMode::Overwrite | BatchOutputWriteMode::SkipExisting => {
            let mut slug = slugify_loose(command);
            if slug.is_empty() {
                slug = "profile".into();
            }
            output_dir.join(format!("{slug}.json"))
        }
    };
    // Stat the path once and reuse the answer; the original called
    // `path.exists()` a second time in the SkipExisting guard.
    let existed = path.exists();
    if matches!(write_mode, BatchOutputWriteMode::SkipExisting) && existed {
        return Ok(json!({
            "command": command,
            "path": path.display().to_string(),
            "compact": compact,
            "action": "skipped_existing",
        }));
    }
    fs::write(&path, serde_json::to_string_pretty(&rendered_value)?)?;
    Ok(json!({
        "command": command,
        "path": path.display().to_string(),
        "compact": compact,
        "action": if matches!(write_mode, BatchOutputWriteMode::Overwrite) && existed {
            "overwritten"
        } else {
            "written"
        },
    }))
}
fn attach_batch_output_dir_metadata(
    value: &mut Value,
    output_dir: &Path,
    written_profiles: &[Value],
) {
    // Count skipped entries once; everything else was written. An entry
    // is "skipped" exactly when its action is the string
    // "skipped_existing", so written = total - skipped.
    let skipped_existing_count = written_profiles
        .iter()
        .filter(|entry| entry["action"].as_str() == Some("skipped_existing"))
        .count();
    let written_count = written_profiles.len() - skipped_existing_count;
    let object = match value.as_object_mut() {
        Some(object) => object,
        None => return,
    };
    object.insert(
        "output_dir".into(),
        Value::String(output_dir.display().to_string()),
    );
    object.insert(
        "written_profile_count".into(),
        Value::from(written_count as u64),
    );
    object.insert(
        "skipped_existing_count".into(),
        Value::from(skipped_existing_count as u64),
    );
    object.insert(
        "written_profiles".into(),
        Value::Array(written_profiles.to_vec()),
    );
}
fn write_batch_manifest_file(output_dir: &Path, value: &Value) -> Result<PathBuf> {
    // Batch runs always summarize into a fixed manifest file name.
    fs::create_dir_all(output_dir)?;
    let manifest_path = output_dir.join("batch-summary.json");
    let rendered = serde_json::to_string_pretty(value)?;
    fs::write(&manifest_path, rendered)?;
    Ok(manifest_path)
}
fn batch_event_for_output(event: &Value, compact: bool) -> Value {
    // Only "profile" events are transformed; every other event type is
    // passed through untouched.
    if event["type"].as_str().unwrap_or_default() != "profile" {
        return event.clone();
    }
    let profile = if compact {
        compact_value_from_full_profile_value(&event["profile"])
    } else {
        event["profile"].clone()
    };
    json!({
        "type": "profile",
        "command": event["command"],
        "profile": profile,
    })
}
fn format_diff_markdown(value: &Value) -> String {
let mut lines = Vec::new();
lines.push(format!(
"# CLI Diff: `{}`",
value["command"].as_str().unwrap_or("<unknown>")
));
lines.push(String::new());
lines.push(format!(
"- Summary changed: `{}`",
value["summary_changed"].as_bool().unwrap_or(false)
));
if let Some(before) = value["before_summary"].as_str() {
lines.push(format!("- Before summary: {}", before));
}
if let Some(after) = value["after_summary"].as_str() {
lines.push(format!("- After summary: {}", after));
}
if let Some(note) = value["migration_note"].as_str() {
lines.push(format!("- Migration note: {}", note));
}
let mut push_section = |title: &str, field: &Value| {
if let Some(items) = field.as_array() {
if !items.is_empty() {
lines.push(String::new());
lines.push(format!("## {}", title));
lines.push(String::new());
for item in items {
lines.push(format!("- `{}`", item.as_str().unwrap_or("<unknown>")));
}
}
}
};
push_section("Added subcommands", &value["subcommands_added"]);
push_section("Removed subcommands", &value["subcommands_removed"]);
push_section("Added options", &value["options_added"]);
push_section("Removed options", &value["options_removed"]);
push_section("Added environment", &value["environment_added"]);
push_section("Removed environment", &value["environment_removed"]);
lines.join("\n")
}
fn diff_display_value(value: &Value, format: DiffOutputFormat) -> String {
    // Toon and Markdown have dedicated renderers; every other format is a
    // structured format and must have an `as_structured()` mapping.
    match format {
        DiffOutputFormat::Toon => format_diff_toon(value),
        DiffOutputFormat::Markdown => format_diff_markdown(value),
        other => output::format_structured_value(value, other.as_structured().unwrap()),
    }
}
fn resolve_diff_output_format(format: Option<DiffOutputFormat>, pretty: bool) -> DiffOutputFormat {
    // An explicit format wins; otherwise `pretty` picks the JSON flavor.
    match format {
        Some(explicit) => explicit,
        None if pretty => DiffOutputFormat::JsonPretty,
        None => DiffOutputFormat::Json,
    }
}
fn print_cache_stats_report(value: &Value) {
    // Human-readable dump of the CLI profile cache stats payload.
    let path = value["path"].as_str().unwrap_or("<unknown>");
    let entries = value["entry_count"].as_u64().unwrap_or(0);
    let bytes = value["total_bytes"].as_u64().unwrap_or(0);
    let ttl_secs = value["default_ttl_secs"].as_u64().unwrap_or(0);
    println!("CLI profile cache");
    println!("Path: {}", path);
    println!("Entries: {}", entries);
    println!("Size: {} bytes", bytes);
    println!("Default TTL: {} seconds", ttl_secs);
}
fn print_cache_warm_report(value: &Value) {
    // One-line summary of a cache warm run; missing counters print as 0.
    let count = |key: &str| value[key].as_u64().unwrap_or(0);
    println!(
        "Warmed {} CLI profile(s) with parallelism {} ({} failures, {} skipped)",
        count("warmed_count"),
        count("parallelism"),
        count("failed_count"),
        count("skipped_count")
    );
}
fn print_migrated_profile_report(value: &Value) {
    // Summarize a completed profile migration for terminal users.
    let command = value["command"].as_str().unwrap_or("<unknown>");
    println!("Migrated CLI profile for `{}`", command);
    // Input/output paths are optional and only printed when present.
    for (label, key) in [("Input", "input"), ("Output", "output")] {
        if let Some(path) = value[key].as_str() {
            println!("{}: {}", label, path);
        }
    }
    let schema = value["profile_schema"].as_str().unwrap_or("<unknown>");
    println!("Schema: {}", schema);
}
fn print_corpus_stats_report(value: &Value) {
    // Two summary lines for the profile corpus; missing counters print
    // as 0 and a missing quality score prints as 0.0.
    let count = |key: &str| value[key].as_u64().unwrap_or(0);
    println!("Profile corpus");
    println!(
        "Entries: {} (profiles: {}, errors: {})",
        count("count"),
        count("profile_count"),
        count("error_count")
    );
    println!(
        "Commands: {} | Ready: {} | Stale: {} | Avg quality: {:.1}",
        count("command_count"),
        count("ready_count"),
        count("stale_count"),
        value["average_quality_score"].as_f64().unwrap_or(0.0)
    );
}
fn print_corpus_query_report(value: &Value) {
    // Header line plus one bullet per matching corpus entry.
    println!(
        "Corpus query: {} match(es)",
        value["match_count"].as_u64().unwrap_or(0)
    );
    let entries = match value["entries"].as_array() {
        Some(entries) => entries,
        None => return,
    };
    for entry in entries {
        println!(
            "- {}: {} [quality={} stale={}]",
            entry["command"].as_str().unwrap_or("<unknown>"),
            entry["summary"].as_str().unwrap_or_default(),
            entry["quality"]["score"].as_u64().unwrap_or(0),
            entry["freshness"]["stale"].as_bool().unwrap_or(false)
        );
    }
}
/// Connectivity check run before a bake config is saved.
///
/// For MCP sources (stdio/http) this connects and calls `list_tools`;
/// for API, OpenAPI-spec, and GraphQL sources it performs a connect/load
/// against the configured source URL. Any failure is wrapped into an
/// `SxmcError::Other` whose message is enriched with source-specific
/// hints by `augment_bake_validation_message`.
async fn validate_bake_config(config: &BakeConfig) -> Result<()> {
    match config.source_type {
        SourceType::Stdio | SourceType::Http => {
            let client = ConnectedMcpClient::connect(config).await.map_err(|error| {
                let base = format!(
                    "Bake '{}' could not connect during validation: {}",
                    config.name, error
                );
                sxmc::error::SxmcError::Other(augment_bake_validation_message(
                    config,
                    &base,
                    &error.to_string(),
                ))
            })?;
            let result = client.list_tools().await.map_err(|error| {
                let base = format!(
                    "Bake '{}' connected but list_tools failed during validation: {}",
                    config.name, error
                );
                sxmc::error::SxmcError::Other(augment_bake_validation_message(
                    config,
                    &base,
                    &error.to_string(),
                ))
            });
            // Close the transport even when list_tools failed, then surface
            // the list_tools outcome (discarding the tool list on success).
            client.close().await?;
            result.map(|_| ())
        }
        SourceType::Api => {
            let headers = parse_headers(&config.auth_headers)?;
            // Fall back to a 10-second timeout when the config sets none.
            api::ApiClient::connect(
                &config.source,
                &headers,
                parse_timeout(config.timeout_seconds.or(Some(10))),
            )
            .await
            .map(|_| ())
            .map_err(|error| {
                let base = format!(
                    "Bake '{}' could not validate API source '{}': {}",
                    config.name, config.source, error
                );
                sxmc::error::SxmcError::Other(augment_bake_validation_message(
                    config,
                    &base,
                    &error.to_string(),
                ))
            })
        }
        SourceType::Spec => {
            let headers = parse_headers(&config.auth_headers)?;
            // Fall back to a 10-second timeout when the config sets none.
            openapi::OpenApiSpec::load(
                &config.source,
                &headers,
                parse_timeout(config.timeout_seconds.or(Some(10))),
            )
            .await
            .map(|_| ())
            .map_err(|error| {
                let base = format!(
                    "Bake '{}' could not validate OpenAPI source '{}': {}",
                    config.name, config.source, error
                );
                sxmc::error::SxmcError::Other(augment_bake_validation_message(
                    config,
                    &base,
                    &error.to_string(),
                ))
            })
        }
        SourceType::Graphql => {
            let headers = parse_headers(&config.auth_headers)?;
            // Fall back to a 10-second timeout when the config sets none.
            graphql::GraphQLClient::connect(
                &config.source,
                &headers,
                parse_timeout(config.timeout_seconds.or(Some(10))),
            )
            .await
            .map(|_| ())
            .map_err(|error| {
                let base = format!(
                    "Bake '{}' could not validate GraphQL source '{}': {}",
                    config.name, config.source, error
                );
                sxmc::error::SxmcError::Other(augment_bake_validation_message(
                    config,
                    &base,
                    &error.to_string(),
                ))
            })
        }
    }
}
/// Append troubleshooting hints to a bake-validation error message.
///
/// Hints are chosen from the bake's source type plus keyword matching on
/// a lowercased copy of the error detail (auth codes, connectivity terms,
/// introspection mentions) and on the configured source string (npx,
/// python/uv, docker/podman, file extensions). A final hint about
/// `--skip-validate` is always appended. Returns `base` followed by a
/// "Hints:" section, one hint per `\n- ` line.
fn augment_bake_validation_message(config: &BakeConfig, base: &str, detail: &str) -> String {
    let mut hints = Vec::new();
    // Case-insensitive keyword matching against the raw error detail.
    let lowered = detail.to_ascii_lowercase();
    match config.source_type {
        SourceType::Stdio => {
            hints.push("Run the stdio command directly once to confirm it starts and speaks MCP over stdout.".to_string());
            if lowered.contains("command not found")
                || lowered.contains("no such file or directory")
            {
                hints.push("The configured executable was not found on PATH. Use a full path, install the tool first, or wrap npm-based servers with `npx`.".to_string());
            }
            // Source-string heuristics: npm, Python, and container runners
            // each get a tailored hint.
            if config.source.contains("npx") {
                hints.push("If this is an npm MCP server, verify the package name manually with `npx ... --help` or install it globally before baking it.".to_string());
            }
            if config.source.contains("python")
                || config.source.contains(".py")
                || config.source.contains("uv ")
                || config.source.contains("uvx ")
            {
                hints.push("For Python-backed servers, confirm the virtualenv or tool runner is available in the same environment where sxmc will execute the bake.".to_string());
            }
            if config.source.contains("docker") || config.source.contains("podman") {
                hints.push("For container-backed servers, confirm the image exists locally and that the command keeps stdin/stdout attached for MCP traffic.".to_string());
            }
        }
        SourceType::Http => {
            hints.push("Check that the HTTP MCP server is already running and that the URL points at its streamable MCP endpoint (often `/mcp`).".to_string());
            if lowered.contains("401")
                || lowered.contains("403")
                || lowered.contains("unauthorized")
            {
                hints.push("Validation reached the server but auth failed. Re-check `--auth-header` values or bearer-token setup.".to_string());
            }
            if lowered.contains("connection refused")
                || lowered.contains("timed out")
                || lowered.contains("dns")
                || lowered.contains("connect")
            {
                hints.push("If the server is intentionally offline right now, re-run with `--skip-validate` and bring it up before calling it later.".to_string());
            }
        }
        SourceType::Api => {
            hints.push("Verify the API spec URL is reachable and that any auth headers or timeout settings are correct.".to_string());
            if lowered.contains("401")
                || lowered.contains("403")
                || lowered.contains("unauthorized")
            {
                hints.push("The API rejected auth during validation. Re-check tokens, headers, and whether the endpoint expects a different auth scheme.".to_string());
            }
            // A source without a spec-like extension is probably an HTML
            // docs page rather than a machine-readable spec.
            if !config.source.ends_with(".json")
                && !config.source.ends_with(".yaml")
                && !config.source.ends_with(".yml")
            {
                hints.push("If this is an API docs page rather than a machine-readable spec, bake the raw OpenAPI URL instead of the human HTML page.".to_string());
            }
        }
        SourceType::Spec => {
            hints.push("Confirm the OpenAPI document URL/file is valid JSON or YAML and reachable from this machine.".to_string());
            if lowered.contains("401")
                || lowered.contains("403")
                || lowered.contains("unauthorized")
            {
                hints.push("The spec endpoint likely needs auth. Re-check `--auth-header` values or fetch the spec once manually first.".to_string());
            }
        }
        SourceType::Graphql => {
            hints.push("Verify the GraphQL endpoint is reachable and supports the schema/introspection flow expected by `sxmc graphql`.".to_string());
            if lowered.contains("401")
                || lowered.contains("403")
                || lowered.contains("unauthorized")
            {
                hints.push("The GraphQL endpoint rejected auth during validation. Re-check tokens and headers.".to_string());
            }
            if lowered.contains("introspection")
                || lowered.contains("schema")
                || lowered.contains("field")
            {
                hints.push("If introspection is disabled in production, validate against a staging/schema endpoint or save the bake with `--skip-validate` until a schema source is available.".to_string());
            }
        }
    }
    // Unconditional escape hatch appended after any source-specific hints.
    hints.push("If you intentionally want to save an offline or placeholder target, re-run with `--skip-validate`.".to_string());
    let mut message = base.to_string();
    if !hints.is_empty() {
        message.push_str("\nHints:");
        for hint in hints {
            message.push_str("\n- ");
            message.push_str(&hint);
        }
    }
    message
}
fn ai_client_id(client: AiClientProfile) -> &'static str {
    // The host spec's sidecar scope doubles as the stable client id.
    let spec = cli_surfaces::host_profile_spec(client);
    spec.sidecar_scope
}
fn host_value(client: AiClientProfile) -> Value {
json!({
"id": ai_client_id(client),
"label": ai_client_display_name(client),
})
}
fn profile_summary_value(profile: &cli_surfaces::CliSurfaceProfile) -> Value {
    // Condensed JSON view of a CLI surface profile plus its quality report.
    let quality = profile.quality_report();
    let quality_value = json!({
        "ready_for_agent_docs": quality.ready_for_agent_docs,
        "score": quality.score,
        "level": quality.level,
        "reasons": quality.reasons,
    });
    json!({
        "command": profile.command,
        "summary": profile.summary,
        "subcommand_count": profile.subcommands.len(),
        "option_count": profile.options.len(),
        "interactive": profile.interactive,
        "interactive_reasons": profile.interactive_reasons,
        "non_interactive_alternatives": profile.non_interactive_alternatives,
        "quality": quality_value,
    })
}
/// Stable string identifier for a write status, used in JSON reports.
fn write_status_name(status: cli_surfaces::WriteStatus) -> &'static str {
    match status {
        cli_surfaces::WriteStatus::Created => "created",
        cli_surfaces::WriteStatus::Updated => "updated",
        cli_surfaces::WriteStatus::Skipped => "skipped",
        cli_surfaces::WriteStatus::Removed => "removed",
    }
}
/// Stable string identifier for an artifact mode, used in JSON reports.
fn artifact_mode_name(mode: ArtifactMode) -> &'static str {
    match mode {
        ArtifactMode::Preview => "preview",
        ArtifactMode::WriteSidecar => "write_sidecar",
        ArtifactMode::Patch => "patch",
        ArtifactMode::Apply => "apply",
    }
}
fn write_outcomes_value(outcomes: &[cli_surfaces::WriteOutcome]) -> Value {
    // Serialize each write outcome into a JSON object for reports.
    let mut values = Vec::with_capacity(outcomes.len());
    for outcome in outcomes {
        values.push(json!({
            "label": outcome.label,
            "path": outcome.path.display().to_string(),
            "mode": artifact_mode_name(outcome.mode),
            "status": write_status_name(outcome.status),
        }));
    }
    Value::Array(values)
}
fn write_outcome_summary_value(outcomes: &[cli_surfaces::WriteOutcome]) -> Value {
let mut created = 0usize;
let mut updated = 0usize;
let mut skipped = 0usize;
let mut removed = 0usize;
for outcome in outcomes {
match outcome.status {
cli_surfaces::WriteStatus::Created => created += 1,
cli_surfaces::WriteStatus::Updated => updated += 1,
cli_surfaces::WriteStatus::Skipped => skipped += 1,
cli_surfaces::WriteStatus::Removed => removed += 1,
}
}
json!({
"created": created,
"updated": updated,
"skipped": skipped,
"removed": removed,
"total": outcomes.len(),
})
}
fn sync_mode_name(apply: bool) -> &'static str {
    // A sync run either writes to disk ("apply") or only reports ("preview").
    match apply {
        true => "apply",
        false => "preview",
    }
}
fn profile_sha256(profile: &cli_surfaces::CliSurfaceProfile) -> Result<String> {
    // Content hash of the JSON-serialized profile, used for change detection.
    let bytes = serde_json::to_vec(profile)?;
    Ok(sha256_hex(&bytes))
}
fn sync_profile_write_outcome(
    path: &Path,
    mode: ArtifactMode,
    changed: bool,
) -> cli_surfaces::WriteOutcome {
    // A synced profile either changed on disk (Updated) or matched the
    // existing content (Skipped).
    let status = if changed {
        cli_surfaces::WriteStatus::Updated
    } else {
        cli_surfaces::WriteStatus::Skipped
    };
    cli_surfaces::WriteOutcome {
        label: "CLI profile".into(),
        path: path.to_path_buf(),
        mode,
        status,
    }
}
fn text_write_outcome(
    label: &str,
    path: PathBuf,
    mode: ArtifactMode,
    status: cli_surfaces::WriteStatus,
) -> cli_surfaces::WriteOutcome {
    // Convenience constructor for text-artifact write outcomes.
    cli_surfaces::WriteOutcome {
        status,
        mode,
        path,
        label: label.to_string(),
    }
}
fn materialize_text_outputs(
    outputs: &[(String, PathBuf, String)],
    mode: ArtifactMode,
    root: &Path,
) -> Result<Vec<cli_surfaces::WriteOutcome>> {
    // Resolve each path against `root`, classify the write against what is
    // already on disk, and only touch the filesystem in apply/write-sidecar
    // modes when the content actually differs.
    let mut outcomes = Vec::with_capacity(outputs.len());
    for (label, raw_path, content) in outputs {
        let path = if raw_path.is_absolute() {
            raw_path.clone()
        } else {
            root.join(raw_path)
        };
        // Unreadable/missing file is treated as "will be created".
        let status = match fs::read_to_string(&path) {
            Ok(current) if current == *content => cli_surfaces::WriteStatus::Skipped,
            Ok(_) => cli_surfaces::WriteStatus::Updated,
            Err(_) => cli_surfaces::WriteStatus::Created,
        };
        let should_write = matches!(mode, ArtifactMode::Apply | ArtifactMode::WriteSidecar)
            && !matches!(status, cli_surfaces::WriteStatus::Skipped);
        if should_write {
            if let Some(parent) = path.parent() {
                fs::create_dir_all(parent)?;
            }
            fs::write(&path, content)?;
        }
        outcomes.push(text_write_outcome(label, path, mode, status));
    }
    Ok(outcomes)
}
/// Build the (label, relative path, content) entries for a discovery pack.
///
/// Loads every snapshot under `from_snapshot`, renders one Markdown
/// scaffold per snapshot into `output_dir`, and appends a `README.md`
/// index linking each scaffold. Paths in the returned tuples are relative
/// to the scaffold root (they are joined under `output_dir` here and
/// resolved later by the writer).
fn discovery_pack_output_entries(
    from_snapshot: &Path,
    output_dir: &Path,
) -> Result<Vec<(String, PathBuf, String)>> {
    let snapshots = discovery_snapshots::load_snapshot_inputs(from_snapshot)?;
    let mut outputs = Vec::new();
    // (snapshot, generated file name) pairs retained for the index below.
    let mut index_rows = Vec::new();
    for entry in snapshots {
        let source_type = entry.value["source_type"].as_str().unwrap_or("discovery");
        let stem = entry
            .path
            .file_stem()
            .and_then(|value| value.to_str())
            .unwrap_or("snapshot");
        // File name combines slugified source type and snapshot file stem.
        let file_name = format!("{}-{}.md", slugify_label(source_type), slugify_label(stem));
        let relative_path = output_dir.join(&file_name);
        let label = format!("{} discovery scaffold", source_type);
        outputs.push((
            label,
            relative_path.clone(),
            render_discovery_snapshot_markdown(&entry),
        ));
        index_rows.push((entry, file_name));
    }
    let mut lines = vec![
        "# Discovery pack".to_string(),
        String::new(),
        "Generated by `sxmc scaffold discovery-pack` from saved discovery snapshots.".into(),
        String::new(),
        "## Snapshots".into(),
    ];
    for (entry, file_name) in index_rows {
        let source_type = entry.value["source_type"].as_str().unwrap_or("discovery");
        lines.push(format!(
            "- [{}]({}) — {}",
            discovery_snapshot_title(&entry),
            file_name,
            discovery_snapshot_brief(&entry.value, source_type)
        ));
    }
    lines.push(String::new());
    lines.push(
        "Use these scaffolded docs as review-friendly handoff artifacts for teams, runbooks, or AI host context.".into(),
    );
    // The index itself is emitted last so it can reference every scaffold.
    outputs.push((
        "Discovery pack index".into(),
        output_dir.join("README.md"),
        lines.join("\n"),
    ));
    Ok(outputs)
}
/// Build the (label, relative path, content) entries for discovery tools.
///
/// Loads every snapshot under `from_snapshot`; snapshots whose source
/// type has a tool-manifest generator (graphql/database/traffic, see
/// `discovery_tool_manifest_value`) produce one JSON manifest each, and a
/// `README.md` index lists both generated manifests and skipped snapshots.
fn discovery_tools_output_entries(
    from_snapshot: &Path,
    output_dir: &Path,
) -> Result<Vec<(String, PathBuf, String)>> {
    let snapshots = discovery_snapshots::load_snapshot_inputs(from_snapshot)?;
    let mut outputs = Vec::new();
    let mut index_lines = vec![
        "# Discovery tools".to_string(),
        String::new(),
        "Generated by `sxmc scaffold discovery-tools` from saved discovery snapshots.".into(),
        String::new(),
        "## Generated manifests".into(),
    ];
    // Bullet lines for snapshots that produced no manifest.
    let mut skipped = Vec::new();
    for entry in snapshots {
        let source_type = entry.value["source_type"].as_str().unwrap_or("discovery");
        let stem = entry
            .path
            .file_stem()
            .and_then(|value| value.to_str())
            .unwrap_or("snapshot");
        if let Some((manifest, tool_count)) = discovery_tool_manifest_value(&entry) {
            // File name combines slugified source type and snapshot stem.
            let file_name = format!(
                "{}-{}.json",
                slugify_label(source_type),
                slugify_label(stem)
            );
            let relative_path = output_dir.join(&file_name);
            outputs.push((
                format!("{} discovery tool scaffold", source_type),
                relative_path,
                serde_json::to_string_pretty(&manifest)?,
            ));
            index_lines.push(format!(
                "- `{}` — {} tool manifest(s) from [{}]({})",
                discovery_snapshot_title(&entry),
                tool_count,
                file_name,
                file_name
            ));
        } else {
            skipped.push(format!(
                "- `{}` — source type `{}` does not yet emit higher-level tool scaffolds",
                discovery_snapshot_title(&entry),
                source_type
            ));
        }
    }
    if !skipped.is_empty() {
        index_lines.push(String::new());
        index_lines.push("## Skipped snapshots".into());
        index_lines.extend(skipped);
    }
    // The index is emitted last so it can reference every manifest.
    outputs.push((
        "Discovery tools index".into(),
        output_dir.join("README.md"),
        index_lines.join("\n"),
    ));
    Ok(outputs)
}
fn discovery_tool_manifest_value(
entry: &discovery_snapshots::DiscoverySnapshotEntry,
) -> Option<(Value, usize)> {
let source_type = entry.value["source_type"].as_str().unwrap_or("discovery");
let tools = match source_type {
"graphql" => graphql_discovery_tool_entries(&entry.value),
"database" => database_discovery_tool_entries(&entry.value),
"traffic" => traffic_discovery_tool_entries(&entry.value),
_ => return None,
};
let tool_count = tools.len();
Some((
json!({
"scaffold_schema": "sxmc_scaffold_discovery_tools_v1",
"source_type": source_type,
"source_snapshot": entry.path.display().to_string(),
"title": discovery_snapshot_title(entry),
"tool_count": tool_count,
"generated_tools": tools,
}),
tool_count,
))
}
fn graphql_discovery_tool_entries(value: &Value) -> Vec<Value> {
    // One tool entry per named operation; unnamed operations are skipped.
    let operations = match value["operations"].as_array() {
        Some(operations) => operations,
        None => return Vec::new(),
    };
    let mut tools = Vec::new();
    for operation in operations {
        let name = match operation["name"].as_str() {
            Some(name) => name,
            None => continue,
        };
        let kind = operation["kind"].as_str().unwrap_or("query");
        tools.push(json!({
            "name": format!("graphql-{}-{}", kind, slugify_label(name)),
            "kind": "graphql-operation",
            "operation_name": name,
            "operation_kind": kind,
            "description": operation["description"].clone(),
            "arg_count": operation["arg_count"].as_u64().unwrap_or(0),
            "returns_composite": operation["returns_composite"].as_bool().unwrap_or(false),
            "source_url": value["url"].clone(),
        }));
    }
    tools
}
fn database_discovery_tool_entries(value: &Value) -> Vec<Value> {
    // Always emit one browse tool; then one describe tool per schema entry.
    let browse = json!({
        "name": "database-list-entries",
        "kind": "database-browse",
        "description": format!(
            "Browse discovered {} schema entries.",
            value["count"].as_u64().unwrap_or(0)
        ),
        "database_type": value["database_type"].clone(),
        "source": value["source"].clone(),
    });
    let mut tools = vec![browse];
    let entries = match value["entries"].as_array() {
        Some(entries) => entries,
        None => return tools,
    };
    for entry in entries {
        // Prefer the schema-qualified name; fall back to the bare name.
        let qualified_name = entry["qualified_name"]
            .as_str()
            .or_else(|| entry["name"].as_str())
            .unwrap_or("entry");
        tools.push(json!({
            "name": format!("database-describe-{}", slugify_label(qualified_name)),
            "kind": "database-describe",
            "entry_name": entry["name"].clone(),
            "qualified_name": entry["qualified_name"].clone(),
            "object_type": entry["object_type"].clone(),
            "column_count": entry["column_count"].as_u64().unwrap_or(0),
            "foreign_key_count": entry["foreign_key_count"].as_u64().unwrap_or(0),
            "index_count": entry["index_count"].as_u64().unwrap_or(0),
            "database_type": value["database_type"].clone(),
        }));
    }
    tools
}
fn traffic_discovery_tool_entries(value: &Value) -> Vec<Value> {
    // One tool entry per captured endpoint; no endpoints means no tools.
    let endpoints = match value["endpoints"].as_array() {
        Some(endpoints) => endpoints,
        None => return Vec::new(),
    };
    let mut tools = Vec::with_capacity(endpoints.len());
    for endpoint in endpoints {
        let method = endpoint["method"].as_str().unwrap_or("GET");
        let host = endpoint["host"].as_str().unwrap_or("unknown-host");
        let path = endpoint["path"].as_str().unwrap_or("/");
        tools.push(json!({
            "name": format!(
                "traffic-{}-{}-{}",
                slugify_label(method),
                slugify_label(host),
                slugify_label(path)
            ),
            "kind": "traffic-endpoint",
            "method": method,
            "host": host,
            "path": path,
            "count": endpoint["count"].as_u64().unwrap_or(0),
            "status_codes": endpoint["status_codes"].clone(),
            "content_types": endpoint["content_types"].clone(),
            "sample_url": endpoint["sample_url"].clone(),
            "capture_kind": value["capture_kind"].clone(),
        }));
    }
    tools
}
/// Render one saved discovery snapshot as a Markdown scaffold document.
///
/// Emits a common header (title, source type, schema, snapshot path),
/// then a source-type-specific summary section (codebase, database,
/// graphql, traffic each have a renderer; anything else gets a generic
/// note), and always ends with the full snapshot JSON in a code fence.
fn render_discovery_snapshot_markdown(
    entry: &discovery_snapshots::DiscoverySnapshotEntry,
) -> String {
    let value = &entry.value;
    let source_type = value["source_type"].as_str().unwrap_or("discovery");
    // Common header shared by every source type.
    let mut lines = vec![
        format!("# {}", discovery_snapshot_title(entry)),
        String::new(),
        format!("- Source type: `{}`", source_type),
        format!(
            "- Discovery schema: `{}`",
            value["discovery_schema"].as_str().unwrap_or("unknown")
        ),
        format!("- Snapshot path: `{}`", entry.path.display()),
        String::new(),
    ];
    match source_type {
        // Codebase snapshots: counts plus up to 8 recommended commands.
        "codebase" => {
            lines.push("## Summary".into());
            lines.push(format!(
                "- Project kinds: {}",
                value["project_kinds"]
                    .as_array()
                    .map(|items| {
                        items
                            .iter()
                            .filter_map(Value::as_str)
                            .collect::<Vec<_>>()
                            .join(", ")
                    })
                    .filter(|value| !value.is_empty())
                    .unwrap_or_else(|| "none".into())
            ));
            lines.push(format!(
                "- Manifests: {}",
                value["manifest_count"].as_u64().unwrap_or(0)
            ));
            lines.push(format!(
                "- Entrypoints: {}",
                value["entrypoint_count"].as_u64().unwrap_or(0)
            ));
            lines.push(format!(
                "- Configs: {}",
                value["config_count"].as_u64().unwrap_or(0)
            ));
            if let Some(commands) = value["recommended_commands"].as_array() {
                lines.push(String::new());
                lines.push("## Recommended commands".into());
                for command in commands.iter().take(8) {
                    lines.push(format!(
                        "- `{}`",
                        command["command"].as_str().unwrap_or("<command>")
                    ));
                }
            }
        }
        // Database snapshots: type/entry counts plus up to 12 entries.
        "database" => {
            lines.push("## Summary".into());
            lines.push(format!(
                "- Database type: `{}`",
                value["database_type"].as_str().unwrap_or("unknown")
            ));
            lines.push(format!(
                "- Entries: {}",
                value["count"].as_u64().unwrap_or(0)
            ));
            if let Some(entries) = value["entries"].as_array() {
                lines.push(String::new());
                lines.push("## Tables and views".into());
                for item in entries.iter().take(12) {
                    let name = item["name"].as_str().unwrap_or("<entry>");
                    let object_type = item["object_type"].as_str().unwrap_or("object");
                    let columns = item["column_count"].as_u64().unwrap_or(0);
                    lines.push(format!(
                        "- `{}` ({}, {} columns)",
                        name, object_type, columns
                    ));
                }
            }
        }
        // GraphQL snapshots: URL/counts plus up to 12 operations.
        "graphql" => {
            lines.push("## Summary".into());
            lines.push(format!(
                "- URL: `{}`",
                value["url"].as_str().unwrap_or("<url>")
            ));
            lines.push(format!(
                "- Operations: {}",
                value["operation_count"].as_u64().unwrap_or(0)
            ));
            lines.push(format!(
                "- Types: {}",
                value["type_count"].as_u64().unwrap_or(0)
            ));
            if let Some(operations) = value["operations"].as_array() {
                lines.push(String::new());
                lines.push("## Operations".into());
                for op in operations.iter().take(12) {
                    let name = op["name"].as_str().unwrap_or("<operation>");
                    let kind = op["kind"].as_str().unwrap_or("query");
                    let arg_count = op["arg_count"].as_u64().unwrap_or(0);
                    lines.push(format!("- `{}` ({}, {} args)", name, kind, arg_count));
                }
            }
        }
        // Traffic snapshots: capture/counts plus up to 12 endpoints.
        "traffic" => {
            lines.push("## Summary".into());
            lines.push(format!(
                "- Capture kind: `{}`",
                value["capture_kind"].as_str().unwrap_or("unknown")
            ));
            lines.push(format!(
                "- Requests: {}",
                value["request_count"].as_u64().unwrap_or(0)
            ));
            lines.push(format!(
                "- Endpoints: {}",
                value["endpoint_count"].as_u64().unwrap_or(0)
            ));
            if let Some(endpoints) = value["endpoints"].as_array() {
                lines.push(String::new());
                lines.push("## Endpoint map".into());
                for endpoint in endpoints.iter().take(12) {
                    lines.push(format!(
                        "- `{}` {}{} ({} request(s))",
                        endpoint["method"].as_str().unwrap_or("GET"),
                        endpoint["host"].as_str().unwrap_or("<host>"),
                        endpoint["path"].as_str().unwrap_or("/"),
                        endpoint["count"].as_u64().unwrap_or(0)
                    ));
                }
            }
        }
        // Unknown source types fall back to a generic note.
        _ => {
            lines.push("## Snapshot".into());
            lines.push(
                "This scaffolded discovery artifact currently uses the generic renderer.".into(),
            );
        }
    }
    // Always append the raw snapshot JSON for full fidelity.
    lines.push(String::new());
    lines.push("## Raw snapshot".into());
    lines.push("```json".into());
    lines.push(serde_json::to_string_pretty(value).unwrap_or_else(|_| "{}".into()));
    lines.push("```".into());
    lines.join("\n")
}
/// Human-readable title for a saved discovery snapshot, combining its
/// `source_type` field (uppercased) with the snapshot file's stem.
fn discovery_snapshot_title(entry: &discovery_snapshots::DiscoverySnapshotEntry) -> String {
    let source_type = entry.value["source_type"].as_str().unwrap_or("discovery");
    let stem = match entry.path.file_stem().and_then(|value| value.to_str()) {
        Some(stem) => stem,
        None => "snapshot",
    };
    format!("{} snapshot: {}", source_type.to_uppercase(), stem)
}
/// One-line summary of a discovery snapshot, specialised per source type.
/// Missing or mistyped fields degrade to zeroes / placeholder text.
fn discovery_snapshot_brief(value: &Value, source_type: &str) -> String {
    // Shorthand for the repeated "numeric field or 0" lookups.
    let count = |key: &str| value[key].as_u64().unwrap_or(0);
    match source_type {
        "codebase" => format!(
            "{} project kind(s), {} config(s), {} recommended command(s)",
            count("project_kind_count"),
            count("config_count"),
            count("recommended_command_count")
        ),
        "database" => format!(
            "{} {} entry/entries",
            value["database_type"].as_str().unwrap_or("database"),
            count("count")
        ),
        "graphql" => format!(
            "{} operation(s), {} type(s)",
            count("operation_count"),
            count("type_count")
        ),
        "traffic" => format!(
            "{} request(s), {} endpoint(s)",
            count("request_count"),
            count("endpoint_count")
        ),
        _ => format!(
            "{} snapshot from {}",
            source_type,
            value["source"].as_str().unwrap_or("unknown source")
        ),
    }
}
/// Read and parse the sync-state JSON at `path`. Returns `None` unless the
/// file exists, parses as JSON, and carries the expected `sync_schema` marker.
fn load_sync_state_value(path: &Path) -> Option<Value> {
    let contents = fs::read_to_string(path).ok()?;
    let value: Value = serde_json::from_str(&contents).ok()?;
    if value["sync_schema"].as_str() == Some(SYNC_STATE_SCHEMA) {
        Some(value)
    } else {
        None
    }
}
/// Collect up to 10 command names whose drift entries are flagged `changed`.
fn commands_needing_sync(drift: &Value) -> Vec<String> {
    let Some(entries) = drift["entries"].as_array() else {
        return Vec::new();
    };
    let mut commands = Vec::new();
    for entry in entries {
        if !entry["changed"].as_bool().unwrap_or(false) {
            continue;
        }
        if let Some(command) = entry["command"].as_str() {
            commands.push(command.to_string());
            // Cap the list so the summary stays short.
            if commands.len() == 10 {
                break;
            }
        }
    }
    commands
}
/// Summarise the persisted sync state alongside the current drift report.
/// When no valid sync-state file exists, a "not present" stub with the same
/// shape is returned so consumers can rely on a stable schema.
fn sync_state_summary_value(install_paths: &InstallPaths, drift: &Value) -> Value {
    let path = default_sync_state_path(install_paths);
    let current_drift = drift["changed_count"].as_u64().unwrap_or(0);
    let commands = commands_needing_sync(drift);
    match load_sync_state_value(&path) {
        Some(state) => json!({
            "path": path.display().to_string(),
            "present": true,
            "sync_schema": state["sync_schema"],
            "last_synced_at": state["last_synced_at"],
            "tracked_profile_count": state["profile_count"],
            "host_count": state["host_count"],
            "host_ids": state["host_ids"],
            "last_mode": state["mode"],
            "current_drift_count": current_drift,
            "commands_needing_sync": commands,
        }),
        None => json!({
            "path": path.display().to_string(),
            "present": false,
            "sync_schema": SYNC_STATE_SCHEMA,
            "last_synced_at": Value::Null,
            "tracked_profile_count": 0,
            "host_count": 0,
            "host_ids": [],
            "last_mode": Value::Null,
            "current_drift_count": current_drift,
            "commands_needing_sync": commands,
        }),
    }
}
/// Borrowed inputs for assembling the structured JSON result of `sxmc add`.
struct AddResultContext<'a> {
    /// Resolved install locations for the current scope/root.
    install_paths: &'a InstallPaths,
    /// Name of the CLI tool being added.
    command: &'a str,
    /// Inspected CLI surface profile for that tool.
    profile: &'a cli_surfaces::CliSurfaceProfile,
    /// AI hosts that were targeted (explicit or auto-detected).
    hosts: &'a [AiClientProfile],
    /// Per-file write/preview outcomes produced by the run.
    outcomes: &'a [cli_surfaces::WriteOutcome],
    /// True when hosts came from auto-detection rather than flags.
    auto_detected_hosts: bool,
    /// True when the user explicitly requested a preview.
    preview_requested: bool,
    /// True when the run fell back to preview because no hosts were found.
    auto_previewed_due_to_missing_hosts: bool,
}
fn add_result_value(ctx: AddResultContext<'_>) -> Value {
json!({
"command": "add",
"tool": ctx.command,
"root": ctx.install_paths.project_root().display().to_string(),
"install_scope": ctx.install_paths.scope().as_str(),
"effective_mode": if ctx.auto_previewed_due_to_missing_hosts || ctx.preview_requested { "preview" } else { "apply" },
"preview_requested": ctx.preview_requested,
"auto_previewed_due_to_missing_hosts": ctx.auto_previewed_due_to_missing_hosts,
"auto_detected_hosts": ctx.auto_detected_hosts,
"hosts": ctx.hosts.iter().copied().map(host_value).collect::<Vec<_>>(),
"profile": profile_summary_value(ctx.profile),
"outcomes": write_outcomes_value(ctx.outcomes),
"outcome_summary": write_outcome_summary_value(ctx.outcomes),
"recommended_command": if ctx.hosts.is_empty() {
Value::from(format!(
"sxmc add {} {} --host claude-code",
ctx.command,
scope_command_hint(ctx.install_paths)
))
} else {
Value::Null
}
})
}
/// Resolve a structured output format only when the user explicitly asked
/// for one (via `--format` or `--pretty`); otherwise `None`.
fn explicit_structured_format(
    format: Option<output::StructuredOutputFormat>,
    pretty: bool,
) -> Option<output::StructuredOutputFormat> {
    let explicitly_requested = format.is_some() || pretty;
    explicitly_requested.then(|| output::resolve_structured_format(format, pretty))
}
/// The human-facing detection banner is printed only when no structured
/// output format was explicitly requested.
fn should_print_api_detection_banner(
    format: Option<output::StructuredOutputFormat>,
    pretty: bool,
) -> bool {
    matches!(explicit_structured_format(format, pretty), None)
}
/// Shape a discovery snapshot JSON object for output: optionally strip all
/// arrays (`counts_only`), project array items down to the selected `fields`,
/// and window arrays with `offset`/`limit`.
///
/// Non-object values pass through unchanged. When a collection is truncated,
/// `total_<count_key>` records the pre-window length and `<count_key>` is
/// rewritten to the windowed length.
fn project_discovery_value(
    mut value: Value,
    counts_only: bool,
    fields: Option<&[String]>,
    offset: Option<usize>,
    limit: Option<usize>,
) -> Value {
    let Some(object) = value.as_object_mut() else {
        return value;
    };
    let source_type = object
        .get("source_type")
        .and_then(Value::as_str)
        .unwrap_or_default();
    // (array key, companion count key) pairs that are projectable per source type.
    let collections: &[(&str, &str)] = match source_type {
        "database" => &[("entries", "count")],
        "traffic" => &[("endpoints", "endpoint_count")],
        "codebase" => &[
            ("manifests", "manifest_count"),
            ("task_runners", "task_runner_count"),
            ("entrypoints", "entrypoint_count"),
            ("configs", "config_count"),
            ("recommended_commands", "recommended_command_count"),
            ("project_kinds", "project_kind_count"),
        ],
        _ => &[],
    };
    if counts_only {
        // Keep only non-array fields; every array is dropped wholesale.
        let mut filtered = serde_json::Map::new();
        for (key, item) in object.iter() {
            if !item.is_array() {
                filtered.insert(key.clone(), item.clone());
            }
        }
        filtered.insert("counts_only".into(), Value::Bool(true));
        if offset.is_some() {
            filtered.insert("offset".into(), json!(offset));
        }
        if limit.is_some() {
            filtered.insert("limit".into(), json!(limit));
        }
        return Value::Object(filtered);
    }
    for (array_key, count_key) in collections {
        let Some(items) = object.get(*array_key).and_then(Value::as_array) else {
            continue;
        };
        let total = items.len();
        let mut projected = items.clone();
        if let Some(fields) = fields {
            projected = projected
                .into_iter()
                .map(|item| retain_object_fields(item, fields))
                .collect();
        }
        apply_offset_limit(&mut projected, offset, limit);
        // Read the windowed length BEFORE moving `projected` into the map;
        // the previous code cloned the entire Vec just to call len() after.
        let kept = projected.len();
        object.insert((*array_key).to_string(), Value::Array(projected));
        if total != kept {
            object.insert(format!("total_{}", count_key), json!(total));
            object.insert((*count_key).to_string(), json!(kept));
        }
    }
    if offset.is_some() {
        object.insert("offset".into(), json!(offset));
    }
    if limit.is_some() {
        object.insert("limit".into(), json!(limit));
    }
    value
}
/// Borrowed inputs for assembling the structured JSON result of `sxmc setup`.
struct SetupResultContext<'a> {
    /// Resolved install locations for the current scope/root.
    install_paths: &'a InstallPaths,
    /// CLI tools that setup operated on.
    tools: &'a [String],
    /// Per-tool structured results; each carries an "outcome_summary" object.
    tool_results: &'a [Value],
    /// True when the tool list came from auto-detection rather than flags.
    auto_detected_tools: bool,
    /// AI hosts that were targeted (explicit or auto-detected).
    hosts: &'a [AiClientProfile],
    /// True when hosts came from auto-detection rather than flags.
    auto_detected_hosts: bool,
    /// True when the user explicitly requested a preview.
    preview_requested: bool,
    /// True when the run fell back to preview because no hosts were found.
    auto_previewed_due_to_missing_hosts: bool,
}
/// Build the structured JSON payload reported by `sxmc setup`, aggregating
/// each per-tool "outcome_summary" into file-level totals.
fn setup_result_value(ctx: SetupResultContext<'_>) -> Value {
    // Sum one summary field across every tool result (missing fields count as 0).
    let sum_field = |key: &str| -> u64 {
        ctx.tool_results
            .iter()
            .map(|result| result["outcome_summary"][key].as_u64().unwrap_or(0))
            .sum()
    };
    let created = sum_field("created");
    let updated = sum_field("updated");
    let skipped = sum_field("skipped");
    let removed = sum_field("removed");
    json!({
        "command": "setup",
        "tools": ctx.tools,
        "root": ctx.install_paths.project_root().display().to_string(),
        "install_scope": ctx.install_paths.scope().as_str(),
        "effective_mode": if ctx.auto_previewed_due_to_missing_hosts || ctx.preview_requested { "preview" } else { "apply" },
        "preview_requested": ctx.preview_requested,
        "auto_previewed_due_to_missing_hosts": ctx.auto_previewed_due_to_missing_hosts,
        "auto_detected_tools": ctx.auto_detected_tools,
        "auto_detected_hosts": ctx.auto_detected_hosts,
        "hosts": ctx.hosts.iter().copied().map(host_value).collect::<Vec<_>>(),
        "results": ctx.tool_results,
        "outcome_summary": {
            "created": created,
            "updated": updated,
            "skipped": skipped,
            "removed": removed,
            "total": created + updated + skipped + removed,
        },
        "recommended_command": if ctx.hosts.is_empty() {
            Value::from(format!(
                "sxmc setup --tool {} {} --host claude-code",
                ctx.tools.join(","),
                scope_command_hint(ctx.install_paths)
            ))
        } else {
            Value::Null
        }
    })
}
/// Print applied/sidecar write outcomes on stdout, followed by a summary
/// line. Preview and patch outcomes are intentionally silent here.
fn print_write_outcomes(outcomes: &[cli_surfaces::WriteOutcome]) {
    let mut created = 0usize;
    let mut updated = 0usize;
    let mut skipped = 0usize;
    let mut removed = 0usize;
    for outcome in outcomes {
        // Only sidecar and direct-apply writes are reported and counted.
        let sidecar = match outcome.mode {
            ArtifactMode::WriteSidecar => true,
            ArtifactMode::Apply => false,
            ArtifactMode::Preview | ArtifactMode::Patch => continue,
        };
        let verb = if sidecar {
            match outcome.status {
                cli_surfaces::WriteStatus::Created => "Created sidecar for",
                cli_surfaces::WriteStatus::Updated => "Updated sidecar for",
                cli_surfaces::WriteStatus::Skipped => "Skipped unchanged sidecar for",
                cli_surfaces::WriteStatus::Removed => "Removed sidecar for",
            }
        } else {
            match outcome.status {
                cli_surfaces::WriteStatus::Created => "Created",
                cli_surfaces::WriteStatus::Updated => "Updated",
                cli_surfaces::WriteStatus::Skipped => "Skipped unchanged",
                cli_surfaces::WriteStatus::Removed => "Removed",
            }
        };
        println!("{} {}: {}", verb, outcome.label, outcome.path.display());
        match outcome.status {
            cli_surfaces::WriteStatus::Created => created += 1,
            cli_surfaces::WriteStatus::Updated => updated += 1,
            cli_surfaces::WriteStatus::Skipped => skipped += 1,
            cli_surfaces::WriteStatus::Removed => removed += 1,
        }
    }
    if created + updated + skipped + removed > 0 {
        println!(
            "Summary: Created {}, Updated {}, Skipped unchanged {}, Removed {}",
            created, updated, skipped, removed
        );
    }
}
/// Print one "Would ..." line per outcome on stdout.
fn print_preview_outcomes(outcomes: &[cli_surfaces::WriteOutcome]) {
    let preview_verb = |status: &cli_surfaces::WriteStatus| match status {
        cli_surfaces::WriteStatus::Created => "Would create",
        cli_surfaces::WriteStatus::Updated => "Would update",
        cli_surfaces::WriteStatus::Skipped => "Would leave unchanged",
        cli_surfaces::WriteStatus::Removed => "Would remove",
    };
    for outcome in outcomes {
        println!(
            "{} {}: {}",
            preview_verb(&outcome.status),
            outcome.label,
            outcome.path.display()
        );
    }
}
/// Report applied/sidecar write outcomes on stderr, then a one-line summary.
/// Preview and patch outcomes are intentionally silent here.
fn eprint_write_outcomes(outcomes: &[cli_surfaces::WriteOutcome]) {
    // created, updated, skipped, removed
    let mut counts = (0usize, 0usize, 0usize, 0usize);
    for outcome in outcomes {
        let sidecar = match outcome.mode {
            ArtifactMode::WriteSidecar => true,
            ArtifactMode::Apply => false,
            ArtifactMode::Preview | ArtifactMode::Patch => continue,
        };
        let verb = match outcome.status {
            cli_surfaces::WriteStatus::Created => {
                counts.0 += 1;
                if sidecar { "Created sidecar for" } else { "Created" }
            }
            cli_surfaces::WriteStatus::Updated => {
                counts.1 += 1;
                if sidecar { "Updated sidecar for" } else { "Updated" }
            }
            cli_surfaces::WriteStatus::Skipped => {
                counts.2 += 1;
                if sidecar {
                    "Skipped unchanged sidecar for"
                } else {
                    "Skipped unchanged"
                }
            }
            cli_surfaces::WriteStatus::Removed => {
                counts.3 += 1;
                if sidecar { "Removed sidecar for" } else { "Removed" }
            }
        };
        eprintln!("{} {}: {}", verb, outcome.label, outcome.path.display());
    }
    let (created, updated, skipped, removed) = counts;
    if created + updated + skipped + removed > 0 {
        eprintln!(
            "Summary: Created {}, Updated {}, Skipped unchanged {}, Removed {}",
            created, updated, skipped, removed
        );
    }
}
/// Print one "Would ..." line per outcome on stderr.
fn eprint_preview_outcomes(outcomes: &[cli_surfaces::WriteOutcome]) {
    let describe = |status: &cli_surfaces::WriteStatus| match status {
        cli_surfaces::WriteStatus::Created => "Would create",
        cli_surfaces::WriteStatus::Updated => "Would update",
        cli_surfaces::WriteStatus::Skipped => "Would leave unchanged",
        cli_surfaces::WriteStatus::Removed => "Would remove",
    };
    for outcome in outcomes {
        eprintln!(
            "{} {}: {}",
            describe(&outcome.status),
            outcome.label,
            outcome.path.display()
        );
    }
}
/// Print one "Removed ..." line per sidecar/apply outcome; preview and
/// patch outcomes are silent.
fn print_remove_outcomes(outcomes: &[cli_surfaces::WriteOutcome]) {
    for outcome in outcomes {
        if matches!(
            outcome.mode,
            ArtifactMode::WriteSidecar | ArtifactMode::Apply
        ) {
            println!("Removed {}: {}", outcome.label, outcome.path.display());
        }
    }
}
fn ensure_profile_ready_for_agent_docs(
profile: &cli_surfaces::CliSurfaceProfile,
allow_low_confidence: bool,
) -> Result<()> {
let report = profile.quality_report();
if report.ready_for_agent_docs || allow_low_confidence {
return Ok(());
}
let reasons = if report.reasons.is_empty() {
"CLI profile confidence is too low for startup-doc generation.".to_string()
} else {
report
.reasons
.into_iter()
.map(|reason| format!("- {}", reason))
.collect::<Vec<_>>()
.join("\n")
};
Err(sxmc::error::SxmcError::Other(format!(
"Refusing to generate startup-facing agent docs from a low-confidence CLI profile.\n{}\nUse --allow-low-confidence to force generation or inspect with --depth 1 for a richer profile.",
reasons
)))
}
/// Options controlling `sxmc doctor` startup-file repair.
struct DoctorRepairOptions<'a> {
    /// Resolved install locations for the current scope/root.
    install_paths: &'a InstallPaths,
    /// Restrict repair to these hosts; empty means auto-detect.
    only_hosts: &'a [AiClientProfile],
    /// CLI tool whose profile drives regeneration.
    from_cli: &'a str,
    /// Inspection depth passed to `inspect_cli_with_depth`.
    depth: usize,
    /// Skills directory used when generating client configs.
    skills_path: &'a std::path::Path,
    /// Skip the profile-quality gate when true.
    allow_low_confidence: bool,
    /// Preview changes instead of writing them.
    dry_run: bool,
    /// Remove managed startup files instead of regenerating them.
    remove: bool,
}
/// Scan `text` for the managed-block marker "sxmc CLI Surface: `<command>`"
/// and return the first non-empty backtick-quoted command, if any.
fn extract_managed_cli_command(text: &str) -> Option<String> {
    const MARKER: &str = "sxmc CLI Surface: `";
    for line in text.lines() {
        let Some((_, tail)) = line.split_once(MARKER) else {
            continue;
        };
        // Everything up to the closing backtick is the candidate command.
        let command = tail.split('`').next().unwrap_or("").trim();
        if !command.is_empty() {
            return Some(command.to_string());
        }
    }
    None
}
fn infer_doctor_hosts(
install_paths: &InstallPaths,
only_hosts: &[AiClientProfile],
) -> Result<Vec<AiClientProfile>> {
if !only_hosts.is_empty() {
return Ok(only_hosts.to_vec());
}
let detected = auto_detect_add_hosts(install_paths);
if detected.is_empty() {
return Err(sxmc::error::SxmcError::Other(
"Could not infer which AI hosts to repair. Re-run with `--only <host>` or create host files first with `sxmc add <tool>`.".into(),
));
}
Ok(detected)
}
/// Determine which CLI tool `sxmc doctor` should regenerate startup docs from.
///
/// Resolution order: an explicit `--from-cli` value wins; otherwise scan the
/// existing startup docs for managed-block CLI markers; otherwise scan saved
/// profiles. Either scan succeeds only when it yields exactly one distinct
/// command — ambiguity (zero or several candidates) falls through to an error
/// rather than guessing.
fn infer_doctor_from_cli(
    install_paths: &InstallPaths,
    only_hosts: &[AiClientProfile],
    explicit: Option<&str>,
) -> Result<String> {
    if let Some(explicit) = explicit {
        return Ok(explicit.to_string());
    }
    // Pass 1: harvest "sxmc CLI Surface: `...`" markers from existing host
    // docs. BTreeSet deduplicates repeated commands across hosts.
    let mut commands = std::collections::BTreeSet::new();
    for (_, path) in doctor_startup_targets(install_paths, only_hosts) {
        if path.exists() {
            if let Ok(contents) = fs::read_to_string(&path) {
                if let Some(command) = extract_managed_cli_command(&contents) {
                    commands.insert(command);
                }
            }
        }
    }
    if commands.len() == 1 {
        return Ok(commands.into_iter().next().unwrap());
    }
    // Pass 2: fall back to saved CLI profiles, again requiring a single
    // distinct non-empty command.
    let profile_dir = default_saved_profiles_dir(install_paths);
    if profile_dir.exists() {
        let profile_paths = collect_profile_paths(std::slice::from_ref(&profile_dir), true)?;
        let mut profile_commands = std::collections::BTreeSet::new();
        for path in profile_paths {
            if let Ok(profile) = cli_surfaces::load_profile(&path) {
                if !profile.command.trim().is_empty() {
                    profile_commands.insert(profile.command);
                }
            }
        }
        if profile_commands.len() == 1 {
            return Ok(profile_commands.into_iter().next().unwrap());
        }
    }
    Err(sxmc::error::SxmcError::Other(
        "Could not infer which CLI profile to repair from. Re-run with `--from-cli <tool>` or save a single profile first with `sxmc add <tool>`.".into(),
    ))
}
/// Regenerate (or remove) the managed startup files that `sxmc doctor`
/// owns, driven by a freshly-inspected CLI profile.
fn repair_doctor_startup_files(
    options: DoctorRepairOptions<'_>,
) -> Result<Vec<cli_surfaces::WriteOutcome>> {
    let selected_hosts = infer_doctor_hosts(options.install_paths, options.only_hosts)?;
    let profile = cli_surfaces::inspect_cli_with_depth(options.from_cli, true, options.depth)?;
    let (artifacts, selected_hosts) = resolve_cli_ai_init_artifacts(
        &profile,
        AiCoverage::Full,
        None,
        &selected_hosts,
        options.install_paths,
        options.skills_path,
        ArtifactMode::Apply,
    )?;
    if options.remove {
        // Removal honours dry-run by downgrading to preview mode.
        let mode = if options.dry_run {
            ArtifactMode::Preview
        } else {
            ArtifactMode::Apply
        };
        return cli_surfaces::remove_artifacts_with_apply_selection(
            &artifacts,
            mode,
            options.install_paths,
            &selected_hosts,
        );
    }
    // Regeneration is gated on profile quality unless explicitly overridden.
    ensure_profile_ready_for_agent_docs(&profile, options.allow_low_confidence)?;
    if options.dry_run {
        cli_surfaces::preview_artifacts_with_apply_selection(
            &artifacts,
            ArtifactMode::Apply,
            options.install_paths,
            &selected_hosts,
        )
    } else {
        cli_surfaces::materialize_artifacts_with_apply_selection(
            &artifacts,
            ArtifactMode::Apply,
            options.install_paths,
            &selected_hosts,
        )
    }
}
fn require_cli_ai_client(
coverage: AiCoverage,
client: Option<AiClientProfile>,
) -> Result<AiClientProfile> {
match (coverage, client) {
(AiCoverage::Single, Some(client)) => Ok(client),
(AiCoverage::Single, None) => Err(sxmc::error::SxmcError::Other(
"Single-host CLI->AI generation requires --client".into(),
)),
(AiCoverage::Full, Some(client)) => Ok(client),
(AiCoverage::Full, None) => Ok(AiClientProfile::ClaudeCode),
}
}
fn validate_full_apply_hosts(
mode: ArtifactMode,
coverage: AiCoverage,
hosts: &[AiClientProfile],
) -> Result<()> {
if coverage == AiCoverage::Full && mode == ArtifactMode::Apply && hosts.is_empty() {
return Err(sxmc::error::SxmcError::Other(
"Full-coverage apply requires at least one --host so sxmc knows which native files to update".into(),
));
}
Ok(())
}
/// Human-facing display name for each supported AI client/host.
fn ai_client_display_name(client: AiClientProfile) -> &'static str {
    match client {
        AiClientProfile::ClaudeCode => "Claude Code",
        AiClientProfile::Cursor => "Cursor",
        AiClientProfile::GeminiCli => "Gemini CLI",
        AiClientProfile::GithubCopilot => "GitHub Copilot",
        AiClientProfile::ContinueDev => "Continue",
        AiClientProfile::OpenCode => "OpenCode",
        AiClientProfile::JetbrainsAiAssistant => "JetBrains AI Assistant",
        AiClientProfile::Junie => "Junie",
        AiClientProfile::Windsurf => "Windsurf",
        AiClientProfile::OpenaiCodex => "OpenAI/Codex",
        AiClientProfile::GenericStdioMcp => "Generic stdio MCP",
        AiClientProfile::GenericHttpMcp => "Generic HTTP MCP",
    }
}
/// Detect which concrete AI hosts appear to be in use: a host counts when it
/// has a native config file, a host-specific doc file (except for hosts that
/// share a doc file with others), or its runtime CLI is on PATH. The generic
/// MCP pseudo-hosts are never auto-detected.
fn auto_detect_add_hosts(install_paths: &InstallPaths) -> Vec<AiClientProfile> {
    let mut detected = Vec::new();
    for spec in cli_surfaces::AI_HOST_SPECS.iter() {
        let client = spec.client;
        if matches!(
            client,
            AiClientProfile::GenericStdioMcp | AiClientProfile::GenericHttpMcp
        ) {
            continue;
        }
        let has_config = install_paths
            .host_config_path(client)
            .map(|path| path.exists())
            .unwrap_or(false);
        let has_doc = install_paths
            .host_doc_path(client)
            .map(|path| path.exists())
            .unwrap_or(false);
        let runtime_detected = host_runtime_commands(client)
            .iter()
            .any(|command| command_exists_on_path(command));
        // These hosts share a doc file, so a doc alone is not evidence.
        let shared_doc_only = matches!(
            client,
            AiClientProfile::OpenCode | AiClientProfile::OpenaiCodex
        );
        if has_config || (has_doc && !shared_doc_only) || runtime_detected {
            detected.push(client);
        }
    }
    detected
}
/// Executable names whose presence on PATH suggests the host's runtime CLI
/// is installed locally; hosts with no detectable CLI return an empty slice.
fn host_runtime_commands(client: AiClientProfile) -> &'static [&'static str] {
    match client {
        AiClientProfile::ClaudeCode => &["claude", "claude-code"],
        AiClientProfile::GeminiCli => &["gemini", "gemini-cli"],
        AiClientProfile::OpenCode => &["opencode"],
        AiClientProfile::OpenaiCodex => &["codex"],
        _ => &[],
    }
}
/// Best-effort check that `command` resolves to an existing file.
///
/// Inputs with more than one path component are checked directly as files;
/// bare names are searched across `PATH`, additionally trying `PATHEXT`
/// suffixes on Windows (defaulting to exe/cmd/bat when `PATHEXT` is unset).
/// Note: only file existence is checked, not executable permission bits.
fn command_exists_on_path(command: &str) -> bool {
    let candidate = std::path::Path::new(command);
    if candidate.components().count() > 1 {
        return candidate.is_file();
    }
    let Some(path_var) = std::env::var_os("PATH") else {
        return false;
    };
    let windows_exts: Vec<String> = if cfg!(windows) {
        match std::env::var_os("PATHEXT") {
            Some(value) => value
                .to_string_lossy()
                .split(';')
                .map(|item| item.trim().trim_start_matches('.').to_ascii_lowercase())
                .filter(|item| !item.is_empty())
                .collect(),
            None => vec!["exe".into(), "cmd".into(), "bat".into()],
        }
    } else {
        Vec::new()
    };
    std::env::split_paths(&path_var).any(|dir| {
        if dir.join(command).is_file() {
            return true;
        }
        cfg!(windows)
            && windows_exts
                .iter()
                .any(|ext| dir.join(format!("{command}.{}", ext)).is_file())
    })
}
/// Probe a fixed list of popular developer tools and report the first
/// `limit` of them found on PATH.
fn detect_setup_tools(limit: usize) -> Vec<String> {
    const COMMON_SETUP_TOOLS: &[&str] = &[
        "git", "gh", "docker", "kubectl", "terraform", "cargo", "npm", "python3", "jq", "curl",
    ];
    let mut found = Vec::new();
    for tool in COMMON_SETUP_TOOLS {
        // Stop early once the cap is reached (also handles limit == 0).
        if found.len() == limit {
            break;
        }
        if command_exists_on_path(tool) {
            found.push((*tool).to_string());
        }
    }
    found
}
/// Comma-separated display names for a list of hosts.
fn host_label_list(hosts: &[AiClientProfile]) -> String {
    let labels: Vec<&str> = hosts
        .iter()
        .map(|host| ai_client_display_name(*host))
        .collect();
    labels.join(", ")
}
/// Lowercase `input` and collapse every run of non-ASCII-alphanumeric
/// characters into a single dash, trimming dashes from both ends.
fn slugify_label(input: &str) -> String {
    let mut slug = String::with_capacity(input.len());
    for ch in input.chars() {
        if ch.is_ascii_alphanumeric() {
            slug.push(ch.to_ascii_lowercase());
        } else if !slug.ends_with('-') {
            // First separator of a run; subsequent ones are dropped.
            slug.push('-');
        }
    }
    slug.trim_matches('-').to_string()
}
/// How a runtime MCP server should be registered with an AI client.
#[derive(Clone)]
enum RuntimeMcpRegistration {
    /// Launch the server as a local subprocess communicating over stdio.
    Stdio { command: String, args: Vec<String> },
}
/// Render a per-client MCP registration config for a stdio server.
///
/// OpenAI/Codex gets a TOML `[mcp_servers.<name>]` block; OpenCode gets its
/// `"mcp"` JSON shape with command and args flattened into one array; every
/// other client gets the common `"mcpServers"` JSON shape.
fn render_runtime_client_config(
    client: AiClientProfile,
    server_name: &str,
    registration: &RuntimeMcpRegistration,
) -> String {
    match (client, registration) {
        (AiClientProfile::OpenaiCodex, RuntimeMcpRegistration::Stdio { command, args }) => {
            let args_json = serde_json::to_string(args).unwrap_or_else(|_| "[]".into());
            // NOTE(review): `{command:?}` emits a Rust debug-escaped string,
            // which matches TOML quoting for typical commands but is not
            // guaranteed for exotic characters — confirm if commands can
            // contain non-ASCII or control characters.
            format!(
                "# sxmc MCP registration\n[mcp_servers.{server_name}]\ncommand = {command:?}\nargs = {args_json}\n"
            )
        }
        (AiClientProfile::OpenCode, RuntimeMcpRegistration::Stdio { command, args }) => {
            // OpenCode expects the full invocation as a single array.
            let mut full = vec![command.clone()];
            full.extend(args.clone());
            // Serializing a json! value cannot fail, so unwrap is safe here.
            serde_json::to_string_pretty(&json!({
                "mcp": {
                    server_name: {
                        "type": "local",
                        "command": full,
                    }
                }
            }))
            .unwrap()
        }
        (_, RuntimeMcpRegistration::Stdio { command, args }) => {
            // Serializing a json! value cannot fail, so unwrap is safe here.
            serde_json::to_string_pretty(&json!({
                "mcpServers": {
                    server_name: {
                        "command": command,
                        "args": args,
                    }
                }
            }))
            .unwrap()
        }
    }
}
/// Build the generated-config artifact that registers a runtime MCP server
/// with one client, erroring for clients without a native config target.
fn runtime_registration_artifact(
    client: AiClientProfile,
    root: &Path,
    server_name: &str,
    registration: &RuntimeMcpRegistration,
) -> Result<cli_surfaces::GeneratedArtifact> {
    // The same "no native target" error is needed in two places below.
    let no_target_error = || {
        sxmc::error::SxmcError::Other(format!(
            "{} does not have a native MCP config target in sxmc",
            ai_client_display_name(client)
        ))
    };
    let spec = cli_surfaces::host_profile_spec(client);
    let target = spec.native_config_target.ok_or_else(no_target_error)?;
    let apply_strategy = match spec.config_shape {
        Some(cli_surfaces::ConfigShape::JsonMcpServers)
        | Some(cli_surfaces::ConfigShape::JsonMcp) => cli_surfaces::ApplyStrategy::JsonMcpConfig,
        Some(cli_surfaces::ConfigShape::TomlMcpServers) => {
            cli_surfaces::ApplyStrategy::TomlManagedBlock
        }
        None => return Err(no_target_error()),
    };
    Ok(cli_surfaces::GeneratedArtifact {
        label: format!("{} MCP registration", spec.label),
        target_path: root.join(target),
        content: render_runtime_client_config(client, server_name, registration),
        apply_strategy,
        audience: cli_surfaces::ArtifactAudience::Client(client),
        sidecar_scope: spec.sidecar_scope.into(),
    })
}
/// Register a runtime MCP server with each requested host: preview mode
/// reports what would be written, any other mode materializes the configs.
/// No-op when no hosts were requested.
fn apply_runtime_registration(
    root: &Path,
    register_hosts: &[AiClientProfile],
    register_mode: ArtifactMode,
    register_name: Option<&str>,
    default_name: &str,
    registration: &RuntimeMcpRegistration,
) -> Result<()> {
    if register_hosts.is_empty() {
        return Ok(());
    }
    let server_name = register_name.unwrap_or(default_name);
    let mut artifacts = Vec::with_capacity(register_hosts.len());
    for host in register_hosts {
        artifacts.push(runtime_registration_artifact(
            *host,
            root,
            server_name,
            registration,
        )?);
    }
    let install_paths = InstallPaths::local(root.to_path_buf());
    match register_mode {
        ArtifactMode::Preview => {
            let outcomes = cli_surfaces::preview_artifacts_with_apply_selection(
                &artifacts,
                ArtifactMode::Apply,
                &install_paths,
                register_hosts,
            )?;
            eprint_preview_outcomes(&outcomes);
        }
        _ => {
            let outcomes = cli_surfaces::materialize_artifacts_with_apply_selection(
                &artifacts,
                register_mode,
                &install_paths,
                register_hosts,
            )?;
            eprint_write_outcomes(&outcomes);
        }
    }
    Ok(())
}
/// Load discovery snapshot inputs from `path`, keeping only the JSON values.
fn load_discovery_snapshots(path: &Path) -> Result<Vec<Value>> {
    let entries = discovery_snapshots::load_snapshot_inputs(path)?;
    Ok(entries.into_iter().map(|entry| entry.value).collect())
}
/// The snapshot's `source_type` field, or "unknown" when absent/non-string.
fn discovery_source_type(snapshot: &Value) -> &str {
    match snapshot["source_type"].as_str() {
        Some(source_type) => source_type,
        None => "unknown",
    }
}
/// Short human-readable label describing where a discovery snapshot came from.
fn discovery_label(snapshot: &Value) -> String {
    // Shorthand for the repeated "string field or fallback" lookups.
    let text = |key: &str, fallback: &str| -> String {
        snapshot[key].as_str().unwrap_or(fallback).to_string()
    };
    match discovery_source_type(snapshot) {
        "codebase" => format!("codebase at {}", text("root", "<unknown>")),
        "db" | "database" => format!(
            "{} database {}",
            text("database_type", "unknown"),
            text("source", "<unknown>")
        ),
        "graphql" => format!("GraphQL schema {}", text("url", "<unknown>")),
        "traffic" => format!(
            "{} traffic {}",
            text("capture_kind", "saved"),
            text("source", "<unknown>")
        ),
        other => other.to_string(),
    }
}
/// Short section-title noun for a discovery snapshot's source type.
fn discovery_kind_title(snapshot: &Value) -> &'static str {
    match discovery_source_type(snapshot) {
        "codebase" => "Codebase",
        "db" | "database" => "Database",
        "graphql" => "GraphQL",
        "traffic" => "Traffic",
        _ => "Discovery",
    }
}
/// Sidecar scope key for discovery artifacts, e.g. "discover-codebase".
fn discovery_sidecar_scope(snapshot: &Value) -> String {
    let mut scope = String::from("discover-");
    scope.push_str(discovery_source_type(snapshot));
    scope
}
/// Bullet-point highlight lines for a discovery snapshot, used inside the
/// generated agent docs. Each source type renders a short, capped list
/// (commands, tables, operations, endpoints) so startup docs stay small.
fn discovery_summary_lines(snapshot: &Value) -> Vec<String> {
    match discovery_source_type(snapshot) {
        "codebase" => {
            let kinds = snapshot["project_kinds"]
                .as_array()
                .map(|items| {
                    items
                        .iter()
                        .filter_map(Value::as_str)
                        .collect::<Vec<_>>()
                        .join(", ")
                })
                .unwrap_or_default();
            // Cap at 6 recommended commands to keep the doc concise.
            let commands = snapshot["recommended_commands"]
                .as_array()
                .map(|items| {
                    items
                        .iter()
                        .filter_map(|item| item["command"].as_str())
                        .take(6)
                        .map(|command| format!("- `{}`", command))
                        .collect::<Vec<_>>()
                })
                .unwrap_or_default();
            let mut lines = vec![format!(
                "- Project kinds: {}",
                if kinds.is_empty() {
                    "unknown".into()
                } else {
                    kinds
                }
            )];
            lines.push(format!(
                "- Manifests: {}, entrypoints: {}, configs: {}",
                snapshot["manifest_count"].as_u64().unwrap_or(0),
                snapshot["entrypoint_count"].as_u64().unwrap_or(0),
                snapshot["config_count"].as_u64().unwrap_or(0)
            ));
            if !commands.is_empty() {
                lines.push("- Recommended commands:".into());
                lines.extend(commands);
            }
            lines
        }
        "db" | "database" => {
            // Show at most 8 tables/views with their structural counts;
            // prefer the qualified name, falling back to the bare name.
            let entries = snapshot["entries"]
                .as_array()
                .map(|items| {
                    items
                        .iter()
                        .take(8)
                        .map(|entry| {
                            format!(
                                "- `{}`: {} columns, {} foreign keys, {} indexes",
                                entry["qualified_name"]
                                    .as_str()
                                    .or_else(|| entry["name"].as_str())
                                    .unwrap_or("<unknown>"),
                                entry["column_count"].as_u64().unwrap_or(0),
                                entry["foreign_key_count"].as_u64().unwrap_or(0),
                                entry["index_count"].as_u64().unwrap_or(0)
                            )
                        })
                        .collect::<Vec<_>>()
                })
                .unwrap_or_default();
            let mut lines = vec![format!(
                "- {} discovered tables/views",
                snapshot["count"].as_u64().unwrap_or(0)
            )];
            lines.extend(entries);
            lines
        }
        "graphql" => {
            // Show at most 10 operations with kind, name, and arg count.
            let operations = snapshot["operations"]
                .as_array()
                .map(|items| {
                    items
                        .iter()
                        .take(10)
                        .map(|operation| {
                            format!(
                                "- `{}` {} ({} args)",
                                operation["kind"].as_str().unwrap_or("op"),
                                operation["name"].as_str().unwrap_or("<unknown>"),
                                operation["arg_count"].as_u64().unwrap_or(0)
                            )
                        })
                        .collect::<Vec<_>>()
                })
                .unwrap_or_default();
            let mut lines = vec![format!(
                "- Query root: `{}`, mutation root: `{}`",
                snapshot["query_type"].as_str().unwrap_or("Query"),
                snapshot["mutation_type"].as_str().unwrap_or("Mutation")
            )];
            lines.push(format!(
                "- {} operations across {} types",
                snapshot["operation_count"].as_u64().unwrap_or(0),
                snapshot["type_count"].as_u64().unwrap_or(0)
            ));
            lines.extend(operations);
            lines
        }
        "traffic" => {
            // Show at most 10 method/host/path endpoints with request counts.
            let endpoints = snapshot["endpoints"]
                .as_array()
                .map(|items| {
                    items
                        .iter()
                        .take(10)
                        .map(|endpoint| {
                            format!(
                                "- `{}` {}{} ({} requests)",
                                endpoint["method"].as_str().unwrap_or("GET"),
                                endpoint["host"].as_str().unwrap_or("<host>"),
                                endpoint["path"].as_str().unwrap_or("/"),
                                endpoint["count"].as_u64().unwrap_or(0)
                            )
                        })
                        .collect::<Vec<_>>()
                })
                .unwrap_or_default();
            let mut lines = vec![format!(
                "- {} requests grouped into {} endpoints",
                snapshot["request_count"].as_u64().unwrap_or(0),
                snapshot["endpoint_count"].as_u64().unwrap_or(0)
            )];
            lines.extend(endpoints);
            lines
        }
        // Unknown source types fall back to reporting the schema marker.
        _ => vec![format!(
            "- Snapshot schema: `{}`",
            snapshot["discovery_schema"].as_str().unwrap_or("<unknown>")
        )],
    }
}
/// Render the host-specific managed markdown block that injects discovery
/// context into a single AI client's startup doc. The body is a heading,
/// the source label, highlight bullets, and fixed guidance lines ending
/// with the host's official reference URL.
fn render_discovery_agent_doc(snapshot: &Value, client: AiClientProfile) -> String {
    let spec = cli_surfaces::host_profile_spec(client);
    let mut lines = vec![
        format!(
            "## sxmc Discovery Context: {}",
            discovery_source_type(snapshot)
        ),
        String::new(),
        format!(
            "Use this discovered {} context as supporting repo knowledge for {}.",
            discovery_source_type(snapshot),
            spec.label
        ),
        String::new(),
        format!("Source: {}", discovery_label(snapshot)),
        String::new(),
        "Highlights:".into(),
    ];
    lines.extend(discovery_summary_lines(snapshot));
    lines.push(String::new());
    lines.push("Guidance:".into());
    lines.push(
        "- Treat this as discovered context, not as a replacement for live verification before write actions."
            .into(),
    );
    lines.push(
        "- Re-run the matching `sxmc discover ... --output <file>` command when the interface may have changed."
            .into(),
    );
    lines.push(
        "- Prefer the concrete commands, tables, operations, and endpoints listed here over guessed names."
            .into(),
    );
    lines.push(format!("- Reference: {}", spec.official_reference_url));
    lines.join("\n")
}
/// Render the host-agnostic (portable) variant of the discovery context doc,
/// written once and shared across all AI tools in the repo.
fn render_discovery_portable_agent_doc(snapshot: &Value) -> String {
    let mut lines = vec![
        format!(
            "## sxmc Discovery Context: {}",
            discovery_source_type(snapshot)
        ),
        String::new(),
        format!(
            "Use this discovered {} context as portable startup knowledge across AI tools in this repo.",
            discovery_source_type(snapshot)
        ),
        String::new(),
        format!("Source: {}", discovery_label(snapshot)),
        String::new(),
        "Highlights:".into(),
    ];
    lines.extend(discovery_summary_lines(snapshot));
    lines.push(String::new());
    lines.push("Recommended flow:".into());
    lines.push(
        "- Refresh the snapshot with `sxmc discover ... --output <file>` when the underlying surface changes."
            .into(),
    );
    lines.push(
        "- Keep bulky discovery JSON in files and feed concise context into startup docs.".into(),
    );
    lines.push(
        "- Use the discovered names here before improvising tables, operations, or endpoints."
            .into(),
    );
    lines.join("\n")
}
/// Build the portable (host-agnostic) agent-doc artifact for one snapshot,
/// targeting the shared portable doc path as a managed markdown block.
fn generate_discovery_portable_agent_doc_artifact(
    snapshot: &Value,
    install_paths: &InstallPaths,
) -> cli_surfaces::GeneratedArtifact {
    cli_surfaces::GeneratedArtifact {
        label: format!("Portable {} context", discovery_kind_title(snapshot)),
        target_path: install_paths.portable_agent_doc_path(),
        content: render_discovery_portable_agent_doc(snapshot),
        apply_strategy: cli_surfaces::ApplyStrategy::ManagedMarkdownBlock,
        audience: cli_surfaces::ArtifactAudience::Portable,
        sidecar_scope: discovery_sidecar_scope(snapshot),
    }
}
/// Build a host-specific agent-doc artifact for one snapshot. Hosts without
/// a native doc location fall back to the portable doc path.
fn generate_discovery_agent_doc_artifact(
    snapshot: &Value,
    client: AiClientProfile,
    install_paths: &InstallPaths,
) -> cli_surfaces::GeneratedArtifact {
    let spec = cli_surfaces::host_profile_spec(client);
    cli_surfaces::GeneratedArtifact {
        label: format!("{} {} context", spec.label, discovery_kind_title(snapshot)),
        target_path: install_paths
            .host_doc_path(client)
            .unwrap_or_else(|| install_paths.portable_agent_doc_path()),
        content: render_discovery_agent_doc(snapshot, client),
        apply_strategy: cli_surfaces::ApplyStrategy::ManagedMarkdownBlock,
        audience: cli_surfaces::ArtifactAudience::Client(client),
        sidecar_scope: discovery_sidecar_scope(snapshot),
    }
}
/// Build one agent-doc artifact per host that has a native doc target.
fn generate_discovery_host_native_agent_doc_artifacts(
    snapshot: &Value,
    install_paths: &InstallPaths,
) -> Vec<cli_surfaces::GeneratedArtifact> {
    let mut artifacts = Vec::new();
    for spec in cli_surfaces::AI_HOST_SPECS.iter() {
        if spec.native_doc_target.is_some() {
            artifacts.push(generate_discovery_agent_doc_artifact(
                snapshot,
                spec.client,
                install_paths,
            ));
        }
    }
    artifacts
}
/// Plan the artifacts for `init`-style discovery doc generation.
///
/// Single coverage emits one host-specific doc per snapshot for the required
/// `--client`; full coverage emits, per snapshot, the portable doc plus every
/// host-native doc, and echoes the requested hosts back for apply selection.
fn resolve_discovery_init_artifacts(
    snapshots: &[Value],
    coverage: AiCoverage,
    client: Option<AiClientProfile>,
    hosts: &[AiClientProfile],
    install_paths: &InstallPaths,
    mode: ArtifactMode,
) -> Result<(Vec<cli_surfaces::GeneratedArtifact>, Vec<AiClientProfile>)> {
    validate_full_apply_hosts(mode, coverage, hosts)?;
    match coverage {
        AiCoverage::Single => {
            let client = require_cli_ai_client(coverage, client)?;
            let mut artifacts = Vec::with_capacity(snapshots.len());
            for snapshot in snapshots {
                artifacts.push(generate_discovery_agent_doc_artifact(
                    snapshot,
                    client,
                    install_paths,
                ));
            }
            Ok((artifacts, vec![client]))
        }
        AiCoverage::Full => {
            let mut artifacts = Vec::new();
            for snapshot in snapshots {
                artifacts.push(generate_discovery_portable_agent_doc_artifact(
                    snapshot,
                    install_paths,
                ));
                artifacts.extend(generate_discovery_host_native_agent_doc_artifacts(
                    snapshot,
                    install_paths,
                ));
            }
            Ok((artifacts, hosts.to_vec()))
        }
    }
}
/// Resolve the full artifact set for `init` against a CLI surface profile.
///
/// `Single` coverage yields the saved profile, the client's agent doc, and —
/// when the client has a native MCP config target — its config artifact.
/// `Full` delegates to the full-coverage generator.
fn resolve_cli_ai_init_artifacts(
    profile: &cli_surfaces::CliSurfaceProfile,
    coverage: AiCoverage,
    client: Option<AiClientProfile>,
    hosts: &[AiClientProfile],
    install_paths: &InstallPaths,
    skills_path: &std::path::Path,
    mode: ArtifactMode,
) -> Result<(Vec<cli_surfaces::GeneratedArtifact>, Vec<AiClientProfile>)> {
    validate_full_apply_hosts(mode, coverage, hosts)?;
    match coverage {
        AiCoverage::Single => {
            let client = require_cli_ai_client(coverage, client)?;
            let mut artifacts = vec![
                cli_surfaces::generate_profile_artifact(profile, install_paths)?,
                cli_surfaces::generate_agent_doc_artifact(profile, client, install_paths),
            ];
            // Not every client has a native MCP config target; `None` is simply skipped.
            artifacts.extend(cli_surfaces::generate_client_config_artifact(
                profile,
                client,
                install_paths,
                skills_path,
            ));
            Ok((artifacts, vec![client]))
        }
        AiCoverage::Full => {
            let artifacts = cli_surfaces::generate_full_coverage_init_artifacts(
                profile,
                install_paths,
                skills_path,
            )?;
            Ok((artifacts, hosts.to_vec()))
        }
    }
}
/// Resolve only the agent-doc artifacts (no profile or MCP config) for a CLI
/// surface profile, honoring the requested coverage mode.
fn resolve_cli_ai_agent_doc_artifacts(
    profile: &cli_surfaces::CliSurfaceProfile,
    coverage: AiCoverage,
    client: Option<AiClientProfile>,
    hosts: &[AiClientProfile],
    install_paths: &InstallPaths,
    mode: ArtifactMode,
) -> Result<(Vec<cli_surfaces::GeneratedArtifact>, Vec<AiClientProfile>)> {
    validate_full_apply_hosts(mode, coverage, hosts)?;
    match coverage {
        AiCoverage::Single => {
            let client = require_cli_ai_client(coverage, client)?;
            let artifact =
                cli_surfaces::generate_agent_doc_artifact(profile, client, install_paths);
            Ok((vec![artifact], vec![client]))
        }
        AiCoverage::Full => {
            // Portable doc first, followed by every host-native agent doc.
            let portable =
                cli_surfaces::generate_portable_agent_doc_artifact(profile, install_paths);
            let artifacts: Vec<_> = std::iter::once(portable)
                .chain(cli_surfaces::generate_host_native_agent_doc_artifacts(
                    profile,
                    install_paths,
                ))
                .collect();
            Ok((artifacts, hosts.to_vec()))
        }
    }
}
/// Resolve MCP client-config artifacts for a CLI surface profile.
///
/// `Single` coverage errors when the selected client has no native config
/// target; `Full` emits a config for every supported client that has one.
fn resolve_cli_ai_client_config_artifacts(
    profile: &cli_surfaces::CliSurfaceProfile,
    coverage: AiCoverage,
    client: Option<AiClientProfile>,
    hosts: &[AiClientProfile],
    install_paths: &InstallPaths,
    skills_path: &std::path::Path,
    mode: ArtifactMode,
) -> Result<(Vec<cli_surfaces::GeneratedArtifact>, Vec<AiClientProfile>)> {
    validate_full_apply_hosts(mode, coverage, hosts)?;
    match coverage {
        AiCoverage::Single => {
            let client = require_cli_ai_client(coverage, client)?;
            let generated = cli_surfaces::generate_client_config_artifact(
                profile,
                client,
                install_paths,
                skills_path,
            );
            // `None` means the client has nowhere native to write an MCP config.
            let artifact = generated.ok_or_else(|| {
                sxmc::error::SxmcError::Other(format!(
                    "{} does not have a native MCP config target in sxmc",
                    ai_client_display_name(client)
                ))
            })?;
            Ok((vec![artifact], vec![client]))
        }
        AiCoverage::Full => {
            // Every client sxmc knows how to write a config for, including the
            // generic stdio/http MCP targets that are not AI hosts per se.
            const CONFIG_CLIENTS: [AiClientProfile; 12] = [
                AiClientProfile::ClaudeCode,
                AiClientProfile::Cursor,
                AiClientProfile::GeminiCli,
                AiClientProfile::GithubCopilot,
                AiClientProfile::ContinueDev,
                AiClientProfile::OpenCode,
                AiClientProfile::JetbrainsAiAssistant,
                AiClientProfile::Junie,
                AiClientProfile::Windsurf,
                AiClientProfile::OpenaiCodex,
                AiClientProfile::GenericStdioMcp,
                AiClientProfile::GenericHttpMcp,
            ];
            let artifacts = CONFIG_CLIENTS
                .into_iter()
                .filter_map(|client| {
                    cli_surfaces::generate_client_config_artifact(
                        profile,
                        client,
                        install_paths,
                        skills_path,
                    )
                })
                .collect();
            Ok((artifacts, hosts.to_vec()))
        }
    }
}
/// Effective generation depth of a profile: the deeper of the depth inferred
/// from its nested subcommand profiles and the depth recorded in provenance.
fn profile_generation_depth(profile: &cli_surfaces::CliSurfaceProfile) -> usize {
    // A profile with subcommands is one level deeper than its deepest child.
    let mut inferred_depth = 0usize;
    for sub in &profile.subcommand_profiles {
        inferred_depth = inferred_depth.max(profile_generation_depth(sub) + 1);
    }
    // Never report shallower than what provenance says was used at generation time.
    inferred_depth.max(profile.provenance.generation_depth as usize)
}
/// Extract the executable name from a profile's command spec.
///
/// Tries the shared spec parser first, then falls back to whitespace splitting,
/// and finally to the raw command string itself.
fn profile_command_executable(command: &str) -> String {
    if let Ok(parts) = cli_surfaces::parse_command_spec(command) {
        if let Some(first) = parts.first() {
            return first.clone();
        }
    }
    // Parser failed (or produced no tokens): best-effort whitespace split.
    match command.split_whitespace().next() {
        Some(token) => token.to_string(),
        None => command.to_string(),
    }
}
fn sync_state_value(
install_paths: &InstallPaths,
hosts: &[AiClientProfile],
mode: &str,
entries: &[Value],
) -> Value {
json!({
"sync_schema": SYNC_STATE_SCHEMA,
"root": install_paths.project_root().display().to_string(),
"install_scope": install_paths.scope().as_str(),
"profile_dir": default_saved_profiles_dir(install_paths).display().to_string(),
"state_path": default_sync_state_path(install_paths).display().to_string(),
"last_synced_at": Utc::now().to_rfc3339(),
"mode": mode,
"profile_count": entries.len(),
"host_count": hosts.len(),
"host_ids": hosts.iter().copied().map(ai_client_id).collect::<Vec<_>>(),
"entries": entries,
})
}
/// Refresh every saved CLI profile under the default profiles directory and,
/// for profiles that changed, regenerate the dependent AI-host artifacts.
///
/// In preview mode (`apply == false`) nothing is written; in apply mode the
/// refreshed profiles, regenerated artifacts, and the sync-state file are all
/// persisted. Returns a JSON report with per-profile entries, counters, and
/// write outcomes.
///
/// `allow_low_confidence` lets artifact regeneration proceed even when the
/// refreshed profile's quality report is below the startup-doc threshold.
fn sync_saved_profiles_value(
    install_paths: &InstallPaths,
    only_hosts: &[AiClientProfile],
    skills_path: &Path,
    apply: bool,
    allow_low_confidence: bool,
) -> Result<Value> {
    let profile_dir = default_saved_profiles_dir(install_paths);
    let state_path = default_sync_state_path(install_paths);
    // No explicit host filter means "every host we can auto-detect".
    let selected_hosts = if only_hosts.is_empty() {
        auto_detect_add_hosts(install_paths)
    } else {
        only_hosts.to_vec()
    };
    // A missing profiles directory is treated as "nothing to sync", not an error.
    let profile_paths = if profile_dir.exists() {
        collect_profile_paths(std::slice::from_ref(&profile_dir), true)?
    } else {
        Vec::new()
    };
    let mode = if apply {
        ArtifactMode::Apply
    } else {
        ArtifactMode::Preview
    };
    let mut entries = Vec::new();
    let mut profile_outcomes = Vec::new();
    let mut artifact_outcomes = Vec::new();
    let mut state_entries = Vec::new();
    let mut changed_count = 0usize;
    let mut unchanged_count = 0usize;
    let mut blocked_count = 0usize;
    let mut error_count = 0usize;
    for path in &profile_paths {
        match cli_surfaces::load_profile(path) {
            Ok(saved_profile) => {
                let depth = profile_generation_depth(&saved_profile);
                let executable = profile_command_executable(&saved_profile.command);
                let executable_fingerprint = cli_surfaces::executable_fingerprint(&executable);
                // Re-inspect the CLI at the same depth the saved profile used,
                // then diff the fresh result against what is on disk.
                match cli_surfaces::inspect_cli_with_depth(&saved_profile.command, true, depth) {
                    Ok(refreshed_profile) => {
                        let diff =
                            cli_surfaces::diff_profile_value(&saved_profile, &refreshed_profile);
                        let changed = diff_value_has_changes(&diff);
                        let quality = refreshed_profile.quality_report();
                        let ready_for_agent_docs =
                            quality.ready_for_agent_docs || allow_low_confidence;
                        // `state` may be downgraded later to "*_profile_only" when
                        // artifact regeneration is blocked by low profile quality.
                        let mut state = if changed {
                            changed_count += 1;
                            if apply {
                                "applied"
                            } else {
                                "pending"
                            }
                        } else {
                            unchanged_count += 1;
                            "unchanged"
                        };
                        let profile_outcome = sync_profile_write_outcome(path, mode, changed);
                        profile_outcomes.push(profile_outcome.clone());
                        // Persist the refreshed profile only when applying a real change.
                        if changed && apply {
                            if let Some(parent) = path.parent() {
                                fs::create_dir_all(parent)?;
                            }
                            fs::write(path, serde_json::to_string_pretty(&refreshed_profile)?)?;
                        }
                        let mut artifact_mode = "not_needed";
                        let mut blocked_reason = Value::Null;
                        let mut entry_artifact_outcomes = Vec::new();
                        // Artifacts are only regenerated for changed profiles when
                        // there is at least one host selected to receive them.
                        if changed && !selected_hosts.is_empty() {
                            if ready_for_agent_docs {
                                // NOTE: `selected_hosts` here shadows the outer binding
                                // with the host selection the resolver actually used.
                                let (mut artifacts, selected_hosts) =
                                    resolve_cli_ai_init_artifacts(
                                        &refreshed_profile,
                                        AiCoverage::Full,
                                        None,
                                        &selected_hosts,
                                        install_paths,
                                        skills_path,
                                        ArtifactMode::Apply,
                                    )?;
                                // The profile file itself was already written above;
                                // drop the duplicate "CLI profile" artifact.
                                artifacts.retain(|artifact| {
                                    !(artifact.label == "CLI profile"
                                        && artifact.sidecar_scope == "profiles")
                                });
                                entry_artifact_outcomes = if apply {
                                    cli_surfaces::materialize_artifacts_with_apply_selection(
                                        &artifacts,
                                        ArtifactMode::Apply,
                                        install_paths,
                                        &selected_hosts,
                                    )?
                                } else {
                                    cli_surfaces::preview_artifacts_with_apply_selection(
                                        &artifacts,
                                        ArtifactMode::Apply,
                                        install_paths,
                                        &selected_hosts,
                                    )?
                                };
                                artifact_mode = if apply { "applied" } else { "previewed" };
                            } else {
                                // Quality gate failed: the profile is (or would be)
                                // refreshed, but host artifacts are left untouched.
                                blocked_count += 1;
                                state = if apply {
                                    "applied_profile_only"
                                } else {
                                    "pending_profile_only"
                                };
                                artifact_mode = "blocked_low_confidence";
                                blocked_reason = Value::from(
                                    "Profile quality is below the startup-doc threshold. Re-run with --allow-low-confidence to refresh AI-host artifacts too.",
                                );
                            }
                        }
                        artifact_outcomes.extend(entry_artifact_outcomes.iter().cloned());
                        // The state entry describes what is on disk after this run:
                        // the refreshed profile if it changed, otherwise the saved one.
                        let final_profile = if changed {
                            refreshed_profile.clone()
                        } else {
                            saved_profile.clone()
                        };
                        state_entries.push(json!({
                            "command": final_profile.command,
                            "profile_path": path.display().to_string(),
                            "generation_depth": depth,
                            "executable": executable,
                            "executable_fingerprint": executable_fingerprint,
                            "profile_sha256": profile_sha256(&final_profile)?,
                            "generated_at": final_profile.provenance.generated_at,
                            "quality": {
                                "ready_for_agent_docs": final_profile.quality_report().ready_for_agent_docs,
                                "score": final_profile.quality_report().score,
                                "level": final_profile.quality_report().level,
                            },
                            "state": state,
                        }));
                        // The report entry is richer than the state entry: it carries
                        // the diff, write outcomes, and any blocking reason.
                        entries.push(json!({
                            "command": saved_profile.command,
                            "profile_path": path.display().to_string(),
                            "generation_depth": depth,
                            "executable": executable,
                            "executable_fingerprint": executable_fingerprint,
                            "changed": changed,
                            "state": state,
                            "profile": profile_summary_value(&refreshed_profile),
                            "diff": diff,
                            "profile_outcome": {
                                "label": profile_outcome.label,
                                "path": profile_outcome.path.display().to_string(),
                                "mode": artifact_mode_name(profile_outcome.mode),
                                "status": write_status_name(profile_outcome.status),
                            },
                            "artifact_mode": artifact_mode,
                            "artifact_outcomes": write_outcomes_value(&entry_artifact_outcomes),
                            "artifact_outcome_summary": write_outcome_summary_value(&entry_artifact_outcomes),
                            "blocked_reason": blocked_reason,
                        }));
                    }
                    // Re-inspection failed: record the error but keep syncing the rest.
                    Err(error) => {
                        error_count += 1;
                        entries.push(json!({
                            "command": saved_profile.command,
                            "profile_path": path.display().to_string(),
                            "generation_depth": depth,
                            "executable": executable,
                            "executable_fingerprint": executable_fingerprint,
                            "changed": Value::Null,
                            "state": "error",
                            "error": error.to_string(),
                        }));
                    }
                }
            }
            // The saved profile file itself could not be loaded/parsed.
            Err(error) => {
                error_count += 1;
                entries.push(json!({
                    "profile_path": path.display().to_string(),
                    "state": "error",
                    "error": error.to_string(),
                }));
            }
        }
    }
    let state_value = sync_state_value(
        install_paths,
        &selected_hosts,
        sync_mode_name(apply),
        &state_entries,
    );
    // Persist the sync-state document only in apply mode.
    if apply {
        if let Some(parent) = state_path.parent() {
            fs::create_dir_all(parent)?;
        }
        fs::write(&state_path, serde_json::to_string_pretty(&state_value)?)?;
    }
    Ok(json!({
        "command": "sync",
        "root": install_paths.project_root().display().to_string(),
        "install_scope": install_paths.scope().as_str(),
        "mode": sync_mode_name(apply),
        "state_path": state_path.display().to_string(),
        "profile_dir": profile_dir.display().to_string(),
        "host_ids": selected_hosts.iter().copied().map(ai_client_id).collect::<Vec<_>>(),
        "profile_count": profile_paths.len(),
        "changed_count": changed_count,
        "unchanged_count": unchanged_count,
        "blocked_count": blocked_count,
        "error_count": error_count,
        "profile_outcomes": write_outcomes_value(&profile_outcomes),
        "profile_outcome_summary": write_outcome_summary_value(&profile_outcomes),
        "artifact_outcomes": write_outcomes_value(&artifact_outcomes),
        "artifact_outcome_summary": write_outcome_summary_value(&artifact_outcomes),
        "entries": entries,
        "sync_state": state_value,
        // Nudge the user toward the apply form after a preview run.
        "recommended_command": if !apply {
            Value::from(format!("sxmc sync {} --apply", scope_command_hint(install_paths)))
        } else {
            Value::Null
        }
    }))
}
/// Render a human-readable summary of the JSON report produced by
/// `sync_saved_profiles_value`: headline counters, hosts, up to 8 changed
/// commands, up to 5 errors, and the recommended follow-up command.
fn format_sync_report(value: &Value) -> String {
    let mut lines: Vec<String> = Vec::new();
    lines.push(format!(
        "Root: {}",
        value["root"].as_str().unwrap_or("<unknown>")
    ));
    lines.push(format!(
        "Install scope: {}",
        value["install_scope"].as_str().unwrap_or("local")
    ));
    lines.push(format!("Mode: {}", value["mode"].as_str().unwrap_or("preview")));
    lines.push(format!(
        "Saved profiles: {} total, {} changed, {} unchanged, {} blocked, {} errors",
        value["profile_count"].as_u64().unwrap_or(0),
        value["changed_count"].as_u64().unwrap_or(0),
        value["unchanged_count"].as_u64().unwrap_or(0),
        value["blocked_count"].as_u64().unwrap_or(0),
        value["error_count"].as_u64().unwrap_or(0),
    ));
    if let Some(host_ids) = value["host_ids"].as_array() {
        let labels = host_ids
            .iter()
            .filter_map(Value::as_str)
            .collect::<Vec<_>>()
            .join(", ");
        let shown = if labels.is_empty() { "none".into() } else { labels };
        lines.push(format!("Hosts: {}", shown));
    }
    if let Some(last_synced_at) = value["sync_state"]["last_synced_at"].as_str() {
        lines.push(format!("State written: {}", last_synced_at));
    }
    if let Some(entries) = value["entries"].as_array() {
        // Stream through the entries, showing at most the first 8 changed commands.
        let mut shown = 0usize;
        for entry in entries {
            if !entry["changed"].as_bool().unwrap_or(false) {
                continue;
            }
            if shown == 0 {
                lines.push("Changed commands:".into());
            }
            lines.push(format!(
                "- {} ({})",
                entry["command"].as_str().unwrap_or("<unknown>"),
                entry["state"].as_str().unwrap_or("pending")
            ));
            if let Some(reason) = entry["blocked_reason"].as_str() {
                lines.push(format!(" note: {}", reason));
            }
            shown += 1;
            if shown == 8 {
                break;
            }
        }
        // Surface at most the first 5 error entries.
        let mut error_shown = 0usize;
        for entry in entries {
            if entry["error"].is_null() {
                continue;
            }
            if error_shown == 0 {
                lines.push("Errors:".into());
            }
            lines.push(format!(
                "- {}: {}",
                entry["command"].as_str().unwrap_or("<unknown>"),
                entry["error"].as_str().unwrap_or("unknown error")
            ));
            error_shown += 1;
            if error_shown == 5 {
                break;
            }
        }
    }
    if let Some(command) = value["recommended_command"].as_str() {
        lines.push(format!("Next: `{}`", command));
    }
    lines.join("\n")
}
#[tokio::main]
async fn main() -> Result<()> {
let cli = Cli::parse();
match cli.command {
Commands::Serve {
paths,
discovery_snapshots,
discovery_tool_manifests,
watch,
transport,
port,
host,
require_headers,
bearer_token,
max_concurrency,
max_request_bytes,
register_hosts,
register_root,
register_mode,
register_name,
} => {
let search_paths = resolve_paths(paths);
if !register_hosts.is_empty() && transport != "stdio" {
return Err(sxmc::error::SxmcError::Other(
"Automatic MCP registration currently supports stdio transport only for `sxmc serve`. Use `--transport stdio` or register the HTTP endpoint manually.".into(),
));
}
if !register_hosts.is_empty() {
let root = resolve_generation_root(register_root)?;
let search_paths_arg = search_paths
.iter()
.map(|path| path.display().to_string())
.collect::<Vec<_>>()
.join(",");
let mut args = vec!["serve".to_string()];
if !search_paths_arg.is_empty() {
args.push("--paths".into());
args.push(search_paths_arg);
}
if !discovery_snapshots.is_empty() {
let snapshot_arg = discovery_snapshots
.iter()
.map(|path| path.display().to_string())
.collect::<Vec<_>>()
.join(",");
args.push("--discovery-snapshot".into());
args.push(snapshot_arg);
}
if !discovery_tool_manifests.is_empty() {
let manifest_arg = discovery_tool_manifests
.iter()
.map(|path| path.display().to_string())
.collect::<Vec<_>>()
.join(",");
args.push("--discovery-tool-manifest".into());
args.push(manifest_arg);
}
if watch {
args.push("--watch".into());
}
let registration = RuntimeMcpRegistration::Stdio {
command: "sxmc".into(),
args,
};
apply_runtime_registration(
&root,
®ister_hosts,
register_mode,
register_name.as_deref(),
"sxmc-serve",
®istration,
)?;
}
let required_headers = parse_headers(&require_headers)?;
let bearer_token = parse_optional_secret(bearer_token)?;
let limits = HttpServeLimits {
max_concurrency,
max_request_body_bytes: max_request_bytes,
};
match transport.as_str() {
"stdio" => {
if !required_headers.is_empty() || bearer_token.is_some() {
eprintln!(
"[sxmc] Warning: remote auth flags are ignored for stdio transport"
);
}
server::serve_stdio(
&search_paths,
&discovery_snapshots,
&discovery_tool_manifests,
watch,
)
.await?
}
"http" | "sse" => {
server::serve_http(
&search_paths,
&discovery_snapshots,
&discovery_tool_manifests,
&host,
port,
&required_headers,
bearer_token.as_deref(),
watch,
limits,
)
.await?
}
other => {
eprintln!("[sxmc] Unknown transport: {}", other);
std::process::exit(1);
}
}
}
Commands::Wrap {
command,
depth,
transport,
port,
host,
timeout_seconds,
progress_seconds,
working_dir,
max_stdout_bytes,
max_stderr_bytes,
execution_history_limit,
allow_tools,
deny_tools,
allow_options,
deny_options,
allow_positionals,
deny_positionals,
require_headers,
bearer_token,
max_concurrency,
max_request_bytes,
allow_self,
register_hosts,
register_root,
register_mode,
register_name,
} => {
let profile = cli_surfaces::inspect_cli_with_depth(&command, allow_self, depth)?;
if !register_hosts.is_empty() && transport != "stdio" {
return Err(sxmc::error::SxmcError::Other(
"Automatic MCP registration currently supports stdio transport only for `sxmc wrap`. Use the default stdio transport or register the HTTP endpoint manually.".into(),
));
}
if !register_hosts.is_empty() {
let root = resolve_generation_root(register_root)?;
let mut args = vec!["wrap".to_string(), command.clone()];
if depth != 1 {
args.push("--depth".into());
args.push(depth.to_string());
}
if timeout_seconds != 30 {
args.push("--timeout-seconds".into());
args.push(timeout_seconds.to_string());
}
if progress_seconds != 0 {
args.push("--progress-seconds".into());
args.push(progress_seconds.to_string());
}
if let Some(path) = &working_dir {
args.push("--working-dir".into());
args.push(path.display().to_string());
}
if max_stdout_bytes != 256 * 1024 {
args.push("--max-stdout-bytes".into());
args.push(max_stdout_bytes.to_string());
}
if max_stderr_bytes != 128 * 1024 {
args.push("--max-stderr-bytes".into());
args.push(max_stderr_bytes.to_string());
}
if execution_history_limit != 25 {
args.push("--execution-history-limit".into());
args.push(execution_history_limit.to_string());
}
if !allow_tools.is_empty() {
args.push("--allow-tool".into());
args.push(allow_tools.join(","));
}
if !deny_tools.is_empty() {
args.push("--deny-tool".into());
args.push(deny_tools.join(","));
}
if !allow_options.is_empty() {
args.push("--allow-option".into());
args.push(allow_options.join(","));
}
if !deny_options.is_empty() {
args.push("--deny-option".into());
args.push(deny_options.join(","));
}
if !allow_positionals.is_empty() {
args.push("--allow-positional".into());
args.push(allow_positionals.join(","));
}
if !deny_positionals.is_empty() {
args.push("--deny-positional".into());
args.push(deny_positionals.join(","));
}
if allow_self {
args.push("--allow-self".into());
}
let registration = RuntimeMcpRegistration::Stdio {
command: "sxmc".into(),
args,
};
apply_runtime_registration(
&root,
®ister_hosts,
register_mode,
register_name.as_deref(),
&format!("sxmc-wrap-{}", slugify_label(&profile.command)),
®istration,
)?;
}
let working_dir = working_dir.map(|path| {
if path.is_absolute() {
path
} else {
std::env::current_dir()
.unwrap_or_else(|_| PathBuf::from("."))
.join(path)
}
});
let server = server::build_wrapped_cli_server(
&command,
&profile,
server::WrappedCliOptions {
timeout_secs: timeout_seconds,
progress_secs: progress_seconds,
working_dir: working_dir.map(|path| path.display().to_string()),
max_stdout_bytes,
max_stderr_bytes,
execution_history_limit,
allow_tools,
deny_tools,
allow_options,
deny_options,
allow_positionals,
deny_positionals,
},
)?;
for message in server.skipped_interactive_messages() {
eprintln!("[sxmc] {}", message);
}
let required_headers = parse_headers(&require_headers)?;
let bearer_token = parse_optional_secret(bearer_token)?;
let limits = HttpServeLimits {
max_concurrency,
max_request_body_bytes: max_request_bytes,
};
match transport.as_str() {
"stdio" => {
if !required_headers.is_empty() || bearer_token.is_some() {
eprintln!(
"[sxmc] Warning: remote auth flags are ignored for stdio transport"
);
}
server::serve_wrapped_cli_stdio(server).await?
}
"http" | "sse" => {
server::serve_wrapped_cli_http(
server,
&host,
port,
&required_headers,
bearer_token.as_deref(),
limits,
)
.await?
}
other => {
eprintln!("[sxmc] Unknown transport: {}", other);
std::process::exit(1);
}
}
}
Commands::Skills { action } => match action {
SkillsAction::List {
paths,
installed,
skills_path,
local,
global,
root,
json,
names_only,
counts_only,
no_descriptions,
fields,
offset,
limit,
} => {
let resolved_paths = if installed {
let install_paths = resolve_install_paths(
resolve_skills_install_root(root),
global,
local,
)?;
vec![install_paths.resolve_skills_path(&skills_path)]
} else {
resolve_paths(paths)
};
cmd_skills_list(
&resolved_paths,
SkillListOptions {
json_output: json,
installed_only: installed,
names_only,
counts_only,
no_descriptions,
fields: fields.as_deref(),
offset,
limit,
},
)?;
}
SkillsAction::Info {
name,
paths,
summary_only,
} => {
cmd_skills_info(&resolve_paths(paths), &name, summary_only)?;
}
SkillsAction::Run {
paths,
script,
env_vars,
print_body,
name,
arguments,
} => {
cmd_skills_run(
&resolve_paths(paths),
&name,
script.as_deref(),
&env_vars,
print_body,
&arguments,
)
.await?;
}
SkillsAction::Create {
source,
output_dir,
auth_headers,
} => {
let headers = parse_headers(&auth_headers)?;
let skill_dir =
generator::generate_from_openapi(&source, &output_dir, &headers).await?;
println!("Generated skill at: {}", skill_dir.display());
}
SkillsAction::Install {
source,
path,
r#ref,
skills_path,
local,
global,
root,
} => {
let install_paths =
resolve_install_paths(resolve_skills_install_root(root), global, local)?;
let report = skill_install::install_skill(skill_install::SkillInstallRequest {
source: &source,
repo_subpath: path.as_deref(),
reference: r#ref.as_deref(),
install_paths: &install_paths,
skills_path: &skills_path,
})?;
println!(
"Installed skill `{}` to {} ({})",
report.name,
report.target_dir.display(),
report.install_scope.as_str()
);
}
SkillsAction::Update {
name,
skills_path,
local,
global,
root,
} => {
let install_paths =
resolve_install_paths(resolve_skills_install_root(root), global, local)?;
let reports = skill_install::update_skills(skill_install::SkillUpdateRequest {
name: name.as_deref(),
install_paths: &install_paths,
skills_path: &skills_path,
})?;
for report in reports {
println!(
"Updated skill `{}` at {} ({})",
report.name,
report.target_dir.display(),
report.install_scope.as_str()
);
}
}
},
Commands::Stdio {
command,
prompt,
resource_uri,
args,
list,
list_tools,
list_prompts,
list_resources,
search,
describe,
describe_tool,
format,
limit,
pretty,
env_vars,
cwd,
} => {
let env = parse_env_vars(&env_vars);
let client = ConnectedMcpClient::Stdio(
mcp_stdio::StdioClient::connect(&command, &env, cwd.as_deref()).await?,
);
let request = McpBridgeRequest {
prompt: prompt.as_deref(),
resource_uri: resource_uri.as_deref(),
args: &args,
list,
list_tools,
list_prompts,
list_resources,
search: search.as_deref(),
describe,
describe_tool: describe_tool.as_deref(),
format,
limit,
pretty,
};
let result = run_mcp_bridge_command(&client, request).await;
finish_connected_mcp_client(client, result).await?;
}
Commands::Http {
url,
prompt,
resource_uri,
args,
list,
list_tools,
list_prompts,
list_resources,
search,
describe,
describe_tool,
format,
limit,
pretty,
auth_headers,
timeout_seconds,
} => {
let headers = parse_headers(&auth_headers)?;
let client = ConnectedMcpClient::Http(
mcp_http::HttpClient::connect(&url, &headers, parse_timeout(timeout_seconds))
.await?,
);
let request = McpBridgeRequest {
prompt: prompt.as_deref(),
resource_uri: resource_uri.as_deref(),
args: &args,
list,
list_tools,
list_prompts,
list_resources,
search: search.as_deref(),
describe,
describe_tool: describe_tool.as_deref(),
format,
limit,
pretty,
};
let result = run_mcp_bridge_command(&client, request).await;
finish_connected_mcp_client(client, result).await?;
}
Commands::Mcp { action } => match action {
McpAction::Servers { pretty, format } => {
let store = BakeStore::load()?;
let servers = baked_mcp_servers(&store);
if let Some(format) = output::prefer_structured_output(format, pretty) {
let value = Value::Array(
servers
.iter()
.map(|config| {
json!({
"name": config.name,
"transport": match config.source_type {
SourceType::Stdio => "stdio",
SourceType::Http => "http",
_ => "unsupported",
},
"source": config.source,
"description": config.description,
})
})
.collect(),
);
println!("{}", output::format_structured_value(&value, format));
} else if servers.is_empty() {
println!("No baked MCP servers found.");
println!("Create one with: sxmc bake create NAME --type stdio --source '...'");
} else {
println!("MCP servers ({}):", servers.len());
for config in servers {
let transport = match config.source_type {
SourceType::Stdio => "stdio",
SourceType::Http => "http",
_ => "unsupported",
};
println!(" {} [{}]", config.name, transport);
if let Some(description) = &config.description {
println!(" {}", description);
}
}
}
}
McpAction::Tools {
server,
search,
limit,
} => {
let client = connect_named_baked_mcp_client(&server).await?;
let result = print_mcp_tools(&client, search.as_deref(), limit).await;
finish_connected_mcp_client(client, result).await?;
}
McpAction::Grep {
pattern,
server,
limit,
} => {
let store = BakeStore::load()?;
let mut results: Vec<(String, Tool)> = Vec::new();
let configs: Vec<BakeConfig> = if let Some(server) = server {
vec![get_baked_mcp_server(&store, &server)?]
} else {
baked_mcp_servers(&store).into_iter().cloned().collect()
};
for config in configs {
let server_name = config.name.clone();
let client = ConnectedMcpClient::connect(&config).await?;
let tools = client.list_tools().await?;
client.close().await?;
let pattern_lower = pattern.to_lowercase();
for tool in tools {
let name = tool.name.as_ref().to_lowercase();
let desc = tool.description.as_deref().unwrap_or("").to_lowercase();
if name.contains(&pattern_lower) || desc.contains(&pattern_lower) {
results.push((server_name.clone(), tool));
}
}
}
results.sort_by(|a, b| {
a.0.cmp(&b.0)
.then_with(|| a.1.name.as_ref().cmp(b.1.name.as_ref()))
});
println!("{}", format_mcp_grep_results(&results, &pattern, limit));
}
McpAction::Prompts { server, limit } => {
let client = connect_named_baked_mcp_client(&server).await?;
let result = print_mcp_prompts(&client, limit).await;
finish_connected_mcp_client(client, result).await?;
}
McpAction::Resources { server, limit } => {
let client = connect_named_baked_mcp_client(&server).await?;
let result = print_mcp_resources(&client, limit).await;
finish_connected_mcp_client(client, result).await?;
}
McpAction::Info {
target,
pretty,
format,
} => {
let (server, tool_name) = split_server_target(&target)?;
let client = connect_named_baked_mcp_client(server).await?;
let result = print_mcp_tool_info(&client, tool_name, pretty, format).await;
finish_connected_mcp_client(client, result).await?;
}
McpAction::Call {
target,
payload,
pretty,
} => {
let (server, tool_name) = split_server_target(&target)?;
let client = connect_named_baked_mcp_client(server).await?;
let result = call_mcp_tool(
&client,
tool_name,
payload,
pretty,
&format!("sxmc mcp info {}/{} --format toon", server, tool_name),
Some(&format!("sxmc mcp session {}", server)),
)
.await;
finish_connected_mcp_client(client, result).await?;
}
McpAction::Read { target, pretty } => {
let (server, resource_uri) = split_server_target(&target)?;
let client = connect_named_baked_mcp_client(server).await?;
let result = read_mcp_resource(&client, resource_uri, pretty).await;
finish_connected_mcp_client(client, result).await?;
}
McpAction::Prompt {
target,
args,
pretty,
} => {
let (server, prompt_name) = split_server_target(&target)?;
let client = connect_named_baked_mcp_client(server).await?;
let result = fetch_mcp_prompt(&client, prompt_name, &args, pretty).await;
finish_connected_mcp_client(client, result).await?;
}
McpAction::Session {
server,
script,
quiet,
} => {
let client = connect_named_baked_mcp_client(&server).await?;
let result = if let Some(script) = script {
let file = std::fs::File::open(&script).map_err(|e| {
sxmc::error::SxmcError::Other(format!(
"Failed to open session script '{}': {}",
script.display(),
e
))
})?;
let reader = std::io::BufReader::new(file);
run_mcp_session(&client, reader, quiet).await
} else {
let stdin = std::io::stdin();
let reader = stdin.lock();
run_mcp_session(&client, reader, quiet).await
};
finish_connected_mcp_client(client, result).await?;
}
},
Commands::Discover { action } => match action {
DiscoverAction::Cli {
command,
depth,
compact,
pretty,
format,
allow_self,
} => {
let profile = cli_surfaces::inspect_cli_with_depth(&command, allow_self, depth)?;
let value = if compact {
cli_surfaces::compact_profile_value(&profile)
} else {
cli_surfaces::profile_value(&profile)
};
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
let format = output::resolve_structured_format(format, pretty);
println!("{}", output::format_structured_value(&value, format));
}
}
DiscoverAction::Api {
source,
operation,
args,
list,
search,
compact,
names_only,
required_only,
counts_only,
no_descriptions,
offset,
limit,
fields,
pretty,
format,
auth_headers,
timeout_seconds,
} => {
let headers = parse_headers(&auth_headers)?;
let client =
api::ApiClient::connect(&source, &headers, parse_timeout(timeout_seconds))
.await?;
if should_print_api_detection_banner(format, pretty) {
eprintln!("[sxmc] Detected {} API", client.api_type());
}
let arguments = parse_string_kv_args(&args);
cmd_api(
&client,
operation,
&arguments,
ApiCommandOptions {
list,
search: search.as_deref(),
compact,
names_only,
required_only,
counts_only,
no_descriptions,
offset,
limit,
fields: fields.as_deref(),
pretty,
format,
},
)
.await?;
}
DiscoverAction::Graphql {
url,
operation,
args,
list,
search,
names_only,
required_only,
counts_only,
no_descriptions,
offset,
limit,
fields,
schema,
type_name,
output,
pretty,
format,
auth_headers,
timeout_seconds,
} => {
let headers = parse_headers(&auth_headers)?;
let gql =
graphql::GraphQLClient::connect(&url, &headers, parse_timeout(timeout_seconds))
.await?;
if schema {
let value = gql.schema_summary_value(search.as_deref());
if let Some(path) = output.as_ref() {
if let Some(parent) = path.parent() {
if !parent.as_os_str().is_empty() {
fs::create_dir_all(parent)?;
}
}
fs::write(path, serde_json::to_string_pretty(&value)?)?;
}
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
println!(
"GraphQL schema at {}: {} types, {} operations",
value["url"].as_str().unwrap_or("<unknown>"),
value["type_count"].as_u64().unwrap_or(0),
value["operation_count"].as_u64().unwrap_or(0)
);
}
} else if let Some(type_name) = type_name {
let value = gql.type_value(&type_name).ok_or_else(|| {
sxmc::error::SxmcError::Other(format!(
"GraphQL type '{}' was not found at '{}'.",
type_name, url
))
})?;
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
println!(
"GraphQL type {} at {} ({} fields, {} input fields, {} enum values)",
value["name"].as_str().unwrap_or("<unknown>"),
value["url"].as_str().unwrap_or("<unknown>"),
value["field_count"].as_u64().unwrap_or(0),
value["input_field_count"].as_u64().unwrap_or(0),
value["enum_value_count"].as_u64().unwrap_or(0)
);
}
} else {
let client = api::ApiClient::GraphQL(gql);
let arguments = parse_string_kv_args(&args);
cmd_api(
&client,
operation,
&arguments,
ApiCommandOptions {
list,
search: search.as_deref(),
compact: false,
names_only,
required_only,
counts_only,
no_descriptions,
offset,
limit,
fields: fields.as_deref(),
pretty,
format,
},
)
.await?;
}
}
// `discover graphql-diff`: compare a saved GraphQL schema snapshot against
// another snapshot (--after) or a live endpoint (--url) and report the diff.
DiscoverAction::GraphqlDiff {
before,
after,
url,
auth_headers,
timeout_seconds,
exit_code,
pretty,
format,
} => {
let before_value = graphql::load_graphql_schema_snapshot(&before)?;
// "after" side: prefer an on-disk snapshot; otherwise introspect the
// live endpoint named by --url.
let after_value = if let Some(after_path) = after.as_ref() {
graphql::load_graphql_schema_snapshot(after_path)?
} else {
let url = url.ok_or_else(|| {
sxmc::error::SxmcError::Other(
"discover graphql-diff requires either --after <snapshot.json> or --url <endpoint>".into(),
)
})?;
let headers = parse_headers(&auth_headers)?;
let gql = graphql::GraphQLClient::connect(
&url,
&headers,
parse_timeout(timeout_seconds),
)
.await?;
gql.schema_summary_value(None)
};
let value = graphql::diff_graphql_schema_value(&before_value, &after_value);
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
print_graphql_diff_report(&value);
}
// --exit-code: exit 1 when the schema changed, for CI gating.
if exit_code && graphql_diff_has_changes(&value) {
std::process::exit(1);
}
}
// `discover db`: introspect a SQLite/Postgres database and print (and
// optionally write with --output) the discovered schema.
DiscoverAction::Db {
source,
table,
list: _,
database_type,
search,
output,
compact,
counts_only,
offset,
limit,
fields,
pretty,
format,
} => {
// Map the CLI enum to the string tags the inspector expects.
let database_type = database_type.map(|value| match value {
cli_args::DbDiscoveryType::Sqlite => "sqlite",
cli_args::DbDiscoveryType::Postgres => "postgres",
});
let value = database::inspect_database(
&source,
database_type,
table.as_deref(),
search.as_deref(),
compact,
)?;
let value =
project_discovery_value(value, counts_only, fields.as_deref(), offset, limit);
// --output: persist the full JSON snapshot in addition to printing.
if let Some(path) = output.as_ref() {
if let Some(parent) = path.parent() {
if !parent.as_os_str().is_empty() {
fs::create_dir_all(parent)?;
}
}
fs::write(path, serde_json::to_string_pretty(&value)?)?;
}
// Projection flags (--counts-only / --fields) force structured output;
// otherwise honour an explicit structured format, else print the
// human-readable report.
if counts_only || fields.is_some() {
let format = output::resolve_structured_format(format, pretty);
println!("{}", output::format_structured_value(&value, format));
} else if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
print_db_discovery_report(&value);
}
}
// `discover codebase`: walk a source tree (default: current working
// directory) and summarize its structure.
DiscoverAction::Codebase {
root,
output,
compact,
counts_only,
offset,
limit,
fields,
pretty,
format,
} => {
let root = root.unwrap_or(std::env::current_dir()?);
let value = project_discovery_value(
codebase::inspect_codebase(&root, compact)?,
counts_only,
fields.as_deref(),
offset,
limit,
);
// --output: persist the full JSON snapshot in addition to printing.
if let Some(path) = output.as_ref() {
if let Some(parent) = path.parent() {
if !parent.as_os_str().is_empty() {
fs::create_dir_all(parent)?;
}
}
fs::write(path, serde_json::to_string_pretty(&value)?)?;
}
// Projection flags force structured output; otherwise honour an explicit
// structured format, else print the human-readable report.
if counts_only || fields.is_some() {
let format = output::resolve_structured_format(format, pretty);
println!("{}", output::format_structured_value(&value, format));
} else if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
print_codebase_discovery_report(&value);
}
}
// `discover codebase-diff`: diff a saved codebase snapshot against another
// snapshot (--after) or a fresh scan of the tree.
DiscoverAction::CodebaseDiff {
before,
after,
root,
exit_code,
pretty,
format,
} => {
let before_value = codebase::load_codebase_snapshot(&before)?;
let after_value = if let Some(after_path) = after.as_ref() {
codebase::load_codebase_snapshot(after_path)?
} else {
// No --after snapshot: re-inspect the tree (non-compact) right now.
let root = root.unwrap_or(std::env::current_dir()?);
codebase::inspect_codebase(&root, false)?
};
let value = codebase::diff_codebase_value(&before_value, &after_value);
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
print_codebase_diff_report(&value);
}
// --exit-code: exit 1 when the codebase changed, for CI gating.
if exit_code && codebase_diff_has_changes(&value) {
std::process::exit(1);
}
}
// `discover traffic`: inspect a captured-traffic source (e.g. HAR file or
// curl history) and summarize the observed endpoints.
DiscoverAction::Traffic {
source,
endpoint,
output,
list: _,
search,
compact,
counts_only,
offset,
limit,
fields,
pretty,
format,
} => {
let value = project_discovery_value(
traffic::inspect_traffic_source(
&source,
endpoint.as_deref(),
search.as_deref(),
compact,
)?,
counts_only,
fields.as_deref(),
offset,
limit,
);
// --output: persist the full JSON snapshot in addition to printing.
if let Some(path) = output.as_ref() {
if let Some(parent) = path.parent() {
if !parent.as_os_str().is_empty() {
fs::create_dir_all(parent)?;
}
}
fs::write(path, serde_json::to_string_pretty(&value)?)?;
}
// Projection flags force structured output; otherwise honour an explicit
// structured format, else print the human-readable report.
if counts_only || fields.is_some() {
let format = output::resolve_structured_format(format, pretty);
println!("{}", output::format_structured_value(&value, format));
} else if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
print_traffic_discovery_report(&value);
}
}
// `discover traffic-diff`: diff a saved traffic snapshot against another
// snapshot (--after) or a freshly-inspected capture (--source).
DiscoverAction::TrafficDiff {
before,
after,
source,
exit_code,
pretty,
format,
} => {
let before_value = traffic::load_traffic_snapshot(&before)?;
let after_value = if let Some(after_path) = after.as_ref() {
traffic::load_traffic_snapshot(after_path)?
} else {
let source = source.ok_or_else(|| {
sxmc::error::SxmcError::Other(
"discover traffic-diff requires either --after <snapshot.json> or --source <capture.har|curl-history.txt>".into(),
)
})?;
traffic::inspect_traffic_source(&source, None, None, false)?
};
let value = traffic::diff_traffic_value(&before_value, &after_value);
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
print_traffic_diff_report(&value);
}
// --exit-code: exit 1 when the traffic profile changed, for CI gating.
if exit_code && traffic_diff_has_changes(&value) {
std::process::exit(1);
}
}
},
// `api`: auto-detect the API type behind `source`, connect, then delegate
// listing/searching/invocation to the shared `cmd_api` handler.
Commands::Api {
source,
operation,
args,
list,
search,
compact,
names_only,
required_only,
counts_only,
no_descriptions,
offset,
limit,
fields,
pretty,
format,
auth_headers,
timeout_seconds,
} => {
let headers = parse_headers(&auth_headers)?;
let client =
api::ApiClient::connect(&source, &headers, parse_timeout(timeout_seconds)).await?;
// Banner goes to stderr so stdout stays machine-parseable; presumably
// suppressed when structured output is requested — see
// should_print_api_detection_banner.
if should_print_api_detection_banner(format, pretty) {
eprintln!("[sxmc] Detected {} API", client.api_type());
}
let arguments = parse_string_kv_args(&args);
cmd_api(
&client,
operation,
&arguments,
ApiCommandOptions {
list,
search: search.as_deref(),
compact,
names_only,
required_only,
counts_only,
no_descriptions,
offset,
limit,
fields: fields.as_deref(),
pretty,
format,
},
)
.await?;
}
// `spec`: like `api`, but loads an explicit OpenAPI spec instead of
// auto-detecting the API type.
Commands::Spec {
source,
operation,
args,
list,
search,
compact,
names_only,
required_only,
counts_only,
no_descriptions,
offset,
limit,
fields,
pretty,
format,
auth_headers,
timeout_seconds,
} => {
let headers = parse_headers(&auth_headers)?;
let spec =
openapi::OpenApiSpec::load(&source, &headers, parse_timeout(timeout_seconds))
.await?;
// Stderr banner, same convention as the `api` arm above.
if should_print_api_detection_banner(format, pretty) {
eprintln!("[sxmc] Loaded OpenAPI spec: {}", spec.title);
}
let client = api::ApiClient::OpenApi(spec);
let arguments = parse_string_kv_args(&args);
cmd_api(
&client,
operation,
&arguments,
ApiCommandOptions {
list,
search: search.as_deref(),
compact,
names_only,
required_only,
counts_only,
no_descriptions,
offset,
limit,
fields: fields.as_deref(),
pretty,
format,
},
)
.await?;
}
// `graphql`: connect to a GraphQL endpoint, then do one of three things:
// print a schema summary (--schema), describe one type (--type), or fall
// through to the generic `cmd_api` operation handler.
Commands::Graphql {
url,
operation,
args,
list,
search,
names_only,
required_only,
counts_only,
no_descriptions,
offset,
limit,
fields,
schema,
type_name,
output,
pretty,
format,
auth_headers,
timeout_seconds,
} => {
let headers = parse_headers(&auth_headers)?;
let gql =
graphql::GraphQLClient::connect(&url, &headers, parse_timeout(timeout_seconds))
.await?;
if schema {
let value = gql.schema_summary_value(search.as_deref());
// --output: persist the schema snapshot (usable later by
// `discover graphql-diff`).
if let Some(path) = output.as_ref() {
if let Some(parent) = path.parent() {
if !parent.as_os_str().is_empty() {
fs::create_dir_all(parent)?;
}
}
fs::write(path, serde_json::to_string_pretty(&value)?)?;
}
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
println!(
"GraphQL schema at {}: {} types, {} operations",
value["url"].as_str().unwrap_or("<unknown>"),
value["type_count"].as_u64().unwrap_or(0),
value["operation_count"].as_u64().unwrap_or(0)
);
}
} else if let Some(type_name) = type_name {
// --type: look up a single named type; absence is a hard error.
let value = gql.type_value(&type_name).ok_or_else(|| {
sxmc::error::SxmcError::Other(format!(
"GraphQL type '{}' was not found at '{}'.",
type_name, url
))
})?;
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
println!(
"GraphQL type {} at {} ({} fields, {} input fields, {} enum values)",
value["name"].as_str().unwrap_or("<unknown>"),
value["url"].as_str().unwrap_or("<unknown>"),
value["field_count"].as_u64().unwrap_or(0),
value["input_field_count"].as_u64().unwrap_or(0),
value["enum_value_count"].as_u64().unwrap_or(0)
);
}
} else {
let client = api::ApiClient::GraphQL(gql);
let arguments = parse_string_kv_args(&args);
cmd_api(
&client,
operation,
&arguments,
ApiCommandOptions {
list,
search: search.as_deref(),
// This arm has no --compact flag, so compact is always false
// here (unlike the `api`/`spec` arms).
compact: false,
names_only,
required_only,
counts_only,
no_descriptions,
offset,
limit,
fields: fields.as_deref(),
pretty,
format,
},
)
.await?;
}
}
// `scan`: run security scans against exactly one of three targets —
// an MCP stdio server (--mcp-stdio), an MCP HTTP server (--mcp), or
// skills discovered on disk (default).
Commands::Scan {
paths,
skill,
mcp_stdio: mcp_stdio_cmd,
mcp,
severity,
json,
env_vars,
} => {
// Map the CLI severity string to the filter floor; unknown values fall
// back to Info (i.e. show everything).
let min_severity = match severity.to_lowercase().as_str() {
"critical" => security::Severity::Critical,
"error" => security::Severity::Error,
"warn" | "warning" => security::Severity::Warning,
_ => security::Severity::Info,
};
let mut reports = Vec::new();
if let Some(ref mcp_cmd) = mcp_stdio_cmd {
// Scan the tool surface of a stdio-launched MCP server.
let env = parse_env_vars(&env_vars);
let client = mcp_stdio::StdioClient::connect(mcp_cmd, &env, None).await?;
let tools = client.list_tools().await?;
let report = security::mcp_scanner::scan_tools(&tools, mcp_cmd);
reports.push(report);
client.close().await?;
} else if let Some(ref mcp_url) = mcp {
// Scan the tool surface of an MCP server over HTTP.
let client = mcp_http::HttpClient::connect(mcp_url, &[], None).await?;
let tools = client.list_tools().await?;
let report = security::mcp_scanner::scan_tools(&tools, mcp_url);
reports.push(report);
client.close().await?;
} else {
// Default: discover skills under the given paths and scan each one,
// optionally narrowed to a single --skill by name.
let search_paths = resolve_paths(paths);
let skill_dirs = discovery::discover_skills(&search_paths)?;
for dir in &skill_dirs {
let source = dir.parent().and_then(|p| p.to_str()).unwrap_or("unknown");
// Unparseable skills are silently skipped (best-effort scan).
if let Ok(parsed_skill) = parser::parse_skill(dir, source) {
if let Some(ref target_name) = skill {
if parsed_skill.name != *target_name {
continue;
}
}
let report = security::skill_scanner::scan_skill(&parsed_skill);
reports.push(report);
}
}
}
let mut exit_code = 0;
if json {
// JSON mode: a single report is emitted bare; multiple reports are
// wrapped in an envelope with counts.
let rendered_reports: Vec<Value> = reports
.iter()
.map(|report| report.filtered(min_severity).format_json())
.collect();
let json_value = if rendered_reports.len() == 1 {
rendered_reports
.into_iter()
.next()
.unwrap_or_else(|| json!({}))
} else {
json!({
"severity": severity,
"count": rendered_reports.len(),
"reports": rendered_reports,
})
};
println!("{}", serde_json::to_string_pretty(&json_value)?);
} else {
for report in &reports {
let filtered_report = report.filtered(min_severity);
if filtered_report.is_clean() {
println!(
"[PASS] {} — no issues at severity >= {}",
report.target, severity
);
} else {
println!("{}", filtered_report.format_text());
if filtered_report.has_errors() {
exit_code = 1;
}
}
}
}
// NOTE(review): exit_code is only raised in the text branch above, so
// --json always exits 0 even when errors are present — confirm intended.
if reports.is_empty() {
if skill.is_some() {
eprintln!("Skill not found");
std::process::exit(1);
}
println!("No skills found to scan.");
}
if exit_code != 0 {
std::process::exit(exit_code);
}
}
Commands::Inspect { action } => match action {
// `inspect cli`: profile a single CLI command's surface (flags, subcommands)
// to the given recursion depth, optionally in compact form.
InspectAction::Cli {
command,
depth,
compact,
pretty,
format,
allow_self,
} => {
let profile = cli_surfaces::inspect_cli_with_depth(&command, allow_self, depth)?;
let value = if compact {
cli_surfaces::compact_profile_value(&profile)
} else {
cli_surfaces::profile_value(&profile)
};
// Both branches print structured output; the else resolves a default
// format when none was explicitly preferred. NOTE(review): the two
// branches look equivalent if resolve_* agrees with prefer_* — confirm.
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
let format = output::resolve_structured_format(format, pretty);
println!("{}", output::format_structured_value(&value, format));
}
}
// `inspect batch`: profile many CLI commands in one run. Supports NDJSON
// streaming (events printed and profile files written as they arrive) or a
// buffered run with all output at the end; optionally writes per-profile
// files plus a manifest into --output-dir.
InspectAction::Batch {
commands,
from_file,
retry_failed,
output_dir,
overwrite,
skip_existing,
depth,
since,
parallel,
progress,
compact,
pretty,
format,
allow_self,
} => {
let mut requests = cli_surfaces::load_batch_requests(
&commands,
from_file.as_deref(),
retry_failed.as_deref(),
)?;
// A request depth of 0 means "unset": fill in the CLI-level --depth.
for request in &mut requests {
if request.depth == 0 {
request.depth = depth;
}
}
if requests.is_empty() {
return Err(sxmc::error::SxmcError::Other(
"inspect batch requires at least one command spec or --from-file input"
.into(),
));
}
let since_filter = since
.as_deref()
.map(cli_surfaces::parse_batch_since_filter)
.transpose()?;
let mut written_profiles = Vec::new();
// Tracks filename-slug collisions so Unique mode can de-duplicate.
let mut slug_counts = HashMap::new();
// --overwrite / --skip-existing / default-unique naming, in that priority.
let write_mode = if overwrite {
BatchOutputWriteMode::Overwrite
} else if skip_existing {
BatchOutputWriteMode::SkipExisting
} else {
BatchOutputWriteMode::Unique
};
// Absolutize --output-dir against the current working directory.
let output_dir = output_dir.map(|path| {
if path.is_absolute() {
path
} else {
std::env::current_dir()
.unwrap_or_else(|_| PathBuf::from("."))
.join(path)
}
});
let preferred_format = output::prefer_structured_output(format, pretty);
let mut value = if matches!(
preferred_format,
Some(output::StructuredOutputFormat::Ndjson)
) {
// Streaming path: the callback writes profile files (when an output
// dir is set) and prints each event as an NDJSON line. After the
// first write failure, later events are neither written nor printed;
// the error is surfaced once the batch finishes.
let output_dir_ref = output_dir.clone();
let mut stream_error: Option<sxmc::error::SxmcError> = None;
let value = cli_surfaces::inspect_cli_batch_with_callback(
&requests,
allow_self,
parallel,
progress,
since_filter.as_ref(),
|event| {
if stream_error.is_some() {
return;
}
if let Some(dir) = output_dir_ref.as_ref() {
if event["type"] == "profile" {
match write_batch_profile_file(
dir,
event["command"].as_str().unwrap_or("<unknown>"),
&event["profile"],
compact,
&mut slug_counts,
write_mode,
) {
Ok(metadata) => written_profiles.push(metadata),
Err(error) => stream_error = Some(error),
}
}
}
println!(
"{}",
output::format_structured_value(
&batch_event_for_output(event, compact),
output::StructuredOutputFormat::Ndjson,
)
);
},
);
if let Some(error) = stream_error {
return Err(error);
}
value
} else {
cli_surfaces::inspect_cli_batch(
&requests,
allow_self,
parallel,
progress,
since_filter.as_ref(),
)
};
if let Some(dir) = output_dir.as_ref() {
// Non-streaming path: write all profile files now (streaming already
// wrote them inside the callback).
if !matches!(
preferred_format,
Some(output::StructuredOutputFormat::Ndjson)
) {
if let Some(profiles) = value["profiles"].as_array() {
for profile in profiles {
written_profiles.push(write_batch_profile_file(
dir,
profile["command"].as_str().unwrap_or("<unknown>"),
profile,
compact,
&mut slug_counts,
write_mode,
)?);
}
}
}
// Record what was written, then emit a manifest and note its path in
// the result value.
attach_batch_output_dir_metadata(&mut value, dir, &written_profiles);
let manifest_path = write_batch_manifest_file(dir, &value)?;
if let Some(object) = value.as_object_mut() {
object.insert(
"written_manifest_path".into(),
Value::String(manifest_path.display().to_string()),
);
}
}
if let Some(format) = output::prefer_structured_output(format, pretty) {
// NDJSON: profiles were already streamed; close with a summary line.
if matches!(format, output::StructuredOutputFormat::Ndjson) {
println!(
"{}",
output::format_structured_value(
&json!({
"type": "summary",
"count": value["count"],
"inspected_count": value["inspected_count"],
"parallelism": value["parallelism"],
"success_count": value["success_count"],
"failed_count": value["failed_count"],
"skipped_count": value["skipped_count"],
"output_dir": value.get("output_dir").cloned().unwrap_or(Value::Null),
"written_profile_count": value.get("written_profile_count").cloned().unwrap_or(Value::from(0)),
"skipped_existing_count": value.get("skipped_existing_count").cloned().unwrap_or(Value::from(0)),
"written_manifest_path": value.get("written_manifest_path").cloned().unwrap_or(Value::Null),
}),
output::StructuredOutputFormat::Ndjson,
)
);
return Ok(());
}
if matches!(format, output::StructuredOutputFormat::Toon) {
println!("{}", format_batch_toon(&value, compact));
return Ok(());
}
// --compact: re-parse each profile into its typed form and re-render
// the compact representation; profiles that fail to parse are dropped.
let rendered = if compact {
let compact_profiles = value["profiles"]
.as_array()
.into_iter()
.flatten()
.filter_map(|profile| {
serde_json::from_value::<cli_surfaces::CliSurfaceProfile>(
profile.clone(),
)
.ok()
})
.map(|profile| cli_surfaces::compact_profile_value(&profile))
.collect::<Vec<_>>();
let compact_value = json!({
"count": value["count"],
"parallelism": value["parallelism"],
"success_count": value["success_count"],
"failed_count": value["failed_count"],
"skipped_count": value["skipped_count"],
"profiles": compact_profiles,
"failures": value["failures"],
"skipped": value["skipped"],
});
output::format_structured_value(&compact_value, format)
} else {
output::format_structured_value(&value, format)
};
println!("{rendered}");
} else {
print_batch_inspect_report(&value, compact);
}
}
// `inspect diff`: diff a saved profile against another saved profile
// (--after) or a live re-inspection of <command>; optionally re-run on an
// interval (--watch) and reprint only when the diff changes.
InspectAction::Diff {
command,
before,
after,
depth,
exit_code,
watch,
pretty,
format,
allow_self,
} => {
let render_format = resolve_diff_output_format(format, pretty);
// One diff pass, reusable by both the one-shot and watch paths.
let render_once = || -> Result<Value> {
let before_profile = cli_surfaces::load_profile(&before)?;
let after_profile = if let Some(after_path) = after.as_ref() {
cli_surfaces::load_profile(after_path)?
} else {
let command = command.as_deref().ok_or_else(|| {
sxmc::error::SxmcError::Other(
"inspect diff requires either a live <command> or `--after <profile.json>`".into(),
)
})?;
cli_surfaces::inspect_cli_with_depth(command, allow_self, depth)?
};
Ok(cli_surfaces::diff_profile_value(
&before_profile,
&after_profile,
))
};
if let Some(interval) = watch {
// Watch mode: poll forever (interval floored at 1s); this loop only
// ends via an error (`?`) or process termination. --exit-code is not
// consulted in watch mode.
let interval = Duration::from_secs(interval.max(1));
let mut last_rendered = None::<String>;
loop {
let value = render_once()?;
let rendered = diff_display_value(&value, render_format);
// Only reprint when the rendered diff actually changed.
if last_rendered.as_ref() != Some(&rendered) {
println!("{rendered}");
println!();
std::io::stdout().flush()?;
last_rendered = Some(rendered);
}
std::thread::sleep(interval);
}
} else {
let value = render_once()?;
println!("{}", diff_display_value(&value, render_format));
// --exit-code: exit 1 when the profiles differ, for CI gating.
if exit_code && diff_value_has_changes(&value) {
std::process::exit(1);
}
}
}
// `inspect profile`: load a saved profile from disk and pretty-print it,
// optionally in compact form.
InspectAction::Profile {
input,
compact,
pretty,
format,
} => {
let profile = cli_surfaces::load_profile(&input)?;
let value = if compact {
cli_surfaces::compact_profile_value(&profile)
} else {
cli_surfaces::profile_value(&profile)
};
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
let format = output::resolve_structured_format(format, pretty);
println!("{}", output::format_structured_value(&value, format));
}
}
// `inspect migrate-profile`: load a profile (upgrading its schema via
// load_profile) and either write the migrated JSON to --output with a short
// report, or print the migrated profile itself.
InspectAction::MigrateProfile {
input,
output: migrate_output,
pretty,
format,
} => {
let profile = cli_surfaces::load_profile(&input)?;
let value = cli_surfaces::profile_value(&profile);
if let Some(path) = migrate_output {
// Absolutize relative --output against the current directory.
let path = if path.is_absolute() {
path
} else {
std::env::current_dir()
.unwrap_or_else(|_| PathBuf::from("."))
.join(path)
};
if let Some(parent) = path.parent() {
fs::create_dir_all(parent)?;
}
fs::write(&path, serde_json::to_string_pretty(&value)?)?;
let report = json!({
"command": profile.command,
"input": input.display().to_string(),
"output": path.display().to_string(),
"profile_schema": profile.profile_schema,
});
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&report, format));
} else {
print_migrated_profile_report(&report);
}
} else if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
let format = output::resolve_structured_format(format, pretty);
println!("{}", output::format_structured_value(&value, format));
}
}
// `inspect drift`: re-inspect commands behind saved profiles and report
// which ones have drifted; defaults to the saved-profiles dir, recursively.
InspectAction::Drift {
inputs,
root,
recursive,
exit_code,
pretty,
format,
allow_self,
} => {
let root = resolve_generation_root(root)?;
// With no explicit inputs, scan the default saved-profiles directory
// (implicitly recursive).
let use_default_recursive = inputs.is_empty();
let profile_inputs = if use_default_recursive {
vec![default_saved_profiles_dir(&root)]
} else {
inputs
.into_iter()
.map(|path| {
if path.is_absolute() {
path
} else {
root.join(path)
}
})
.collect::<Vec<_>>()
};
let profile_paths =
collect_profile_paths(&profile_inputs, recursive || use_default_recursive)?;
let value = drift_value(&profile_paths, allow_self);
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
print_drift_report(&value);
}
// --exit-code: exit 1 when any profile drifted, for CI gating.
if exit_code && value["changed_count"].as_u64().unwrap_or(0) > 0 {
std::process::exit(1);
}
}
// `inspect bundle-export`: collect saved profiles into a signed (optional
// HMAC secret and/or Ed25519 key) bundle file and report its digest.
InspectAction::BundleExport {
inputs,
root,
recursive,
bundle_name,
description,
role,
hosts,
output,
signature_secret,
signing_key,
pretty,
format,
} => {
let root = resolve_generation_root(root)?;
let signature_secret = parse_optional_secret(signature_secret)?;
// Relative signing-key paths are resolved against the generation root.
let signing_key = signing_key.map(|path| {
if path.is_absolute() {
path
} else {
root.join(path)
}
});
// With no explicit inputs, scan the default saved-profiles directory
// (implicitly recursive).
let use_default_recursive = inputs.is_empty();
let profile_inputs = if use_default_recursive {
vec![default_saved_profiles_dir(&root)]
} else {
inputs
.into_iter()
.map(|path| {
if path.is_absolute() {
path
} else {
root.join(path)
}
})
.collect::<Vec<_>>()
};
let profile_paths =
collect_profile_paths(&profile_inputs, recursive || use_default_recursive)?;
// Build the bundle value, then sign it with whichever credentials were
// supplied (secret and/or key may each be absent).
let value = sign_bundle_value(
export_profile_bundle_value(
&profile_paths,
bundle_name.as_deref(),
description.as_deref(),
role.as_deref(),
&hosts,
)?,
signature_secret.as_deref(),
signing_key.as_deref(),
)?;
let output_path = if output.is_absolute() {
output
} else {
root.join(output)
};
if let Some(parent) = output_path.parent() {
fs::create_dir_all(parent)?;
}
fs::write(&output_path, serde_json::to_string_pretty(&value)?)?;
let sha256 = bundle_sha256_from_value(&value)?;
let report = json!({
"bundle_schema": PROFILE_BUNDLE_SCHEMA,
"output": output_path.display().to_string(),
"profile_count": value["profile_count"],
"sha256": sha256,
"signature": bundle_signature_report(&value),
"metadata": value["metadata"],
"entries": value["entries"],
});
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&report, format));
} else {
println!(
"Exported {} profiles to {}",
report["profile_count"].as_u64().unwrap_or(0),
report["output"].as_str().unwrap_or("<unknown>")
);
}
}
// `inspect bundle-import`: unpack a bundle's profiles into a directory,
// with overwrite / skip-existing / unique-name collision handling.
InspectAction::BundleImport {
input,
root,
output_dir,
overwrite,
skip_existing,
pretty,
format,
} => {
let root = resolve_generation_root(root)?;
let output_dir = output_dir.unwrap_or_else(|| default_saved_profiles_dir(&root));
let output_dir = if output_dir.is_absolute() {
output_dir
} else {
root.join(output_dir)
};
let mode = if overwrite {
BundleImportMode::Overwrite
} else if skip_existing {
BundleImportMode::SkipExisting
} else {
BundleImportMode::Unique
};
let value = import_profile_bundle_value(&input, &output_dir, mode)?;
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
println!(
"Imported {} profiles into {} ({} skipped)",
value["imported_count"].as_u64().unwrap_or(0),
value["output_dir"].as_str().unwrap_or("<unknown>"),
value["skipped_count"].as_u64().unwrap_or(0)
);
}
}
// `inspect bundle-verify`: fetch a bundle (path or URL), check its SHA-256
// digest (optionally against --expected-sha256) and its signature.
InspectAction::BundleVerify {
input,
auth_headers,
timeout_seconds,
expected_sha256,
signature_secret,
public_key,
pretty,
format,
} => {
let headers = parse_headers(&auth_headers)?;
let signature_secret = parse_optional_secret(signature_secret)?;
let public_key = public_key.as_deref();
let bundle_value =
read_bundle_source(&input, &headers, parse_timeout(timeout_seconds)).await?;
// Digest and signature checks are fallible; reaching the report below
// means both passed ("verified": true).
let sha256 =
verify_bundle_digest(&bundle_value, expected_sha256.as_deref(), &input)?;
let signature = verify_bundle_signature(
&bundle_value,
signature_secret.as_deref(),
public_key,
&input,
)?;
let report = json!({
"bundle_schema": PROFILE_BUNDLE_SCHEMA,
"input": input,
"sha256": sha256,
"signature": signature,
"verified": true,
"expected_sha256": expected_sha256,
"profile_count": bundle_value["profile_count"],
"metadata": bundle_value.get("metadata").cloned().unwrap_or(Value::Null),
});
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&report, format));
} else {
println!(
"Verified bundle {} ({} profiles)",
report["input"].as_str().unwrap_or("<unknown>"),
report["profile_count"].as_u64().unwrap_or(0)
);
}
}
// `inspect bundle-keygen`: generate an Ed25519 keypair for bundle signing
// into --output-dir.
InspectAction::BundleKeygen {
output_dir,
pretty,
format,
} => {
let value = generate_bundle_keypair_value(&output_dir)?;
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
println!(
"Generated Ed25519 bundle signing keys in {}",
output_dir.display()
);
}
}
// `inspect export-corpus`: flatten saved profiles into a corpus document,
// rendered to stdout or written to --output; NDJSON emits only the entries.
InspectAction::ExportCorpus {
inputs,
root,
recursive,
output,
pretty,
format,
} => {
let root = resolve_generation_root(root)?;
// With no explicit inputs, scan the default saved-profiles directory
// (implicitly recursive).
let use_default_recursive = inputs.is_empty();
let profile_inputs = if use_default_recursive {
vec![default_saved_profiles_dir(&root)]
} else {
inputs
.into_iter()
.map(|path| {
if path.is_absolute() {
path
} else {
root.join(path)
}
})
.collect::<Vec<_>>()
};
let profile_paths =
collect_profile_paths(&profile_inputs, recursive || use_default_recursive)?;
let value = export_profile_corpus_value(&profile_paths);
let render_format = output::resolve_structured_format(format, pretty);
// NDJSON drops the envelope and renders just the "entries" array, one
// entry per line; other formats render the whole corpus value.
let rendered = if matches!(render_format, output::StructuredOutputFormat::Ndjson) {
let entries =
Value::Array(value["entries"].as_array().cloned().unwrap_or_default());
output::format_structured_value(
&entries,
output::StructuredOutputFormat::Ndjson,
)
} else {
output::format_structured_value(&value, render_format)
};
if let Some(output_path) = output {
let output_path = if output_path.is_absolute() {
output_path
} else {
root.join(output_path)
};
if let Some(parent) = output_path.parent() {
fs::create_dir_all(parent)?;
}
// The rendered corpus goes to the file; stdout gets a short report.
fs::write(&output_path, rendered)?;
let report = json!({
"corpus_schema": PROFILE_CORPUS_SCHEMA,
"output": output_path.display().to_string(),
"count": value["count"],
"error_count": value["error_count"],
});
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&report, format));
} else {
println!(
"Exported {} corpus entries to {}",
report["count"].as_u64().unwrap_or(0),
report["output"].as_str().unwrap_or("<unknown>")
);
}
} else {
println!("{rendered}");
}
}
// `inspect corpus-stats`: summarize an exported corpus file.
InspectAction::CorpusStats {
input,
pretty,
format,
} => {
let value = load_corpus_value(&input)?;
let stats = corpus_stats_value(&value);
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&stats, format));
} else {
print_corpus_stats_report(&stats);
}
}
// `inspect corpus-query`: filter corpus entries by command and/or search
// term, capped at --limit results.
InspectAction::CorpusQuery {
input,
command,
search,
limit,
pretty,
format,
} => {
let value = load_corpus_value(&input)?;
let query =
corpus_query_value(&value, command.as_deref(), search.as_deref(), limit);
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&query, format));
} else {
print_corpus_query_report(&query);
}
}
// `inspect known-good`: pick the best ("known good") profile for a command
// from the given input source.
InspectAction::KnownGood {
input,
command,
pretty,
format,
} => {
let value = known_good_value(&input, &command)?;
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
println!(
"Selected known-good profile for {} from {}",
command,
value["selected"]["source"].as_str().unwrap_or("<unknown>")
);
}
}
// `inspect trust-report`: verify a bundle's digest and signature, then
// produce a quality/trust summary for it.
InspectAction::TrustReport {
input,
auth_headers,
timeout_seconds,
expected_sha256,
signature_secret,
public_key,
pretty,
format,
} => {
let headers = parse_headers(&auth_headers)?;
let signature_secret = parse_optional_secret(signature_secret)?;
let bundle_value =
read_bundle_source(&input, &headers, parse_timeout(timeout_seconds)).await?;
// Both checks are fallible; the report is only built if they pass.
let sha256 =
verify_bundle_digest(&bundle_value, expected_sha256.as_deref(), &input)?;
let signature = verify_bundle_signature(
&bundle_value,
signature_secret.as_deref(),
public_key.as_deref(),
&input,
)?;
let report = trust_report_value(
&input,
&bundle_value,
sha256,
signature,
expected_sha256.as_deref(),
);
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&report, format));
} else {
println!(
"Trust report for {}: {} profiles, avg quality {:.1}",
report["input"].as_str().unwrap_or("<unknown>"),
report["quality"]["profile_count"].as_u64().unwrap_or(0),
report["quality"]["average_quality_score"]
.as_f64()
.unwrap_or(0.0)
);
}
}
// `inspect trust-policy`: build the same trust report, then evaluate it
// against user-supplied policy thresholds; --exit-code makes a failed
// policy exit 1 for CI gating.
InspectAction::TrustPolicy {
input,
auth_headers,
timeout_seconds,
expected_sha256,
signature_secret,
public_key,
require_signature,
require_verified_signature,
min_average_quality,
max_stale_count,
min_ready_count,
require_role,
require_hosts,
exit_code,
pretty,
format,
} => {
let headers = parse_headers(&auth_headers)?;
let signature_secret = parse_optional_secret(signature_secret)?;
let bundle_value =
read_bundle_source(&input, &headers, parse_timeout(timeout_seconds)).await?;
let sha256 =
verify_bundle_digest(&bundle_value, expected_sha256.as_deref(), &input)?;
let signature = verify_bundle_signature(
&bundle_value,
signature_secret.as_deref(),
public_key.as_deref(),
&input,
)?;
let report = trust_report_value(
&input,
&bundle_value,
sha256,
signature,
expected_sha256.as_deref(),
);
let policy = trust_policy_value(
&report,
TrustPolicyConfig {
require_signature,
require_verified_signature,
min_average_quality,
max_stale_count,
min_ready_count,
require_role: require_role.as_deref(),
require_hosts: &require_hosts,
},
);
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&policy, format));
} else {
println!(
"Trust policy for {}: {}",
policy["input"].as_str().unwrap_or("<unknown>"),
if policy["passed"].as_bool() == Some(true) {
"passed"
} else {
"failed"
}
);
}
// Missing/non-bool "passed" counts as failure here.
if exit_code && policy["passed"].as_bool() != Some(true) {
std::process::exit(1);
}
}
// `inspect registry-init`: create an empty local bundle registry at --dir.
InspectAction::RegistryInit {
dir,
pretty,
format,
} => {
let value = registry_init_value(&dir)?;
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
println!("Initialized local registry at {}", dir.display());
}
}
// `inspect registry-add`: fetch a bundle (path or URL), compute its digest,
// and record it in a local registry.
InspectAction::RegistryAdd {
bundle,
registry,
auth_headers,
timeout_seconds,
pretty,
format,
} => {
let headers = parse_headers(&auth_headers)?;
let bundle_value =
read_bundle_source(&bundle, &headers, parse_timeout(timeout_seconds)).await?;
let sha256 = bundle_sha256_from_value(&bundle_value)?;
let value = registry_add_bundle_value(&registry, &bundle, &bundle_value, &sha256)?;
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
println!("Added bundle {} to registry {}", bundle, registry.display());
}
}
// `inspect registry-list`: print the entries of a local registry.
InspectAction::RegistryList {
registry,
pretty,
format,
} => {
let value = load_registry_value(&registry)?;
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
println!(
"Registry {} has {} bundle entries",
registry.display(),
value["entries"]
.as_array()
.map(|items| items.len())
.unwrap_or(0)
);
}
}
// `inspect registry-serve`: expose a local registry over HTTP, with
// concurrency and request-body-size limits applied by the server layer.
InspectAction::RegistryServe {
registry,
host,
port,
max_concurrency,
max_request_bytes,
} => {
serve_registry_http(
&registry,
&host,
port,
HttpServeLimits {
max_concurrency,
max_request_body_bytes: max_request_bytes,
},
)
.await?;
}
// `inspect registry-push`: fetch a bundle and push it to a (possibly
// remote) registry target.
InspectAction::RegistryPush {
bundle,
registry,
auth_headers,
timeout_seconds,
pretty,
format,
} => {
let headers = parse_headers(&auth_headers)?;
let bundle_value =
read_bundle_source(&bundle, &headers, parse_timeout(timeout_seconds)).await?;
let value = registry_push_target(
&registry,
&bundle,
&bundle_value,
&headers,
parse_timeout(timeout_seconds),
)
.await?;
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
println!(
"Pushed bundle {} to registry {}",
value["sha256"].as_str().unwrap_or("<unknown>"),
value["registry"].as_str().unwrap_or("<unknown>")
);
}
}
// `inspect registry-sync`: import all bundle entries from a source registry
// into a local one.
InspectAction::RegistrySync {
source,
registry,
auth_headers,
timeout_seconds,
pretty,
format,
} => {
let headers = parse_headers(&auth_headers)?;
let value = registry_sync_value(
&source,
&registry,
&headers,
parse_timeout(timeout_seconds),
)
.await?;
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
println!(
"Synced {} bundle entries into registry {}",
value["imported_count"].as_u64().unwrap_or(0),
registry.display()
);
}
}
// `inspect registry-pull`: extract a named bundle from a registry into the
// saved-profiles directory, with the usual collision-handling modes.
InspectAction::RegistryPull {
name,
registry,
root,
output_dir,
overwrite,
skip_existing,
pretty,
format,
} => {
let root = resolve_generation_root(root)?;
let output_dir = output_dir.unwrap_or_else(|| default_saved_profiles_dir(&root));
let output_dir = if output_dir.is_absolute() {
output_dir
} else {
root.join(output_dir)
};
let mode = if overwrite {
BundleImportMode::Overwrite
} else if skip_existing {
BundleImportMode::SkipExisting
} else {
BundleImportMode::Unique
};
let value = registry_pull_value(&registry, &name, &output_dir, mode)?;
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
println!(
"Pulled registry bundle {} into {}",
name,
output_dir.display()
);
}
}
InspectAction::CacheStats { pretty, format } => {
let value = cli_surfaces::cache_stats_value()?;
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
print_cache_stats_report(&value);
}
}
InspectAction::CacheClear { pretty, format } => {
let value = cli_surfaces::clear_profile_cache_value()?;
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
println!(
"Cleared CLI profile cache at {} ({} entries remain, {} bytes)",
value["path"].as_str().unwrap_or("<unknown>"),
value["entry_count"].as_u64().unwrap_or(0),
value["total_bytes"].as_u64().unwrap_or(0)
);
}
}
InspectAction::CacheInvalidate {
command,
dry_run,
pretty,
format,
} => {
let value = cli_surfaces::invalidate_profile_cache_value(&command, dry_run)?;
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else if value["dry_run"].as_bool().unwrap_or(false) {
println!(
"Would invalidate {} cached profile entries for `{}` ({} entries would remain)",
value["matched_entries"].as_u64().unwrap_or(0),
value["command"].as_str().unwrap_or("<unknown>"),
value["remaining_entries"].as_u64().unwrap_or(0)
);
} else {
println!(
"Invalidated {} cached profile entries for `{}` ({} entries remain)",
value["removed_entries"].as_u64().unwrap_or(0),
value["command"].as_str().unwrap_or("<unknown>"),
value["remaining_entries"].as_u64().unwrap_or(0)
);
}
}
InspectAction::CacheWarm {
commands,
from_file,
depth,
since,
parallel,
progress,
pretty,
format,
allow_self,
} => {
let mut requests =
cli_surfaces::load_batch_requests(&commands, from_file.as_deref(), None)?;
for request in &mut requests {
if request.depth == 0 {
request.depth = depth;
}
}
if requests.is_empty() {
return Err(sxmc::error::SxmcError::Other(
"inspect cache-warm requires at least one command spec or --from-file input"
.into(),
));
}
let since_filter = since
.as_deref()
.map(cli_surfaces::parse_batch_since_filter)
.transpose()?;
let value = cli_surfaces::warm_profile_cache(
&requests,
allow_self,
parallel,
progress,
since_filter.as_ref(),
);
if let Some(format) = output::prefer_structured_output(format, pretty) {
println!("{}", output::format_structured_value(&value, format));
} else {
print_cache_warm_report(&value);
}
}
},
// `publish`: collect saved profiles into a bundle, optionally sign it, and
// write or POST the bundle to a target.
Commands::Publish {
    target,
    inputs,
    root,
    recursive,
    bundle_name,
    description,
    role,
    hosts,
    auth_headers,
    timeout_seconds,
    signature_secret,
    signing_key,
    pretty,
    format,
} => {
    let root = resolve_generation_root(root)?;
    let signature_secret = parse_optional_secret(signature_secret)?;
    // Relative signing-key paths are resolved against the generation root.
    let signing_key = signing_key.map(|path| {
        if path.is_absolute() {
            path
        } else {
            root.join(path)
        }
    });
    // With no explicit inputs, scan the default saved-profiles dir recursively.
    let use_default_recursive = inputs.is_empty();
    let profile_inputs = if use_default_recursive {
        vec![default_saved_profiles_dir(&root)]
    } else {
        inputs
            .into_iter()
            .map(|path| {
                if path.is_absolute() {
                    path
                } else {
                    root.join(path)
                }
            })
            .collect::<Vec<_>>()
    };
    let profile_paths =
        collect_profile_paths(&profile_inputs, recursive || use_default_recursive)?;
    // Build the bundle, then sign it with whichever credentials were supplied.
    let bundle_value = sign_bundle_value(
        export_profile_bundle_value(
            &profile_paths,
            bundle_name.as_deref(),
            description.as_deref(),
            role.as_deref(),
            &hosts,
        )?,
        signature_secret.as_deref(),
        signing_key.as_deref(),
    )?;
    // HTTP/file URLs pass through untouched; bare paths are rooted.
    let resolved_target = if is_http_target(&target) || target.starts_with("file://") {
        target.clone()
    } else {
        root.join(&target).display().to_string()
    };
    let headers = parse_headers(&auth_headers)?;
    let destination = publish_bundle_target(
        &resolved_target,
        &bundle_value,
        &headers,
        parse_timeout(timeout_seconds),
    )
    .await?;
    let sha256 = bundle_sha256_from_value(&bundle_value)?;
    let report = json!({
        "bundle_schema": PROFILE_BUNDLE_SCHEMA,
        "target": destination["target"],
        "transport": destination["transport"],
        "http_status": destination.get("http_status").cloned().unwrap_or(Value::Null),
        "profile_count": bundle_value["profile_count"],
        "sha256": sha256,
        "signature": bundle_signature_report(&bundle_value),
        "metadata": bundle_value["metadata"],
        "entries": bundle_value["entries"],
    });
    if let Some(format) = output::prefer_structured_output(format, pretty) {
        println!("{}", output::format_structured_value(&report, format));
    } else {
        println!(
            "Published {} profiles to {}",
            report["profile_count"].as_u64().unwrap_or(0),
            report["target"].as_str().unwrap_or("<unknown>")
        );
    }
}
// `pull`: fetch a bundle from a source, verify its digest and signature, and
// import the contained profiles into a local directory.
Commands::Pull {
    source,
    root,
    output_dir,
    overwrite,
    skip_existing,
    auth_headers,
    timeout_seconds,
    expected_sha256,
    signature_secret,
    public_key,
    pretty,
    format,
} => {
    let root = resolve_generation_root(root)?;
    let signature_secret = parse_optional_secret(signature_secret)?;
    let output_dir = output_dir.unwrap_or_else(|| default_saved_profiles_dir(&root));
    let output_dir = if output_dir.is_absolute() {
        output_dir
    } else {
        root.join(output_dir)
    };
    // Precedence: --overwrite, then --skip-existing, else the default Unique mode.
    let mode = if overwrite {
        BundleImportMode::Overwrite
    } else if skip_existing {
        BundleImportMode::SkipExisting
    } else {
        BundleImportMode::Unique
    };
    let resolved_source = if is_http_target(&source) || source.starts_with("file://") {
        source.clone()
    } else {
        root.join(&source).display().to_string()
    };
    let headers = parse_headers(&auth_headers)?;
    let bundle_value =
        read_bundle_source(&resolved_source, &headers, parse_timeout(timeout_seconds))
            .await?;
    // Verify the bundle digest and signature before importing anything.
    let sha256 =
        verify_bundle_digest(&bundle_value, expected_sha256.as_deref(), &resolved_source)?;
    let signature = verify_bundle_signature(
        &bundle_value,
        signature_secret.as_deref(),
        public_key.as_deref(),
        &resolved_source,
    )?;
    let value = import_profile_bundle_from_value(
        &resolved_source,
        bundle_value,
        &output_dir,
        mode,
    )?;
    // Enrich the import report with the verification results.
    let mut report = value;
    if let Some(object) = report.as_object_mut() {
        object.insert("sha256".into(), Value::from(sha256));
        object.insert("signature".into(), signature);
        object.insert(
            "expected_sha256".into(),
            expected_sha256.map(Value::from).unwrap_or(Value::Null),
        );
    }
    if let Some(format) = output::prefer_structured_output(format, pretty) {
        println!("{}", output::format_structured_value(&report, format));
    } else {
        println!(
            "Pulled {} profiles from {} into {}",
            report["imported_count"].as_u64().unwrap_or(0),
            report["input"].as_str().unwrap_or("<unknown>"),
            report["output_dir"].as_str().unwrap_or("<unknown>")
        );
    }
}
// `add`: inspect one CLI and onboard it into the detected (or explicitly
// selected) AI hosts; falls back to a full preview when no host is available.
Commands::Add {
    command,
    depth,
    root,
    global,
    local,
    skills_path,
    hosts,
    preview,
    allow_low_confidence,
    allow_self,
    pretty,
    format,
} => {
    let install_paths = resolve_install_paths(root, global, local)?;
    let profile = cli_surfaces::inspect_cli_with_depth(&command, allow_self, depth)?;
    ensure_profile_ready_for_agent_docs(&profile, allow_low_confidence)?;
    let render_format = explicit_structured_format(format, pretty);
    // No --host flags => auto-detect which AI hosts are installed.
    let auto_detected_hosts = hosts.is_empty();
    let selected_hosts = if auto_detected_hosts {
        auto_detect_add_hosts(&install_paths)
    } else {
        hosts
    };
    let has_selected_hosts = !selected_hosts.is_empty();
    // Apply only when hosts exist and the user did not request a preview.
    let apply = has_selected_hosts && !preview;
    let auto_previewed_due_to_missing_hosts = !has_selected_hosts;
    // Human progress lines are suppressed when structured output is requested.
    if render_format.is_none() && apply {
        println!("Detected AI hosts: {}", host_label_list(&selected_hosts));
    } else if render_format.is_none() && has_selected_hosts {
        println!("Previewing onboarding for AI hosts: {}", host_label_list(&selected_hosts));
    } else if render_format.is_none() {
        println!(
            "No AI hosts detected for the {} install scope. Previewing the full onboarding plan instead.",
            install_paths.scope().as_str()
        );
        println!(
            "Tip: install a supported host runtime or pass --host <name> to apply directly."
        );
    }
    let (artifacts, selected_hosts) = resolve_cli_ai_init_artifacts(
        &profile,
        AiCoverage::Full,
        None,
        &selected_hosts,
        &install_paths,
        &skills_path,
        if apply {
            ArtifactMode::Apply
        } else {
            ArtifactMode::Preview
        },
    )?;
    let outcomes = if apply {
        cli_surfaces::materialize_artifacts_with_apply_selection(
            &artifacts,
            ArtifactMode::Apply,
            &install_paths,
            &selected_hosts,
        )?
    } else if has_selected_hosts {
        cli_surfaces::preview_artifacts_with_apply_selection(
            &artifacts,
            ArtifactMode::Apply,
            &install_paths,
            &selected_hosts,
        )?
    } else {
        // No hosts at all: preview the full plan.
        cli_surfaces::preview_artifacts(&artifacts, ArtifactMode::Apply, &install_paths)?
    };
    if let Some(format) = render_format {
        let value = add_result_value(AddResultContext {
            install_paths: &install_paths,
            command: &command,
            profile: &profile,
            hosts: &selected_hosts,
            outcomes: &outcomes,
            auto_detected_hosts,
            preview_requested: preview,
            auto_previewed_due_to_missing_hosts,
        });
        println!("{}", output::format_structured_value(&value, format));
    } else if apply {
        print_write_outcomes(&outcomes);
    } else {
        print_preview_outcomes(&outcomes);
    }
}
// `setup`: like `add` but onboards a batch of tools (explicit or detected) in
// one pass, aggregating per-tool results for structured output.
Commands::Setup {
    tools,
    limit,
    depth,
    root,
    global,
    local,
    skills_path,
    hosts,
    preview,
    allow_low_confidence,
    allow_self,
    pretty,
    format,
} => {
    let install_paths = resolve_install_paths(root, global, local)?;
    let auto_detected_tools = tools.is_empty();
    let tools = if auto_detected_tools {
        detect_setup_tools(limit)
    } else {
        tools
    };
    if tools.is_empty() {
        return Err(sxmc::error::SxmcError::Other(
            "No CLI tools were selected or auto-detected. Re-run with `--tool <name>` or install one of the common tools Sumac scans for.".into(),
        ));
    }
    let render_format = explicit_structured_format(format, pretty);
    let auto_detected_hosts = hosts.is_empty();
    let selected_hosts = if auto_detected_hosts {
        auto_detect_add_hosts(&install_paths)
    } else {
        hosts
    };
    let has_selected_hosts = !selected_hosts.is_empty();
    let apply = has_selected_hosts && !preview;
    let auto_previewed_due_to_missing_hosts = !has_selected_hosts;
    if render_format.is_none() {
        println!("Selected tools: {}", tools.join(", "));
    }
    if render_format.is_none() && apply {
        println!("Detected AI hosts: {}", host_label_list(&selected_hosts));
    } else if render_format.is_none() && has_selected_hosts {
        println!("Previewing onboarding for AI hosts: {}", host_label_list(&selected_hosts));
    } else if render_format.is_none() {
        println!(
            "No AI hosts detected for the {} install scope. Previewing the full onboarding plan instead.",
            install_paths.scope().as_str()
        );
        println!(
            "Tip: install a supported host runtime or pass --host <name> to apply directly."
        );
    }
    let mut tool_results = Vec::new();
    // Onboard each tool independently; a failure aborts the whole run via `?`.
    for command in &tools {
        if render_format.is_none() {
            println!("Onboarding tool: {}", command);
        }
        let profile = cli_surfaces::inspect_cli_with_depth(command, allow_self, depth)?;
        ensure_profile_ready_for_agent_docs(&profile, allow_low_confidence)?;
        let (artifacts, selected_hosts) = resolve_cli_ai_init_artifacts(
            &profile,
            AiCoverage::Full,
            None,
            &selected_hosts,
            &install_paths,
            &skills_path,
            if apply {
                ArtifactMode::Apply
            } else {
                ArtifactMode::Preview
            },
        )?;
        let outcomes = if apply {
            cli_surfaces::materialize_artifacts_with_apply_selection(
                &artifacts,
                ArtifactMode::Apply,
                &install_paths,
                &selected_hosts,
            )?
        } else if has_selected_hosts {
            cli_surfaces::preview_artifacts_with_apply_selection(
                &artifacts,
                ArtifactMode::Apply,
                &install_paths,
                &selected_hosts,
            )?
        } else {
            cli_surfaces::preview_artifacts(
                &artifacts,
                ArtifactMode::Apply,
                &install_paths,
            )?
        };
        if render_format.is_none() && apply {
            print_write_outcomes(&outcomes);
        } else if render_format.is_none() {
            print_preview_outcomes(&outcomes);
        }
        // Collected for the aggregated structured report below.
        tool_results.push(json!({
            "tool": command,
            "profile": profile_summary_value(&profile),
            "outcomes": write_outcomes_value(&outcomes),
            "outcome_summary": write_outcome_summary_value(&outcomes),
        }));
    }
    if let Some(format) = render_format {
        let value = setup_result_value(SetupResultContext {
            install_paths: &install_paths,
            tools: &tools,
            tool_results: &tool_results,
            auto_detected_tools,
            hosts: &selected_hosts,
            auto_detected_hosts,
            preview_requested: preview,
            auto_previewed_due_to_missing_hosts,
        });
        println!("{}", output::format_structured_value(&value, format));
    }
}
// `init`: generate AI onboarding artifacts either from a live CLI inspection
// (`init ai`) or from saved discovery snapshots (`init discovery`).
Commands::Init { action } => match action {
    InitAction::Ai {
        from_cli,
        depth,
        coverage,
        client,
        hosts,
        skills_path,
        root,
        global,
        local,
        mode,
        remove,
        allow_low_confidence,
        allow_self,
    } => {
        let install_paths = resolve_install_paths(root, global, local)?;
        let profile = cli_surfaces::inspect_cli_with_depth(&from_cli, allow_self, depth)?;
        // Confidence gating only matters when we are about to write docs.
        if !remove {
            ensure_profile_ready_for_agent_docs(&profile, allow_low_confidence)?;
        }
        let (artifacts, selected_hosts) = resolve_cli_ai_init_artifacts(
            &profile,
            coverage,
            client,
            &hosts,
            &install_paths,
            &skills_path,
            mode,
        )?;
        if remove {
            // --remove deletes previously-materialized artifacts instead.
            let outcomes = cli_surfaces::remove_artifacts_with_apply_selection(
                &artifacts,
                mode,
                &install_paths,
                &selected_hosts,
            )?;
            print_remove_outcomes(&outcomes);
        } else {
            let outcomes = cli_surfaces::materialize_artifacts_with_apply_selection(
                &artifacts,
                mode,
                &install_paths,
                &selected_hosts,
            )?;
            print_write_outcomes(&outcomes);
        }
    }
    InitAction::Discovery {
        snapshot,
        coverage,
        client,
        hosts,
        root,
        global,
        local,
        mode,
    } => {
        let install_paths = resolve_install_paths(root, global, local)?;
        let snapshots = load_discovery_snapshots(&snapshot)?;
        let (artifacts, selected_hosts) = resolve_discovery_init_artifacts(
            &snapshots,
            coverage,
            client,
            &hosts,
            &install_paths,
            mode,
        )?;
        if mode == ArtifactMode::Preview {
            // Full coverage with no host selection previews everything;
            // otherwise the preview honours the host apply-selection.
            let outcomes = if coverage == AiCoverage::Full && selected_hosts.is_empty() {
                cli_surfaces::preview_artifacts(
                    &artifacts,
                    ArtifactMode::Apply,
                    &install_paths,
                )?
            } else {
                cli_surfaces::preview_artifacts_with_apply_selection(
                    &artifacts,
                    if coverage == AiCoverage::Full {
                        ArtifactMode::Apply
                    } else {
                        ArtifactMode::Preview
                    },
                    &install_paths,
                    &selected_hosts,
                )?
            };
            print_preview_outcomes(&outcomes);
        } else {
            let outcomes = cli_surfaces::materialize_artifacts_with_apply_selection(
                &artifacts,
                mode,
                &install_paths,
                &selected_hosts,
            )?;
            print_write_outcomes(&outcomes);
        }
    }
},
// `scaffold`: generate one artifact family at a time (CI workflow, skills,
// agent docs, client configs, MCP wrappers, llms.txt, discovery outputs)
// from a saved profile or discovery snapshot.
Commands::Scaffold { action } => match action {
    ScaffoldAction::Ci {
        from_profile,
        root,
        output_dir,
        mode,
    } => {
        let root = resolve_generation_root(root)?;
        let install_paths = InstallPaths::local(root.clone());
        let profile = cli_surfaces::load_profile(&from_profile)?;
        let artifact =
            cli_surfaces::generate_ci_workflow_artifact(&profile, &root, &output_dir);
        let outcomes =
            cli_surfaces::materialize_artifacts(&[artifact], mode, &install_paths)?;
        print_write_outcomes(&outcomes);
    }
    ScaffoldAction::Skill {
        from_profile,
        root,
        output_dir,
        mode,
    } => {
        let root = resolve_generation_root(root)?;
        let install_paths = InstallPaths::local(root.clone());
        let profile = cli_surfaces::load_profile(&from_profile)?;
        let artifacts =
            cli_surfaces::generate_skill_artifacts(&profile, &root, &output_dir);
        let outcomes =
            cli_surfaces::materialize_artifacts(&artifacts, mode, &install_paths)?;
        print_write_outcomes(&outcomes);
    }
    ScaffoldAction::AgentDoc {
        from_profile,
        coverage,
        client,
        hosts,
        root,
        global,
        local,
        mode,
        allow_low_confidence,
    } => {
        let install_paths = resolve_install_paths(root, global, local)?;
        let profile = cli_surfaces::load_profile(&from_profile)?;
        // Agent docs are quality-gated unless --allow-low-confidence is set.
        ensure_profile_ready_for_agent_docs(&profile, allow_low_confidence)?;
        let (artifacts, selected_hosts) = resolve_cli_ai_agent_doc_artifacts(
            &profile,
            coverage,
            client,
            &hosts,
            &install_paths,
            mode,
        )?;
        let outcomes = cli_surfaces::materialize_artifacts_with_apply_selection(
            &artifacts,
            mode,
            &install_paths,
            &selected_hosts,
        )?;
        print_write_outcomes(&outcomes);
    }
    ScaffoldAction::ClientConfig {
        from_profile,
        coverage,
        client,
        hosts,
        skills_path,
        root,
        global,
        local,
        mode,
    } => {
        let install_paths = resolve_install_paths(root, global, local)?;
        let profile = cli_surfaces::load_profile(&from_profile)?;
        let (artifacts, selected_hosts) = resolve_cli_ai_client_config_artifacts(
            &profile,
            coverage,
            client,
            &hosts,
            &install_paths,
            &skills_path,
            mode,
        )?;
        let outcomes = cli_surfaces::materialize_artifacts_with_apply_selection(
            &artifacts,
            mode,
            &install_paths,
            &selected_hosts,
        )?;
        print_write_outcomes(&outcomes);
    }
    ScaffoldAction::McpWrapper {
        from_profile,
        root,
        output_dir,
        mode,
    } => {
        let root = resolve_generation_root(root)?;
        let install_paths = InstallPaths::local(root.clone());
        let profile = cli_surfaces::load_profile(&from_profile)?;
        let artifacts =
            cli_surfaces::generate_mcp_wrapper_artifacts(&profile, &root, &output_dir)?;
        let outcomes =
            cli_surfaces::materialize_artifacts(&artifacts, mode, &install_paths)?;
        print_write_outcomes(&outcomes);
    }
    ScaffoldAction::LlmTxt {
        from_profile,
        root,
        mode,
    } => {
        let root = resolve_generation_root(root)?;
        let install_paths = InstallPaths::local(root.clone());
        let profile = cli_surfaces::load_profile(&from_profile)?;
        let artifact = cli_surfaces::generate_llms_txt_artifact(&profile, &root);
        let outcomes =
            cli_surfaces::materialize_artifacts(&[artifact], mode, &install_paths)?;
        // Preview-like modes report diffs; write-like modes report results.
        match mode {
            ArtifactMode::Preview | ArtifactMode::Patch => {
                print_preview_outcomes(&outcomes)
            }
            ArtifactMode::WriteSidecar | ArtifactMode::Apply => {
                print_write_outcomes(&outcomes)
            }
        }
    }
    ScaffoldAction::DiscoveryPack {
        from_snapshot,
        root,
        output_dir,
        mode,
    } => {
        let root = resolve_generation_root(root)?;
        let outputs = discovery_pack_output_entries(&from_snapshot, &output_dir)?;
        let outcomes = materialize_text_outputs(&outputs, mode, &root)?;
        match mode {
            ArtifactMode::Preview | ArtifactMode::Patch => {
                print_preview_outcomes(&outcomes)
            }
            ArtifactMode::WriteSidecar | ArtifactMode::Apply => {
                print_write_outcomes(&outcomes)
            }
        }
    }
    ScaffoldAction::DiscoveryTools {
        from_snapshot,
        root,
        output_dir,
        mode,
    } => {
        let root = resolve_generation_root(root)?;
        let outputs = discovery_tools_output_entries(&from_snapshot, &output_dir)?;
        let outcomes = materialize_text_outputs(&outputs, mode, &root)?;
        match mode {
            ArtifactMode::Preview | ArtifactMode::Patch => {
                print_preview_outcomes(&outcomes)
            }
            ArtifactMode::WriteSidecar | ArtifactMode::Apply => {
                print_write_outcomes(&outcomes)
            }
        }
    }
},
// `bake`: CRUD over saved "bake" configurations in the BakeStore.
// Create/Update validate the source before persisting unless --skip-validate.
Commands::Bake { action } => match action {
    BakeAction::Create {
        name,
        source_type,
        source,
        description,
        auth_headers,
        env_vars,
        timeout_seconds,
        base_dir,
        skip_validate,
    } => {
        let st = parse_source_type(&source_type);
        let config = BakeConfig {
            name: name.clone(),
            source_type: st,
            source,
            // Remember where the bake was created so relative sources can be
            // resolved later.
            base_dir: base_dir.or_else(|| std::env::current_dir().ok()),
            auth_headers,
            env_vars,
            timeout_seconds,
            description,
        };
        if !skip_validate {
            validate_bake_config(&config).await?;
        }
        let mut store = BakeStore::load()?;
        store.create(config)?;
        println!("Created bake: {}", name);
    }
    BakeAction::List => {
        let store = BakeStore::load()?;
        let configs = store.list();
        if configs.is_empty() {
            println!("No baked configs.");
        } else {
            for config in configs {
                println!("{}", config);
            }
        }
    }
    BakeAction::Show { name } => {
        let store = BakeStore::load()?;
        if let Some(config) = store.show(&name) {
            println!("Name: {}", config.name);
            println!("Type: {:?}", config.source_type);
            println!("Source: {}", config.source);
            // Optional fields are only printed when present / non-empty.
            if let Some(ref base_dir) = config.base_dir {
                println!("Base dir: {}", base_dir.display());
            }
            if let Some(ref desc) = config.description {
                println!("Description: {}", desc);
            }
            if !config.auth_headers.is_empty() {
                println!("Auth headers: {}", config.auth_headers.len());
            }
            if !config.env_vars.is_empty() {
                println!("Env vars: {}", config.env_vars.len());
            }
            if let Some(timeout) = config.timeout_seconds {
                println!("Timeout: {}s", timeout);
            }
        } else {
            eprintln!("Bake '{}' not found", name);
            std::process::exit(1);
        }
    }
    BakeAction::Update {
        name,
        source_type,
        source,
        description,
        auth_headers,
        env_vars,
        timeout_seconds,
        base_dir,
        skip_validate,
    } => {
        let mut store = BakeStore::load()?;
        let existing = match store.show(&name) {
            Some(config) => config.clone(),
            None => {
                eprintln!("Bake '{}' not found", name);
                std::process::exit(1);
            }
        };
        // If the source itself changed, re-anchor base_dir at the current
        // directory (unless one was given explicitly); otherwise keep the
        // stored base_dir. Every other field falls back to the existing value.
        let source_changed = source_type.is_some() || source.is_some();
        let updated = BakeConfig {
            name: name.clone(),
            source_type: source_type
                .as_deref()
                .map(parse_source_type)
                .unwrap_or(existing.source_type),
            source: source.unwrap_or(existing.source),
            base_dir: base_dir
                .or_else(|| {
                    if source_changed {
                        std::env::current_dir().ok()
                    } else {
                        None
                    }
                })
                .or(existing.base_dir),
            auth_headers: if auth_headers.is_empty() {
                existing.auth_headers
            } else {
                auth_headers
            },
            env_vars: if env_vars.is_empty() {
                existing.env_vars
            } else {
                env_vars
            },
            timeout_seconds: timeout_seconds.or(existing.timeout_seconds),
            description: description.or(existing.description),
        };
        if !skip_validate {
            validate_bake_config(&updated).await?;
        }
        store.update(updated)?;
        println!("Updated bake: {}", name);
    }
    BakeAction::Remove { name } => {
        let mut store = BakeStore::load()?;
        store.remove(&name)?;
        println!("Removed bake: {}", name);
    }
},
// `completions`: emit a shell-completion script for `sxmc` on stdout.
Commands::Completions { shell } => {
    let mut cli = Cli::command();
    generate(shell, &mut cli, "sxmc", &mut std::io::stdout());
}
// `doctor`: report install health; with --fix/--remove it first repairs (or
// strips) host startup files. --check turns missing startup files into exit 1.
Commands::Doctor {
    root,
    global,
    local,
    check,
    only_hosts,
    fix,
    remove,
    dry_run,
    from_cli,
    depth,
    skills_path,
    allow_low_confidence,
    human,
    pretty,
    format,
} => {
    let install_paths = resolve_install_paths(root, global, local)?;
    let mut report_hosts = only_hosts.clone();
    let explicit_structured = format.is_some() || pretty;
    if fix || remove {
        let selected_hosts = infer_doctor_hosts(&install_paths, &only_hosts)?;
        let from_cli =
            infer_doctor_from_cli(&install_paths, &selected_hosts, from_cli.as_deref())?;
        // The final report should cover the hosts we actually repaired.
        if report_hosts.is_empty() {
            report_hosts = selected_hosts.clone();
        }
        if only_hosts.is_empty() && !explicit_structured {
            println!(
                "Auto-detected AI hosts: {}",
                host_label_list(&selected_hosts)
            );
        }
        if !explicit_structured {
            println!("Using CLI surface: {}", from_cli);
        }
        let outcomes = repair_doctor_startup_files(DoctorRepairOptions {
            install_paths: &install_paths,
            only_hosts: &selected_hosts,
            from_cli: &from_cli,
            depth,
            skills_path: &skills_path,
            allow_low_confidence,
            dry_run,
            remove,
        })?;
        print_write_outcomes(&outcomes);
    }
    let value = doctor_value(&install_paths, &report_hosts)?;
    // Human report on a TTY unless a structured format was requested.
    if should_render_doctor_human(human, format, pretty, std::io::stdout().is_terminal()) {
        print_doctor_report(&value);
    } else if let Some(format) = output::prefer_structured_output(format, pretty) {
        println!("{}", output::format_structured_value(&value, format));
    } else {
        let format = output::resolve_structured_format(format, pretty);
        println!("{}", output::format_structured_value(&value, format));
    }
    if check {
        // Exit non-zero when any tracked startup file is absent.
        let startup_files = value["startup_files"].as_object();
        let has_missing = startup_files
            .map(|files| {
                files
                    .values()
                    .any(|details| !details["present"].as_bool().unwrap_or(false))
            })
            .unwrap_or(false);
        if has_missing {
            std::process::exit(1);
        }
    }
}
// `status`: render install/host status (optionally including baked-health
// probes); --exit-code maps unhealthy baked health to a non-zero exit.
Commands::Status {
    root,
    global,
    local,
    only_hosts,
    compare_hosts,
    health,
    exit_code,
    human,
    pretty,
    format,
} => {
    let install_paths = resolve_install_paths(root, global, local)?;
    let value =
        status_value_with_health(&install_paths, &only_hosts, &compare_hosts, health)
            .await?;
    // Same rendering precedence as `doctor`: human on a TTY, structured when
    // asked, structured default otherwise.
    if should_render_doctor_human(human, format, pretty, std::io::stdout().is_terminal()) {
        print_status_report(&value);
    } else if let Some(format) = output::prefer_structured_output(format, pretty) {
        println!("{}", output::format_structured_value(&value, format));
    } else {
        let format = output::resolve_structured_format(format, pretty);
        println!("{}", output::format_structured_value(&value, format));
    }
    if exit_code && status_has_unhealthy_baked_health(&value) {
        std::process::exit(1);
    }
}
// `sync`: reconcile saved profiles with host startup files; --check exits 1
// when anything is blocked, errored, or (without --apply) would change.
Commands::Sync {
    root,
    global,
    local,
    only_hosts,
    skills_path,
    apply,
    check,
    allow_low_confidence,
    pretty,
    format,
} => {
    let install_paths = resolve_install_paths(root, global, local)?;
    let value = sync_saved_profiles_value(
        &install_paths,
        &only_hosts,
        &skills_path,
        apply,
        allow_low_confidence,
    )?;
    if let Some(format) = explicit_structured_format(format, pretty) {
        println!("{}", output::format_structured_value(&value, format));
    } else {
        println!("{}", format_sync_report(&value));
    }
    if check
        && (value["blocked_count"].as_u64().unwrap_or(0) > 0
            || value["error_count"].as_u64().unwrap_or(0) > 0
            || (!apply && value["changed_count"].as_u64().unwrap_or(0) > 0))
    {
        std::process::exit(1);
    }
}
Commands::Watch {
root,
global,
local,
only_hosts,
compare_hosts,
health,
interval_seconds,
exit_on_change,
exit_on_unhealthy,
notify_file,
notify_command,
notify_webhooks,
notify_slack_webhooks,
notify_headers,
notify_template,
pretty,
format,
} => {
let install_paths = resolve_install_paths(root, global, local)?;
let stdout_is_tty = std::io::stdout().is_terminal();
let interval = Duration::from_secs(interval_seconds.max(1));
let notify_headers = parse_headers(¬ify_headers)?;
let mut last_rendered = None::<String>;
let mut first_frame = true;
loop {
let value =
status_value_with_health(&install_paths, &only_hosts, &compare_hosts, health)
.await?;
let rendered = render_status_output(&value, format, pretty, stdout_is_tty);
if last_rendered.as_ref() != Some(&rendered) {
println!("{rendered}");
println!();
std::io::stdout().flush()?;
let unhealthy = status_has_unhealthy_baked_health(&value);
let should_notify = !first_frame || unhealthy;
if should_notify {
let reason = if unhealthy { "unhealthy" } else { "change" };
let event = watch_event_value(&install_paths, reason, &value);
let payload = watch_notification_payload(notify_template, &event);
if let Some(path) = notify_file.as_ref() {
append_watch_notification(path, &payload)?;
}
if let Some(command) = notify_command.as_deref() {
run_watch_notify_command(command, &event, &payload, notify_template)?;
}
for webhook in ¬ify_webhooks {
send_watch_webhook(webhook, ¬ify_headers, &payload).await?;
}
if !notify_slack_webhooks.is_empty() {
let slack_payload = watch_notification_payload(
WatchNotificationTemplate::Slack,
&event,
);
for webhook in ¬ify_slack_webhooks {
send_watch_webhook(webhook, ¬ify_headers, &slack_payload)
.await?;
}
}
}
if exit_on_unhealthy && unhealthy {
std::process::exit(1);
}
if exit_on_change && !first_frame {
std::process::exit(1);
}
last_rendered = Some(rendered);
}
first_frame = false;
std::thread::sleep(interval);
}
}
}
Ok(())
}
#[cfg(test)]
mod tests {
    use super::{
        annotate_mcp_tool_call_error, is_capability_not_supported, list_optional_surface,
        looks_like_argument_shape_error, should_render_doctor_human, McpSurface,
    };
    use sxmc::error::SxmcError;
    use sxmc::output::StructuredOutputFormat;

    // A JSON-RPC -32601 ("Method not found") reply means the server lacks the
    // surface entirely; it must read as an optional-capability gap, not a failure.
    #[test]
    fn detects_json_rpc_method_not_found_as_optional_capability_gap() {
        let err = SxmcError::McpError(
            "list_prompts failed: JSON-RPC error -32601: Method not found".into(),
        );
        let treated_as_gap = is_capability_not_supported(&err);
        assert!(treated_as_gap);
    }

    // Some servers answer with prose rather than a JSON-RPC code; the textual
    // "not supported" form must be recognized too.
    #[test]
    fn detects_textual_not_supported_errors() {
        let err = SxmcError::McpError("list_resources failed: capability not supported".into());
        assert!(is_capability_not_supported(&err));
    }

    // TTY + no structured flags => human-readable doctor output.
    #[test]
    fn doctor_prefers_human_when_tty_and_no_structured_flags() {
        let is_tty = true;
        assert!(should_render_doctor_human(false, None, false, is_tty));
    }

    // Piped stdout defaults to machine-readable output.
    #[test]
    fn doctor_prefers_json_when_not_tty() {
        let is_tty = false;
        assert!(!should_render_doctor_human(false, None, false, is_tty));
    }

    // An explicit structured format beats TTY detection, and --human beats a
    // non-TTY stdout.
    #[test]
    fn doctor_human_flag_overrides_non_tty() {
        let structured_on_tty =
            should_render_doctor_human(false, Some(StructuredOutputFormat::Json), false, true);
        assert!(!structured_on_tty);
        let human_on_pipe = should_render_doctor_human(true, None, false, false);
        assert!(human_on_pipe);
    }

    // Transport-level failures must still surface as real errors.
    #[test]
    fn does_not_hide_real_failures() {
        let err = SxmcError::McpError("list_prompts failed: connection reset".into());
        assert!(!is_capability_not_supported(&err));
    }

    // Only schema/validation wording counts as an argument-shape problem.
    #[test]
    fn detects_argument_shape_errors() {
        let shape_errors = [
            "call_tool failed: invalid params: expected object",
            "call_tool failed: validation error: missing required property",
        ];
        for message in shape_errors {
            assert!(looks_like_argument_shape_error(message));
        }
        assert!(!looks_like_argument_shape_error(
            "call_tool failed: connection reset"
        ));
    }

    // Annotated tool-call errors must carry actionable recovery guidance.
    #[test]
    fn tool_call_errors_include_recovery_hints() {
        let annotated = annotate_mcp_tool_call_error(
            SxmcError::McpError("call_tool failed: invalid params: expected object".into()),
            "sxmc mcp info demo/tool --format toon",
            Some("sxmc mcp session demo"),
        );
        let text = annotated.to_string();
        for expected in [
            "Recovery hints:",
            "sxmc mcp info demo/tool --format toon",
            "sxmc mcp session demo",
            "stdout only",
        ] {
            assert!(text.contains(expected));
        }
    }

    // A "method not found" from the list call yields an empty surface.
    #[tokio::test]
    async fn optional_surface_returns_empty_when_capability_is_missing() {
        let listed = list_optional_surface::<String, _>(McpSurface::Prompts, None, async {
            Err(SxmcError::McpError(
                "list_prompts failed: JSON-RPC error -32601: Method not found".into(),
            ))
        })
        .await
        .unwrap();
        assert!(listed.is_empty());
    }

    // When the server's capability flag is false, the list future must never be
    // polled at all — the panic inside would fail the test if it were.
    #[tokio::test]
    async fn optional_surface_skips_when_server_does_not_advertise_capability() {
        let listed = list_optional_surface::<String, _>(McpSurface::Resources, Some(false), async {
            panic!("list future should not be polled when capability is absent");
            #[allow(unreachable_code)]
            Ok(Vec::new())
        })
        .await
        .unwrap();
        assert!(listed.is_empty());
    }
}