use std::collections::HashMap;
use std::fs;
use std::io::{BufReader, BufWriter, Write};
use std::path::PathBuf;
use std::sync::Arc;
use std::time::SystemTime;

use anyhow::{Context, Result};
use blake3;
use chrono::{DateTime, Utc};
use colored::*;
use indicatif::{ProgressBar, ProgressStyle};
use serde::{Deserialize, Serialize};

use skill_runtime::{
    DocumentMetadata, IndexDocument, InstanceManager, SearchConfig, SearchPipeline,
};
/// Cached tool documents for one skill, together with the inputs used to
/// decide staleness (WASM file mtime and SKILL.md content hash).
#[derive(Debug, Clone, Serialize, Deserialize)]
struct EmbeddingCacheEntry {
/// Fully-built tool documents for this skill at cache time.
tools: Vec<ToolDocument>,
/// Content hash of SKILL.md at cache time; `None` when the skill had none.
skill_md_hash: Option<String>,
/// Modification time (seconds since Unix epoch) of the skill's WASM file.
wasm_modified: u64,
/// Unix timestamp (seconds) when this entry was written.
cached_at: u64,
}
/// On-disk cache of per-skill tool documents, keyed by skill name.
/// `version` guards the serialized schema; `load` discards files whose
/// version differs from `CURRENT_VERSION`.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
struct EmbeddingCache {
/// Schema version of the serialized cache (compare `CURRENT_VERSION`).
version: u32,
/// Cached entries, keyed by skill name.
entries: HashMap<String, EmbeddingCacheEntry>, }
#[allow(dead_code)] impl EmbeddingCache {
const CURRENT_VERSION: u32 = 1;
fn cache_path() -> Result<PathBuf> {
let cache_dir = dirs::cache_dir()
.unwrap_or_else(|| PathBuf::from("."))
.join("skill-engine");
fs::create_dir_all(&cache_dir)?;
Ok(cache_dir.join("tool-index.json"))
}
fn load() -> Self {
Self::cache_path()
.ok()
.and_then(|p| fs::File::open(p).ok())
.and_then(|f| serde_json::from_reader(BufReader::new(f)).ok())
.filter(|c: &Self| c.version == Self::CURRENT_VERSION)
.unwrap_or_default()
}
fn save(&self) -> Result<()> {
let path = Self::cache_path()?;
let file = fs::File::create(path)?;
serde_json::to_writer(BufWriter::new(file), self)?;
Ok(())
}
fn is_valid(&self, skill_name: &str, wasm_modified: u64, skill_md_hash: Option<&str>) -> bool {
self.entries.get(skill_name).map_or(false, |entry| {
entry.wasm_modified == wasm_modified
&& entry.skill_md_hash.as_deref() == skill_md_hash
})
}
fn get(&self, skill_name: &str) -> Option<&Vec<ToolDocument>> {
self.entries.get(skill_name).map(|e| &e.tools)
}
fn set(&mut self, skill_name: String, tools: Vec<ToolDocument>, wasm_modified: u64, skill_md_hash: Option<String>) {
let now = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.map(|d| d.as_secs())
.unwrap_or(0);
self.entries.insert(skill_name, EmbeddingCacheEntry {
tools,
skill_md_hash,
wasm_modified,
cached_at: now,
});
}
}
/// YAML frontmatter of a SKILL.md file (the block between the first two
/// `---` lines). Only `name` and `description` are mandatory.
#[derive(Debug, Clone, Deserialize)]
#[allow(dead_code)] struct SkillMdFrontmatter {
name: String,
description: String,
/// Maps the YAML key "allowed-tools".
#[serde(rename = "allowed-tools", default)]
allowed_tools: Option<Vec<String>>,
#[serde(default)]
version: Option<String>,
#[serde(default)]
author: Option<String>,
#[serde(default)]
tags: Option<Vec<String>>,
#[serde(default)]
category: Option<String>,
}
/// Fully parsed SKILL.md: typed frontmatter plus the raw markdown body and
/// a map of `##`/`###` heading name -> section text.
#[derive(Debug, Clone)]
#[allow(dead_code)] struct SkillMdContent {
frontmatter: SkillMdFrontmatter,
/// Markdown body (everything after the closing `---`).
body: String,
/// Section text keyed by heading (see `parse_markdown_sections`).
sections: HashMap<String, String>,
}
/// Documentation of a single tool parameter, parsed from a SKILL.md
/// "**Parameters**:" bullet list (see `parse_parameter_line`).
#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]
pub struct ParameterDoc {
/// Name as written between backticks in the doc line.
pub name: String,
/// True when the doc line contains "(required)".
pub required: bool,
/// Coarse type guess derived from the description: "string", "number"
/// or "boolean".
pub param_type: String,
/// Free text after the colon in the doc line.
pub description: String,
}
/// Machine-readable parameter description used inside `ExecutionSignature`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ParameterSignature {
pub name: String,
pub param_type: String,
pub required: bool,
pub description: String,
/// Default value, when known. Not derivable from SKILL.md docs, so the
/// `From<&ParameterDoc>` conversion leaves it `None`.
pub default: Option<String>,
/// Enumerated allowed values, when known; empty when unknown.
pub allowed_values: Vec<String>,
}
impl From<&ParameterDoc> for ParameterSignature {
fn from(param: &ParameterDoc) -> Self {
Self {
name: param.name.clone(),
param_type: param.param_type.clone(),
required: param.required,
description: param.description.clone(),
default: None,
allowed_values: Vec::new(),
}
}
}
/// Machine-readable call signature for a tool. When built from SKILL.md
/// (see `ToolDocument::with_skill_md`) `returns` defaults to "string" and
/// `streaming` to false, since the docs do not describe them.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
pub struct ExecutionSignature {
pub tool_name: String,
pub parameters: Vec<ParameterSignature>,
/// Declared return type; conservatively "string" when unknown.
pub returns: String,
/// Whether the tool streams its output.
pub streaming: bool,
/// Estimated execution time in milliseconds, when known.
pub estimated_ms: Option<u32>,
}
/// Usage statistics attached to a tool document. In this module it is only
/// ever default-initialized; presumably populated elsewhere — TODO confirm
/// the writer.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ToolAnalytics {
/// Total number of recorded invocations.
pub usage_count: u64,
/// Success rate as a whole percentage (0-100).
pub success_rate_pct: u8,
/// Average invocation latency in milliseconds.
pub avg_latency_ms: u32,
/// Timestamp of the most recent use, if any.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub last_used: Option<DateTime<Utc>>,
}
impl PartialEq for ToolAnalytics {
    /// Field-wise equality over all four analytics fields.
    fn eq(&self, other: &Self) -> bool {
        let lhs = (self.usage_count, self.success_rate_pct, self.avg_latency_ms, &self.last_used);
        let rhs = (other.usage_count, other.success_rate_pct, other.avg_latency_ms, &other.last_used);
        lhs == rhs
    }
}
// Equality is reflexive for every field (integers and timestamps), so `Eq` holds.
impl Eq for ToolAnalytics {}
/// A searchable record of one tool exposed by one skill instance.
/// `full_text` is the text handed to the embedding pipeline.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ToolDocument {
/// "<skill>@<instance>/<tool>" — unique index key (see constructors).
pub id: String,
pub skill_name: String,
pub instance_name: String,
pub tool_name: String,
/// Short description reported by the tool at load time.
pub description: String,
/// Category from SKILL.md frontmatter, when available.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub category: Option<String>,
/// Tags from SKILL.md frontmatter; empty when absent.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub tags: Vec<String>,
/// Action verbs mined from the tool name/description
/// (see `extract_action_keywords`).
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub action_verbs: Vec<String>,
/// Names of the documented parameters (denormalized from `parameters`).
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub parameter_names: Vec<String>,
/// Skill-level description from SKILL.md frontmatter.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub skill_md_description: Option<String>,
/// The tool's "### <tool>" section from SKILL.md, verbatim.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tool_documentation: Option<String>,
/// "skill run ..." example lines extracted from SKILL.md code fences.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub usage_examples: Vec<String>,
/// Parameter docs parsed from the SKILL.md "**Parameters**:" list.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub parameters: Vec<ParameterDoc>,
/// Machine-readable call signature; only built by `with_skill_md`.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub execution_signature: Option<ExecutionSignature>,
#[serde(default)]
pub analytics: ToolAnalytics,
/// Skill version from SKILL.md frontmatter.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub skill_version: Option<String>,
/// When this document was built (`Utc::now()` at construction).
#[serde(default)]
pub indexed_at: DateTime<Utc>,
/// Short BLAKE3 fingerprint of name + description (+ docs); used to
/// detect content changes (see `content_changed`).
#[serde(default, skip_serializing_if = "Option::is_none")]
pub content_hash: Option<String>,
/// The text embedded for semantic search.
pub full_text: String,
}
impl ToolDocument {
    /// Builds a minimal searchable document from runtime metadata alone,
    /// used when the skill ships no SKILL.md.
    fn new(
        skill_name: String,
        instance_name: String,
        tool_name: String,
        description: String,
    ) -> Self {
        // Ids are "<skill>@<instance>/<tool>" so each instance of a tool
        // indexes separately.
        let id = format!("{}@{}/{}", skill_name, instance_name, tool_name);
        let action_verbs = extract_action_keywords(&tool_name, &description);
        // Plain concatenated text is enough for embedding when no richer
        // documentation is available.
        let full_text = format!(
            "Skill: {} | Instance: {} | Tool: {} | Description: {}",
            skill_name, instance_name, tool_name, description
        );
        let content_hash = compute_content_hash(&tool_name, &description, None);
        Self {
            id,
            skill_name,
            instance_name,
            tool_name,
            description,
            category: None,
            tags: Vec::new(),
            action_verbs,
            parameter_names: Vec::new(),
            skill_md_description: None,
            tool_documentation: None,
            usage_examples: Vec::new(),
            parameters: Vec::new(),
            execution_signature: None,
            analytics: ToolAnalytics::default(),
            skill_version: None,
            indexed_at: Utc::now(),
            content_hash: Some(content_hash),
            full_text,
        }
    }

    /// Builds an enriched document, pulling the skill description, category,
    /// tags, version, per-tool documentation, examples and parameters out of
    /// SKILL.md when available.
    fn with_skill_md(
        skill_name: String,
        instance_name: String,
        tool_name: String,
        description: String,
        skill_md_content: Option<&SkillMdContent>,
    ) -> Self {
        let id = format!("{}@{}/{}", skill_name, instance_name, tool_name);
        let mut usage_examples = Vec::new();
        let mut parameters = Vec::new();
        let mut tool_documentation = None;
        // `Option<&T>` is Copy, so the combinators below can consume it and
        // it stays usable afterwards — no `.as_ref()` needed.
        let skill_md_description = skill_md_content.map(|md| md.frontmatter.description.clone());
        let category = skill_md_content.and_then(|md| md.frontmatter.category.clone());
        let tags = skill_md_content
            .and_then(|md| md.frontmatter.tags.clone())
            .unwrap_or_default();
        let skill_version = skill_md_content.and_then(|md| md.frontmatter.version.clone());
        if let Some(md) = skill_md_content {
            // The "### <tool>" section of SKILL.md carries the examples and
            // parameter docs for this specific tool.
            let tool_section = extract_tool_section(&md.body, &tool_name);
            if let Some(ref section_text) = tool_section {
                usage_examples = extract_examples(section_text);
                parameters = extract_parameters(section_text);
            }
            tool_documentation = tool_section;
        }
        let action_verbs = extract_action_keywords(&tool_name, &description);
        let parameter_names: Vec<String> = parameters.iter().map(|p| p.name.clone()).collect();
        let execution_signature = Some(ExecutionSignature {
            tool_name: tool_name.clone(),
            parameters: parameters.iter().map(ParameterSignature::from).collect(),
            // SKILL.md does not describe return type or streaming; use
            // conservative defaults.
            returns: "string".to_string(),
            streaming: false,
            estimated_ms: None,
        });
        let full_text = build_weighted_embedding_text(
            &skill_name,
            &instance_name,
            &tool_name,
            &description,
            skill_md_description.as_deref(),
            tool_documentation.as_deref(),
            &parameters,
            &usage_examples,
        );
        let content_hash = compute_content_hash(
            &tool_name,
            &description,
            tool_documentation.as_deref(),
        );
        Self {
            id,
            skill_name,
            instance_name,
            tool_name,
            description,
            category,
            tags,
            action_verbs,
            parameter_names,
            skill_md_description,
            tool_documentation,
            usage_examples,
            parameters,
            execution_signature,
            analytics: ToolAnalytics::default(),
            skill_version,
            indexed_at: Utc::now(),
            content_hash: Some(content_hash),
            full_text,
        }
    }

    /// Stable identifier ("skill@instance/tool") used as the index key.
    pub fn get_id(&self) -> &str {
        &self.id
    }

    /// True when `other_hash` differs from this document's content hash,
    /// i.e. the indexed content is stale.
    pub fn content_changed(&self, other_hash: &str) -> bool {
        self.content_hash.as_deref() != Some(other_hash)
    }
}
/// 16-hex-char BLAKE3 fingerprint of a tool's name, description and
/// (optionally) its SKILL.md documentation; used to detect content changes.
fn compute_content_hash(tool_name: &str, description: &str, documentation: Option<&str>) -> String {
    let mut hasher = blake3::Hasher::new();
    // Feed the parts in a fixed order so the fingerprint is stable.
    for part in [Some(tool_name), Some(description), documentation].into_iter().flatten() {
        hasher.update(part.as_bytes());
    }
    let hex = hasher.finalize().to_hex();
    hex[..16].to_string()
}
/// Builds the text that gets embedded for a tool, with the tool name and
/// description repeated to weight them above the supporting context
/// (skill name, parameters, docs excerpt, examples).
fn build_weighted_embedding_text(
    skill_name: &str,
    _instance_name: &str,
    tool_name: &str,
    description: &str,
    skill_description: Option<&str>,
    tool_documentation: Option<&str>,
    parameters: &[ParameterDoc],
    examples: &[String],
) -> String {
    let mut parts = Vec::new();
    // Name + description appear twice, deliberately, to weight them more
    // heavily in the embedding.
    parts.push(format!("Tool: {} - {}", tool_name, description));
    parts.push(format!("{}: {}", tool_name, description));
    let action_keywords = extract_action_keywords(tool_name, description);
    if !action_keywords.is_empty() {
        parts.push(format!("Actions: {}", action_keywords.join(", ")));
    }
    parts.push(format!("Skill: {}", skill_name));
    if let Some(skill_desc) = skill_description {
        parts.push(format!("Context: {}", skill_desc));
    }
    if !parameters.is_empty() {
        let param_text: Vec<String> = parameters.iter()
            .map(|p| format!("{} ({})", p.name, p.description))
            .collect();
        parts.push(format!("Parameters: {}", param_text.join(", ")));
    }
    if let Some(doc) = tool_documentation {
        let clean_doc = clean_markdown_for_embedding(doc);
        if clean_doc.len() > 200 {
            // Truncate at a char boundary at or below byte 200: slicing at a
            // fixed byte offset panics if it falls inside a multi-byte
            // UTF-8 character.
            let mut end = 200;
            while !clean_doc.is_char_boundary(end) {
                end -= 1;
            }
            parts.push(format!("Details: {}...", &clean_doc[..end]));
        } else if !clean_doc.is_empty() {
            parts.push(format!("Details: {}", clean_doc));
        }
    }
    // Cap at two examples to keep the embedding text compact.
    for example in examples.iter().take(2) {
        parts.push(format!("Example: {}", example));
    }
    parts.join(" | ")
}
/// Returns the known action verbs that appear (as substrings of words) in
/// the tool name or description, in the verbs' canonical order.
/// Substring matching is intentional so inflections like "created" still
/// match "create".
fn extract_action_keywords(tool_name: &str, description: &str) -> Vec<String> {
    let action_verbs = [
        "list", "get", "create", "delete", "update", "upload", "download",
        "send", "receive", "fetch", "search", "find", "run", "execute",
        "start", "stop", "restart", "deploy", "build", "test", "check",
        "validate", "generate", "parse", "convert", "transform", "filter",
        "sort", "merge", "split", "copy", "move", "rename", "echo", "greet",
        "hello", "invoke", "call", "query", "read", "write", "append",
    ];
    // Lowercase the WHOLE combined string so matching is case-insensitive.
    // (Previously only the description was lowercased, so a tool named
    // "List-Pods" never matched the verb "list".)
    let combined = format!("{} {}", tool_name.replace('-', " "), description).to_lowercase();
    let words: Vec<&str> = combined.split_whitespace().collect();
    action_verbs.iter()
        .filter(|&&verb| words.iter().any(|w| w.contains(verb)))
        .map(|&s| s.to_string())
        .collect()
}
/// Flattens markdown into one line of plain prose for embedding: drops
/// headings, code-fence markers, bold-prefixed lines and blank lines, trims
/// the rest, and joins with single spaces.
fn clean_markdown_for_embedding(text: &str) -> String {
    let mut kept: Vec<&str> = Vec::new();
    for raw in text.lines() {
        let line = raw.trim();
        let is_markup = line.starts_with('#')
            || line.starts_with("```")
            || line.starts_with("**");
        if !line.is_empty() && !is_markup {
            kept.push(line);
        }
    }
    kept.join(" ")
}
/// Splits a SKILL.md file into YAML frontmatter (between the first two
/// lines that are exactly "---") and a markdown body, parses the
/// frontmatter, and indexes the body's sections by heading.
fn parse_skill_md(content: &str) -> Result<SkillMdContent> {
    if !content.trim_start().starts_with("---") {
        return Err(anyhow::anyhow!("SKILL.md missing frontmatter"));
    }
    let lines: Vec<&str> = content.lines().collect();
    // Frontmatter is delimited by the first two lines that are exactly "---".
    let mut delimiters = lines
        .iter()
        .enumerate()
        .filter(|(_, line)| line.trim() == "---")
        .map(|(i, _)| i);
    let start = delimiters
        .next()
        .ok_or_else(|| anyhow::anyhow!("SKILL.md missing opening ---"))?;
    let end = delimiters
        .next()
        .ok_or_else(|| anyhow::anyhow!("SKILL.md frontmatter not properly closed"))?;
    let frontmatter_str = lines[start + 1..end].join("\n");
    let body = lines[end + 1..].join("\n");
    let frontmatter: SkillMdFrontmatter = serde_yaml::from_str(&frontmatter_str)
        .with_context(|| format!("Failed to parse SKILL.md frontmatter: {}", frontmatter_str))?;
    let sections = parse_markdown_sections(&body);
    Ok(SkillMdContent { frontmatter, body, sections })
}
/// Indexes a markdown body by its "## " and "### " headings: heading name
/// (hashes stripped, trimmed) -> the lines beneath it. Text before the
/// first heading is discarded; duplicate headings keep the last section.
fn parse_markdown_sections(body: &str) -> HashMap<String, String> {
    let mut sections = HashMap::new();
    let mut heading: Option<String> = None;
    let mut buffer: Vec<&str> = Vec::new();
    for line in body.lines() {
        if line.starts_with("## ") || line.starts_with("### ") {
            let name = line.trim_start_matches('#').trim().to_string();
            // `replace` swaps in the new heading and hands back the previous
            // one (if any) so its accumulated lines can be flushed.
            if let Some(finished) = heading.replace(name) {
                sections.insert(finished, buffer.join("\n"));
                buffer.clear();
            }
        } else if heading.is_some() {
            buffer.push(line);
        }
    }
    // Flush the final section.
    if let Some(finished) = heading {
        sections.insert(finished, buffer.join("\n"));
    }
    sections
}
/// Returns the "### <tool_name>" section of a SKILL.md body: from the
/// heading line up to (but not including) the next "### " or "## " heading,
/// or end of body. `None` when the heading is absent.
fn extract_tool_section(body: &str, tool_name: &str) -> Option<String> {
    let heading = format!("### {}", tool_name);
    let lines: Vec<&str> = body.lines().collect();
    // Locate the exact heading line for this tool.
    let start_idx = lines.iter().position(|line| line.trim() == heading)?;
    let mut end_idx = lines.len();
    for (i, line) in lines[start_idx + 1..].iter().enumerate() {
        let trimmed = line.trim();
        // Any other heading ends the section. Compare against the FULL
        // heading (not a prefix) so "### list-all" terminates the section
        // for tool "list" instead of being swallowed into it.
        if (trimmed.starts_with("### ") && trimmed != heading) || trimmed.starts_with("## ") {
            end_idx = start_idx + 1 + i;
            break;
        }
    }
    Some(lines[start_idx..end_idx].join("\n"))
}
/// Collects "skill run" command lines from fenced code blocks in a tool's
/// SKILL.md section. Each closed fence that contained at least one such
/// line yields one example (the matching lines joined with newlines).
/// Lines in an unclosed trailing fence are dropped.
fn extract_examples(tool_doc: &str) -> Vec<String> {
    let mut examples = Vec::new();
    let mut inside_fence = false;
    let mut pending: Vec<String> = Vec::new();
    for line in tool_doc.lines() {
        if line.trim().starts_with("```") {
            // A closing fence flushes whatever command lines were collected.
            if inside_fence && !pending.is_empty() {
                examples.push(pending.join("\n"));
                pending.clear();
            }
            inside_fence = !inside_fence;
        } else if inside_fence && line.contains("skill run") {
            pending.push(line.to_string());
        }
    }
    examples
}
/// Parses the "**Parameters**:" bullet list of a tool's SKILL.md section.
/// Collection starts after the "**Parameters**:" line and stops at the next
/// bold-prefixed line (e.g. "**Returns**:").
fn extract_parameters(tool_doc: &str) -> Vec<ParameterDoc> {
    let mut params = Vec::new();
    let mut in_section = false;
    for line in tool_doc.lines() {
        let trimmed = line.trim();
        if trimmed == "**Parameters**:" {
            in_section = true;
            continue;
        }
        if !in_section {
            continue;
        }
        // Any other bold heading ends the parameters section.
        if trimmed.starts_with("**") && trimmed != "**Parameters**:" {
            break;
        }
        if let Some(param) = parse_parameter_line(line) {
            params.push(param);
        }
    }
    params
}
/// Parses one parameter bullet of the form
/// `- `name` (required): description text`.
/// Returns `None` for lines that don't match. The type is a coarse guess
/// from keywords in the description ("number", "boolean"/"true/false"),
/// defaulting to "string".
fn parse_parameter_line(line: &str) -> Option<ParameterDoc> {
    let trimmed = line.trim();
    // "- `name`..." — strip the bullet-plus-backtick prefix, then split the
    // name off at the closing backtick.
    let after_prefix = trimmed.strip_prefix("- `")?;
    let (name, rest) = after_prefix.split_once('`')?;
    let required = rest.contains("(required)");
    // Everything after the first colon is the description.
    let (_, desc) = rest.split_once(':')?;
    let description = desc.trim().to_string();
    // Lowercase once instead of per comparison.
    let lowered = description.to_lowercase();
    let param_type = if lowered.contains("number") {
        "number"
    } else if lowered.contains("boolean") || lowered.contains("true/false") {
        "boolean"
    } else {
        "string"
    }
    .to_string();
    Some(ParameterDoc {
        name: name.to_string(),
        required,
        param_type,
        description,
    })
}
/// Prints one search hit in the rich human-readable format: rank + score,
/// tool id, description, SKILL.md context, a docs excerpt, a ready-to-run
/// usage command, parameter list and usage examples.
fn display_rich_result(rank: usize, similarity_score: f64, tool: &ToolDocument) {
let score_percent = (similarity_score * 100.0) as u32;
// Traffic-light the match percentage: >=80 green, >=60 yellow, else red.
let score_color = if score_percent >= 80 {
"green"
} else if score_percent >= 60 {
"yellow"
} else {
"red"
};
println!(
"{}. {} {}",
rank.to_string().bold(),
format!("[{}% match]", score_percent).color(score_color),
format!(
"{}@{} → {}",
tool.skill_name, tool.instance_name, tool.tool_name
)
.cyan()
.bold()
);
println!(" {}", tool.description.dimmed());
println!();
if let Some(ref skill_desc) = tool.skill_md_description {
println!(" {} {}", "📋 Context:".bold(), skill_desc);
println!();
}
// Show at most the first five lines of the tool's documentation,
// skipping blank and heading lines after the first.
if let Some(ref tool_doc) = tool.tool_documentation {
let lines: Vec<&str> = tool_doc.lines().take(5).collect();
if !lines.is_empty() {
println!(" {} {}", "📖 Details:".bold(), lines[0]);
for line in &lines[1..] {
if !line.trim().is_empty() && !line.trim().starts_with("#") {
println!(" {}", line.dimmed());
}
}
println!();
}
}
// Build a copy-pasteable `skill run` command; optional parameters are
// shown in square brackets.
let usage_cmd = if tool.parameters.is_empty() {
format!("skill run {}@{}:{}", tool.skill_name, tool.instance_name, tool.tool_name)
} else {
let param_hints: Vec<String> = tool.parameters.iter()
.map(|p| if p.required {
format!("--{} <value>", p.name)
} else {
format!("[--{} <value>]", p.name)
})
.collect();
format!("skill run {}@{}:{} {}", tool.skill_name, tool.instance_name, tool.tool_name, param_hints.join(" "))
};
println!(" {} {}", "⚙️ Usage:".bold(), usage_cmd.cyan());
println!();
if !tool.parameters.is_empty() {
println!(" {} Parameters:", "📝".bold());
for param in &tool.parameters {
let req_str = if param.required { "required" } else { "optional" };
println!(
" • {} ({}, {}): {}",
param.name.cyan(),
param.param_type.yellow(),
req_str.dimmed(),
param.description.dimmed()
);
}
println!();
}
if !tool.usage_examples.is_empty() {
println!(" {} Example:", "💡".bold());
for example in &tool.usage_examples {
println!(" {}", example.green());
}
println!();
}
println!();
}
/// Path of the user's search configuration file:
/// `~/.skill-engine/search.toml`.
fn get_config_path() -> Result<PathBuf> {
    let home_dir = dirs::home_dir().context("Failed to get home directory")?;
    let config_path = home_dir.join(".skill-engine").join("search.toml");
    Ok(config_path)
}
/// Loads the search config from disk — creating and saving a default one
/// (with a first-run welcome banner unless `is_json`) when none exists —
/// then applies the CLI provider/model overrides.
/// Returns the resulting config and whether this was the first run.
fn load_or_create_config(provider: &str, model: Option<&str>, is_json: bool) -> Result<(SearchConfig, bool)> {
    let config_path = get_config_path()?;
    let first_run = !config_path.exists();
    let mut config = if !first_run {
        // Existing config file; unparsable files degrade to defaults.
        SearchConfig::from_toml_file(&config_path).unwrap_or_default()
    } else {
        if !is_json {
            println!();
            println!("{}", "Welcome to Skill Engine Search!".bold().cyan());
            println!();
            println!(" This is your first time using semantic search.");
            println!(" Using FastEmbed (local, no API key required).");
            println!();
            println!(" {} Run {} for more options (OpenAI, Ollama, etc.)",
                "Tip:".yellow().bold(), "skill setup".cyan());
            println!();
        }
        let config = SearchConfig::default();
        if let Some(parent) = config_path.parent() {
            fs::create_dir_all(parent)?;
        }
        let toml_str = toml::to_string_pretty(&config)?;
        fs::write(&config_path, &toml_str)?;
        if !is_json {
            println!(" {} Saved config to {}", "✓".green(), config_path.display());
            println!();
        }
        config
    };
    // Provider the config had BEFORE any CLI override. The default-model
    // fallbacks below must compare against this value: the previous code
    // compared AFTER overwriting `config.embedding.provider`, so the check
    // was always false and switching providers never applied a default model.
    let previous_provider = config.embedding.provider.clone();
    let provider_lower = provider.to_lowercase();
    // Only apply overrides when the caller asked for a non-default provider
    // or gave an explicit model.
    if provider_lower != "fastembed" || model.is_some() {
        match provider_lower.as_str() {
            "fastembed" => {
                config.embedding.provider = "fastembed".to_string();
                if let Some(m) = model {
                    config.embedding.model = m.to_string();
                }
            }
            "openai" => {
                config.embedding.provider = "openai".to_string();
                if let Some(m) = model {
                    config.embedding.model = m.to_string();
                } else if previous_provider != "openai" {
                    // Switching to OpenAI without an explicit model: use its default.
                    config.embedding.model = "text-embedding-ada-002".to_string();
                }
                config.embedding.openai_api_key = std::env::var("OPENAI_API_KEY").ok();
                if config.embedding.openai_api_key.is_none() {
                    return Err(anyhow::anyhow!(
                        "OPENAI_API_KEY not set. Set it with: export OPENAI_API_KEY=your-key-here\n\
                        Or run 'skill setup' to configure a different provider."
                    ));
                }
            }
            "ollama" => {
                config.embedding.provider = "ollama".to_string();
                if let Some(m) = model {
                    config.embedding.model = m.to_string();
                } else if previous_provider != "ollama" {
                    // Switching to Ollama without an explicit model: use its default.
                    config.embedding.model = "nomic-embed-text".to_string();
                }
            }
            _ => {
                return Err(anyhow::anyhow!(
                    "Unknown provider: {}. Supported: fastembed, openai, ollama\n\
                    Run 'skill setup' to configure interactively.",
                    provider
                ));
            }
        }
    }
    Ok((config, first_run))
}
/// Entry point for the search command: loads every installed tool, embeds
/// them through the configured pipeline, runs a semantic search for `query`
/// and prints the results in the requested `format` ("json", "compact", or
/// rich text by default).
pub async fn execute(
query: &str,
top_k: Option<usize>,
provider: &str,
model: Option<&str>,
format: &str,
) -> Result<()> {
// Default to five results when the caller gives no limit.
let top_k = top_k.unwrap_or(5);
let is_json = format == "json";
if !is_json {
println!();
println!("{} Searching for: {}", "→".cyan(), query.yellow());
}
// One document per tool across every installed skill instance.
let tool_documents = load_all_tools().await?;
if tool_documents.is_empty() {
if is_json {
println!("{{\"results\": [], \"error\": \"No skills installed\"}}");
} else {
println!("{} No skills installed yet. Install a skill first with: skill install <source>", "!".yellow());
}
return Ok(());
}
if !is_json {
println!(
"{} Found {} tools, generating embeddings...",
"→".cyan(),
tool_documents.len()
);
}
let (config, first_run) = load_or_create_config(provider, model, is_json)?;
if !is_json {
match config.embedding.provider.as_str() {
"fastembed" => println!("{} Using FastEmbed (local, no API key required)", "✓".green()),
"openai" => println!("{} Using OpenAI (requires OPENAI_API_KEY)", "→".cyan()),
"ollama" => println!("{} Using Ollama (requires local Ollama server)", "→".cyan()),
_ => {}
}
}
// On the first run the embedding model may need downloading; show a
// spinner so the wait doesn't look like a hang.
let pb = if !is_json && first_run {
let pb = ProgressBar::new_spinner();
pb.set_style(ProgressStyle::default_spinner()
.template("{spinner:.cyan} {msg}")
.unwrap());
pb.set_message("Downloading embedding model (first run only)...");
pb.enable_steady_tick(std::time::Duration::from_millis(100));
Some(pb)
} else {
if !is_json {
println!("{} Initializing search pipeline...", "→".cyan());
}
None
};
let pipeline = SearchPipeline::from_config(config).await
.context("Failed to initialize search pipeline")?;
if let Some(pb) = pb {
pb.finish_with_message(format!("{} Model ready", "✓".green()));
}
// Convert tool documents into the pipeline's indexing format.
let index_docs: Vec<IndexDocument> = tool_documents
.iter()
.map(|tool| IndexDocument {
id: tool.id.clone(),
content: tool.full_text.clone(),
metadata: DocumentMetadata {
skill_name: Some(tool.skill_name.clone()),
instance_name: Some(tool.instance_name.clone()),
tool_name: Some(tool.tool_name.clone()),
category: tool.category.clone(),
tags: tool.tags.clone(),
custom: HashMap::new(),
},
})
.collect();
pipeline.index_documents(index_docs).await
.context("Failed to index tools")?;
if !is_json {
println!("{} Searching for relevant tools...", "→".cyan());
}
let search_results = pipeline.search(query, top_k).await
.context("Failed to perform semantic search")?;
// Re-join pipeline hits with their full documents by id; hits with no
// matching document are silently dropped.
let results: Vec<(f64, String, ToolDocument)> = search_results
.into_iter()
.filter_map(|result| {
tool_documents.iter()
.find(|tool| tool.id == result.id)
.map(|tool| (result.score as f64, result.id.clone(), tool.clone()))
})
.collect();
match format {
"json" => display_json_results(&results)?,
"compact" => display_compact_results(&results),
_ => display_rich_results(&results, top_k),
}
Ok(())
}
/// Prints the search results as pretty JSON for machine consumption
/// (agents/scripts), including a ready-to-run execution template and CLI
/// command per hit.
fn display_json_results(results: &[(f64, String, ToolDocument)]) -> Result<()> {
    /// JSON-facing view of a tool parameter.
    #[derive(Serialize)]
    struct JsonParameter {
        name: String,
        #[serde(rename = "type")]
        param_type: String,
        required: bool,
        description: String,
    }
    /// Structured invocation template: target plus placeholder arguments.
    #[derive(Serialize)]
    struct JsonExecutionSignature {
        skill: String,
        tool: String,
        instance: String,
        args: HashMap<String, serde_json::Value>,
    }
    /// One search hit as emitted in the JSON array.
    #[derive(Serialize)]
    struct JsonResult {
        score: f64,
        score_percent: u32,
        relevance: String,
        id: String,
        skill: String,
        instance: String,
        tool: String,
        description: String,
        skill_description: Option<String>,
        tool_documentation: Option<String>,
        parameters: Vec<JsonParameter>,
        examples: Vec<String>,
        execution: JsonExecutionSignature,
        cli_command: String,
        category: Option<String>,
        tags: Vec<String>,
        version: Option<String>,
    }
    let json_results: Vec<JsonResult> = results.iter()
        .map(|(score, _, tool)| {
            let score_pct = (*score * 100.0) as u32;
            // Coarse relevance label. The top bucket is open-ended so scores
            // above 100% (backends with unnormalized scores) still read as
            // "excellent" instead of falling through to "partial".
            let relevance = match score_pct {
                p if p >= 80 => "excellent",
                60..=79 => "good",
                40..=59 => "fair",
                _ => "partial",
            }.to_string();
            // Placeholder argument map so callers see each arg's expected type.
            let mut args = HashMap::new();
            for param in &tool.parameters {
                let placeholder = match param.param_type.as_str() {
                    "string" => serde_json::Value::String("<value>".to_string()),
                    "number" | "integer" => serde_json::Value::Number(0.into()),
                    "boolean" => serde_json::Value::Bool(true),
                    _ => serde_json::Value::String("<value>".to_string()),
                };
                args.insert(param.name.clone(), placeholder);
            }
            let execution = JsonExecutionSignature {
                skill: tool.skill_name.clone(),
                tool: tool.tool_name.clone(),
                instance: tool.instance_name.clone(),
                args,
            };
            // Copy-pasteable CLI command; optional args in square brackets.
            let cli_command = if tool.parameters.is_empty() {
                format!("skill run {}@{}:{}", tool.skill_name, tool.instance_name, tool.tool_name)
            } else {
                let param_hints: Vec<String> = tool.parameters.iter()
                    .map(|p| if p.required {
                        format!("{}=<value>", p.name)
                    } else {
                        format!("[{}=<value>]", p.name)
                    })
                    .collect();
                format!("skill run {}@{}:{} {}", tool.skill_name, tool.instance_name, tool.tool_name, param_hints.join(" "))
            };
            JsonResult {
                score: *score,
                score_percent: score_pct,
                relevance,
                id: tool.id.clone(),
                skill: tool.skill_name.clone(),
                instance: tool.instance_name.clone(),
                tool: tool.tool_name.clone(),
                description: tool.description.clone(),
                skill_description: tool.skill_md_description.clone(),
                tool_documentation: tool.tool_documentation.clone(),
                parameters: tool.parameters.iter().map(|p| JsonParameter {
                    name: p.name.clone(),
                    param_type: p.param_type.clone(),
                    required: p.required,
                    description: p.description.clone(),
                }).collect(),
                examples: tool.usage_examples.clone(),
                execution,
                cli_command,
                category: tool.category.clone(),
                tags: tool.tags.clone(),
                version: tool.skill_version.clone(),
            }
        })
        .collect();
    println!("{}", serde_json::to_string_pretty(&json_results)?);
    Ok(())
}
/// Prints one terse line per hit: "[ 92%] skill@instance:tool - description".
fn display_compact_results(results: &[(f64, String, ToolDocument)]) {
    for (similarity, _id, tool) in results {
        let pct = (similarity * 100.0) as u32;
        println!(
            "[{pct:3}%] {}@{}:{} - {}",
            tool.skill_name,
            tool.instance_name,
            tool.tool_name,
            tool.description
        );
    }
}
/// Renders the rich human-readable result list, framed by horizontal rules,
/// followed by a tip footer.
fn display_rich_results(results: &[(f64, String, ToolDocument)], top_k: usize) {
    println!();
    println!("{}", "━".repeat(80).dimmed());
    println!();
    // Report how many results are actually shown — the pipeline may return
    // fewer than the requested top_k, and the old header overstated it.
    let shown = results.len().min(top_k);
    println!("{} Top {} matching tools:", "✓".green().bold(), shown);
    println!();
    for (idx, (similarity_score, _doc_id, tool)) in results.iter().enumerate() {
        display_rich_result(idx + 1, *similarity_score, tool);
    }
    println!("{}", "━".repeat(80).dimmed());
    println!();
    println!(
        "{} Use {} to see all available tools",
        "💡".yellow(),
        "skill list --verbose".cyan()
    );
    println!();
}
/// Scans the local registry (`~/.skill-engine/registry`) and builds a tool
/// document for every tool of every instance of every installed skill.
/// Indexing is best-effort: skills whose tools fail to load are skipped.
async fn load_all_tools() -> Result<Vec<ToolDocument>> {
    let instance_manager = InstanceManager::new()?;
    let mut tool_documents = Vec::new();
    let registry_dir = dirs::home_dir()
        .context("Failed to get home directory")?
        .join(".skill-engine")
        .join("registry");
    // No registry directory means no skills installed — not an error.
    if !registry_dir.exists() {
        return Ok(tool_documents);
    }
    for entry in std::fs::read_dir(&registry_dir)? {
        let entry = entry?;
        // Each skill lives in its own sub-directory of the registry.
        if !entry.file_type()?.is_dir() {
            continue;
        }
        let skill_name = entry
            .file_name()
            .to_string_lossy()
            .to_string();
        let instances = instance_manager
            .list_instances(&skill_name)
            .unwrap_or_default();
        for instance_name in instances {
            // Best-effort: a skill that fails to load must not abort the scan.
            if let Ok(tools) = load_skill_tools(&skill_name, &instance_name).await {
                tool_documents.extend(tools);
            }
        }
    }
    Ok(tool_documents)
}
/// Loads one skill instance's WASM module, queries its tool list, and
/// converts every tool into a `ToolDocument`, enriched from SKILL.md when
/// the file sits next to the module and parses.
async fn load_skill_tools(
    skill_name: &str,
    instance_name: &str,
) -> Result<Vec<ToolDocument>> {
    use skill_runtime::{SkillEngine, SkillExecutor};
    let skill_dir = dirs::home_dir()
        .context("Failed to get home directory")?
        .join(".skill-engine")
        .join("registry")
        .join(skill_name);
    let skill_path = skill_dir.join(format!("{}.wasm", skill_name));
    // A registry entry without a compiled module contributes no tools.
    if !skill_path.exists() {
        return Ok(Vec::new());
    }
    // SKILL.md is optional; read or parse failures degrade to the plain
    // (un-enriched) document form.
    let skill_md_content = fs::read_to_string(skill_dir.join("SKILL.md"))
        .ok()
        .and_then(|content| parse_skill_md(&content).ok());
    let instance_manager = InstanceManager::new()?;
    let config = instance_manager
        .load_instance(skill_name, instance_name)
        .unwrap_or_default();
    let engine = Arc::new(SkillEngine::new()?);
    let executor = SkillExecutor::load(
        engine,
        &skill_path,
        skill_name.to_string(),
        instance_name.to_string(),
        config,
    )
    .await?;
    let tools = executor.get_tools().await?;
    let mut documents = Vec::with_capacity(tools.len());
    for tool in tools {
        let doc = match skill_md_content.as_ref() {
            Some(md) => ToolDocument::with_skill_md(
                skill_name.to_string(),
                instance_name.to_string(),
                tool.name.clone(),
                tool.description.clone(),
                Some(md),
            ),
            None => ToolDocument::new(
                skill_name.to_string(),
                instance_name.to_string(),
                tool.name,
                tool.description,
            ),
        };
        documents.push(doc);
    }
    Ok(documents)
}
/// Unit tests for document construction, content hashing and SKILL.md
/// enrichment.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_tool_document_new_creates_id() {
        let doc = ToolDocument::new(
            "test-skill".to_string(),
            "default".to_string(),
            "hello".to_string(),
            "Says hello".to_string(),
        );
        // Ids follow the "<skill>@<instance>/<tool>" convention.
        assert_eq!(doc.id, "test-skill@default/hello");
        assert_eq!(doc.skill_name, "test-skill");
        assert_eq!(doc.instance_name, "default");
        assert_eq!(doc.tool_name, "hello");
        assert_eq!(doc.description, "Says hello");
        assert!(doc.category.is_none());
        assert!(doc.tags.is_empty());
        assert!(doc.content_hash.is_some());
        assert!(doc.execution_signature.is_none());
    }

    #[test]
    fn test_tool_document_extracts_action_verbs() {
        let doc = ToolDocument::new(
            "kubernetes".to_string(),
            "default".to_string(),
            "list-pods".to_string(),
            "List all pods in the cluster".to_string(),
        );
        assert!(doc.action_verbs.contains(&"list".to_string()));
    }

    #[test]
    fn test_content_hash_changes_with_content() {
        let doc1 = ToolDocument::new(
            "test".to_string(),
            "default".to_string(),
            "hello".to_string(),
            "Says hello".to_string(),
        );
        let doc2 = ToolDocument::new(
            "test".to_string(),
            "default".to_string(),
            "hello".to_string(),
            "Says goodbye".to_string(),
        );
        assert_ne!(doc1.content_hash, doc2.content_hash);
    }

    #[test]
    fn test_content_hash_same_for_identical_content() {
        let doc1 = ToolDocument::new(
            "test".to_string(),
            "default".to_string(),
            "hello".to_string(),
            "Says hello".to_string(),
        );
        let doc2 = ToolDocument::new(
            "test".to_string(),
            "default".to_string(),
            "hello".to_string(),
            "Says hello".to_string(),
        );
        assert_eq!(doc1.content_hash, doc2.content_hash);
    }

    #[test]
    fn test_tool_document_with_skill_md() {
        let frontmatter = SkillMdFrontmatter {
            name: "test-skill".to_string(),
            description: "A test skill".to_string(),
            allowed_tools: Some(vec!["Read".to_string()]),
            version: Some("1.0.0".to_string()),
            author: None,
            tags: Some(vec!["testing".to_string(), "demo".to_string()]),
            category: Some("utilities".to_string()),
        };
        let skill_md = SkillMdContent {
            frontmatter,
            body: "# Test\n\n## Tools\n\n### hello\n\nSay hello to someone.\n".to_string(),
            sections: HashMap::new(),
        };
        let doc = ToolDocument::with_skill_md(
            "test-skill".to_string(),
            "default".to_string(),
            "hello".to_string(),
            "Says hello".to_string(),
            Some(&skill_md),
        );
        // Frontmatter metadata flows into the document.
        assert_eq!(doc.category, Some("utilities".to_string()));
        assert_eq!(doc.tags, vec!["testing".to_string(), "demo".to_string()]);
        assert_eq!(doc.skill_version, Some("1.0.0".to_string()));
        assert!(doc.execution_signature.is_some());
        assert!(doc.skill_md_description.is_some());
    }

    #[test]
    fn test_execution_signature_from_parameters() {
        let params = vec![
            ParameterDoc {
                name: "message".to_string(),
                required: true,
                param_type: "string".to_string(),
                description: "The message to display".to_string(),
            },
        ];
        // NOTE: this line previously contained a mojibake'd `&` ("¶ms").
        let sig: ParameterSignature = (&params[0]).into();
        assert_eq!(sig.name, "message");
        assert!(sig.required);
        assert_eq!(sig.param_type, "string");
    }

    #[test]
    fn test_tool_analytics_default() {
        let analytics = ToolAnalytics::default();
        assert_eq!(analytics.usage_count, 0);
        assert_eq!(analytics.success_rate_pct, 0);
        assert_eq!(analytics.avg_latency_ms, 0);
        assert!(analytics.last_used.is_none());
    }

    #[test]
    fn test_tool_document_equality() {
        let doc1 = ToolDocument::new(
            "test".to_string(),
            "default".to_string(),
            "hello".to_string(),
            "Says hello".to_string(),
        );
        let doc2 = ToolDocument::new(
            "test".to_string(),
            "default".to_string(),
            "hello".to_string(),
            "Says hello".to_string(),
        );
        assert_eq!(doc1.id, doc2.id);
    }

    #[test]
    fn test_compute_content_hash() {
        let hash1 = compute_content_hash("tool1", "desc1", None);
        let hash2 = compute_content_hash("tool1", "desc1", None);
        let hash3 = compute_content_hash("tool1", "desc2", None);
        let hash4 = compute_content_hash("tool1", "desc1", Some("extra doc"));
        // Deterministic, content-sensitive, and truncated to 16 hex chars.
        assert_eq!(hash1, hash2);
        assert_ne!(hash1, hash3);
        assert_ne!(hash1, hash4);
        assert_eq!(hash1.len(), 16);
    }
}