1use anyhow::{Context, Result};
8use rmcp::{
9 ErrorData as McpError,
10 handler::server::{
11 router::{tool::ToolRoute, Router},
12 ServerHandler,
13 },
14 model::{
15 CallToolResult, Content, Implementation, ProtocolVersion,
16 ServerCapabilities, ServerInfo, Tool,
17 },
18 ServiceExt,
19 transport::stdio,
20};
21use schemars::JsonSchema;
22use serde::{Deserialize, Serialize};
23use skill_runtime::{
24 InstanceManager, LocalSkillLoader, SkillEngine, SkillExecutor, SkillManifest,
25 SearchPipeline, IndexDocument, SearchConfig, DocumentMetadata,
26};
use std::borrow::Cow;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use tokio::sync::RwLock;
32
/// A tool exposed by an installed or manifest-declared skill, as found during discovery.
#[derive(Debug, Clone)]
pub struct DiscoveredTool {
    /// Name of the skill that provides this tool.
    pub skill_name: String,
    /// Skill instance the tool belongs to (e.g. "default").
    pub instance_name: String,
    /// Tool identifier within the skill.
    pub tool_name: String,
    /// Human-readable description surfaced to MCP clients.
    pub description: String,
    /// Declared parameters for the tool.
    pub parameters: Vec<ToolParameter>,
    /// Filesystem location the tool was discovered from, when known.
    pub source_path: Option<PathBuf>,
}
43
/// A single parameter declared by a discovered skill tool.
#[derive(Debug, Clone)]
pub struct ToolParameter {
    /// Parameter name as passed in tool arguments.
    pub name: String,
    /// Textual type tag (e.g. "string"); derived from the skill's declaration.
    pub param_type: String,
    /// Human-readable description of the parameter.
    pub description: String,
    /// Whether the parameter must be supplied by the caller.
    pub required: bool,
}
51
/// Arguments accepted by the `execute_skill` MCP tool: which skill/tool/instance
/// to run, its arguments, and optional output post-processing controls
/// (grep / head / tail / truncate / format / jq) applied by `process_output`.
#[derive(Debug, Deserialize, JsonSchema)]
pub struct ExecuteSkillRequest {
    #[schemars(description = "The skill name to execute")]
    pub skill: String,

    #[schemars(description = "The tool name within the skill")]
    pub tool: String,

    // Defaults to "default" when omitted (see `default_instance`).
    #[serde(default = "default_instance")]
    #[schemars(description = "The instance name (default: 'default')")]
    pub instance: String,

    #[serde(default)]
    #[schemars(description = "Tool arguments as key-value pairs")]
    pub args: HashMap<String, serde_json::Value>,

    // --- output post-processing options below; all optional ---

    #[serde(default)]
    #[schemars(description = "Maximum characters in output. Use to prevent context overflow. Example: 4000")]
    pub max_output: Option<usize>,

    #[serde(default)]
    #[schemars(description = "How to truncate: 'head' (keep start), 'tail' (keep end), 'middle' (keep both ends), 'smart' (preserve structure)")]
    pub truncate: Option<String>,

    #[serde(default)]
    #[schemars(description = "Regex pattern to filter output lines. Only matching lines are returned.")]
    pub grep: Option<String>,

    #[serde(default)]
    #[schemars(description = "Invert grep: return lines that DON'T match the pattern")]
    pub grep_invert: Option<bool>,

    #[serde(default)]
    #[schemars(description = "Return only first N lines of output")]
    pub head: Option<usize>,

    #[serde(default)]
    #[schemars(description = "Return only last N lines of output")]
    pub tail: Option<usize>,

    #[serde(default)]
    #[schemars(description = "Transform output: 'json' (parse as JSON), 'lines' (split into array), 'count' (line count only), 'summary' (AI summary)")]
    pub format: Option<String>,

    #[serde(default)]
    #[schemars(description = "JSONPath expression to extract specific data from JSON output. Example: '.items[].metadata.name'")]
    pub jq: Option<String>,

    #[serde(default)]
    #[schemars(description = "Include execution metadata (timing, truncation info, etc.)")]
    pub include_metadata: Option<bool>,
}
121
/// Instance name used when an `ExecuteSkillRequest` does not specify one.
fn default_instance() -> String {
    String::from("default")
}
125
/// Result of post-processing a tool's raw output (filtering, truncation, formatting).
#[derive(Debug, Serialize)]
struct ProcessedOutput {
    /// The processed output text.
    content: String,
    /// Whether any step shortened the output.
    truncated: bool,
    /// Byte length of the raw output before processing.
    original_length: usize,
    /// Byte length of `content` after processing.
    final_length: usize,
    /// Number of lines kept by the grep filter, if one was applied.
    grep_matches: Option<usize>,
    /// Human-readable record of each processing step applied, in order.
    processing: Vec<String>,
}
142
143fn process_output(
145 output: &str,
146 max_output: Option<usize>,
147 truncate_strategy: Option<&str>,
148 grep_pattern: Option<&str>,
149 grep_invert: bool,
150 head_lines: Option<usize>,
151 tail_lines: Option<usize>,
152 format: Option<&str>,
153 jq_path: Option<&str>,
154) -> ProcessedOutput {
155 let original_length = output.len();
156 let mut content = output.to_string();
157 let mut processing = Vec::new();
158 let mut truncated = false;
159 let mut grep_matches = None;
160
161 if let Some(pattern) = grep_pattern {
163 if let Ok(regex) = regex::Regex::new(pattern) {
164 let lines: Vec<&str> = content.lines().collect();
165 let filtered: Vec<&str> = lines
166 .into_iter()
167 .filter(|line| {
168 let matches = regex.is_match(line);
169 if grep_invert { !matches } else { matches }
170 })
171 .collect();
172 grep_matches = Some(filtered.len());
173 content = filtered.join("\n");
174 processing.push(format!("grep(pattern='{}', invert={}, matches={})",
175 pattern, grep_invert, grep_matches.unwrap_or(0)));
176 }
177 }
178
179 if let Some(n) = head_lines {
181 let lines: Vec<&str> = content.lines().take(n).collect();
182 if content.lines().count() > n {
183 truncated = true;
184 }
185 content = lines.join("\n");
186 processing.push(format!("head({})", n));
187 } else if let Some(n) = tail_lines {
188 let all_lines: Vec<&str> = content.lines().collect();
189 if all_lines.len() > n {
190 truncated = true;
191 content = all_lines[all_lines.len().saturating_sub(n)..].join("\n");
192 }
193 processing.push(format!("tail({})", n));
194 }
195
196 if let Some(path) = jq_path {
198 if let Ok(json_val) = serde_json::from_str::<serde_json::Value>(&content) {
199 content = extract_json_path(&json_val, path);
200 processing.push(format!("jq('{}')", path));
201 }
202 }
203
204 if let Some(fmt) = format {
206 match fmt {
207 "json" => {
208 if let Ok(json_val) = serde_json::from_str::<serde_json::Value>(&content) {
210 content = serde_json::to_string_pretty(&json_val).unwrap_or(content);
211 processing.push("format(json)".to_string());
212 }
213 }
214 "lines" => {
215 let lines: Vec<&str> = content.lines().collect();
217 content = serde_json::to_string(&lines).unwrap_or(content);
218 processing.push("format(lines)".to_string());
219 }
220 "count" => {
221 let count = content.lines().count();
223 content = format!("{} lines", count);
224 processing.push("format(count)".to_string());
225 }
226 "compact" => {
227 let lines: Vec<&str> = content.lines()
229 .map(|l| l.trim())
230 .filter(|l| !l.is_empty())
231 .collect();
232 content = lines.join("\n");
233 processing.push("format(compact)".to_string());
234 }
235 _ => {}
236 }
237 }
238
239 if let Some(max) = max_output {
241 if content.len() > max {
242 truncated = true;
243 let strategy = truncate_strategy.unwrap_or("smart");
244 content = truncate_content(&content, max, strategy);
245 processing.push(format!("truncate({}, strategy='{}')", max, strategy));
246 }
247 }
248
249 ProcessedOutput {
250 final_length: content.len(),
251 content,
252 truncated,
253 original_length,
254 grep_matches,
255 processing,
256 }
257}
258
259fn truncate_content(content: &str, max_len: usize, strategy: &str) -> String {
261 if content.len() <= max_len {
262 return content.to_string();
263 }
264
265 match strategy {
266 "head" => {
267 let truncated = &content[..max_len.saturating_sub(50)];
269 format!("{}\n\n... [TRUNCATED: {} more characters]", truncated, content.len() - truncated.len())
270 }
271 "tail" => {
272 let start = content.len().saturating_sub(max_len.saturating_sub(50));
274 let truncated = &content[start..];
275 format!("[TRUNCATED: {} characters omitted] ...\n\n{}", start, truncated)
276 }
277 "middle" => {
278 let half = (max_len.saturating_sub(100)) / 2;
280 let head = &content[..half];
281 let tail = &content[content.len().saturating_sub(half)..];
282 let omitted = content.len() - (head.len() + tail.len());
283 format!("{}\n\n... [TRUNCATED: {} characters in middle] ...\n\n{}", head, omitted, tail)
284 }
285 "smart" | _ => {
286 smart_truncate(content, max_len)
288 }
289 }
290}
291
292fn smart_truncate(content: &str, max_len: usize) -> String {
294 if content.trim().starts_with('{') || content.trim().starts_with('[') {
296 return smart_truncate_json(content, max_len);
297 }
298
299 let mut result = String::new();
301 let mut remaining = max_len.saturating_sub(100); let lines: Vec<&str> = content.lines().collect();
303 let total_lines = lines.len();
304 let mut included_lines = 0;
305
306 for line in lines {
307 if result.len() + line.len() + 1 > remaining {
308 break;
309 }
310 if !result.is_empty() {
311 result.push('\n');
312 }
313 result.push_str(line);
314 included_lines += 1;
315 }
316
317 if included_lines < total_lines {
318 let omitted_lines = total_lines - included_lines;
319 let omitted_chars = content.len() - result.len();
320 result.push_str(&format!(
321 "\n\n... [TRUNCATED: {} more lines, {} characters]\n\
322 💡 Tip: Use grep='<pattern>' to filter, or head=N/tail=N to limit lines",
323 omitted_lines, omitted_chars
324 ));
325 }
326
327 result
328}
329
330fn smart_truncate_json(content: &str, max_len: usize) -> String {
332 if let Ok(json) = serde_json::from_str::<serde_json::Value>(content) {
333 if let serde_json::Value::Array(arr) = &json {
335 let total = arr.len();
336 let mut truncated_arr = Vec::new();
337 let mut current_len = 2; for (idx, item) in arr.iter().enumerate() {
340 let item_str = serde_json::to_string(item).unwrap_or_default();
341 if current_len + item_str.len() + 2 > max_len.saturating_sub(150) {
342 let notice = serde_json::json!({
344 "_truncated": true,
345 "_message": format!("... {} more items", total - idx),
346 "_total_items": total,
347 "_shown_items": idx,
348 "_tip": "Use jq='.items[0:10]' to select specific range, or grep to filter"
349 });
350 truncated_arr.push(notice);
351 break;
352 }
353 truncated_arr.push(item.clone());
354 current_len += item_str.len() + 2;
355 }
356
357 return serde_json::to_string_pretty(&truncated_arr)
358 .unwrap_or_else(|_| content[..max_len].to_string());
359 }
360
361 if let Ok(pretty) = serde_json::to_string_pretty(&json) {
363 if pretty.len() <= max_len {
364 return pretty;
365 }
366 }
367 }
368
369 truncate_content(content, max_len, "head")
371}
372
373fn extract_json_path(json: &serde_json::Value, path: &str) -> String {
375 let path = path.trim_start_matches('.');
376 let parts: Vec<&str> = path.split('.').collect();
377
378 let mut current = json.clone();
379 for part in parts {
380 if part.is_empty() {
381 continue;
382 }
383
384 if part.contains('[') {
386 let (field, bracket) = part.split_once('[').unwrap_or((part, ""));
387
388 if !field.is_empty() {
390 current = current.get(field).cloned().unwrap_or(serde_json::Value::Null);
391 }
392
393 if bracket.starts_with(']') {
395 if let serde_json::Value::Array(arr) = current {
397 let extracted: Vec<String> = arr.iter()
398 .map(|v| match v {
399 serde_json::Value::String(s) => s.clone(),
400 other => serde_json::to_string(other).unwrap_or_default(),
401 })
402 .collect();
403 return extracted.join("\n");
404 }
405 } else if let Some(idx_str) = bracket.strip_suffix(']') {
406 if idx_str.contains(':') {
408 let range_parts: Vec<&str> = idx_str.split(':').collect();
410 if let (Ok(start), Ok(end)) = (
411 range_parts.get(0).unwrap_or(&"0").parse::<usize>(),
412 range_parts.get(1).unwrap_or(&"").parse::<usize>()
413 ) {
414 if let serde_json::Value::Array(arr) = current {
415 let sliced: Vec<_> = arr.iter().skip(start).take(end - start).cloned().collect();
416 return serde_json::to_string_pretty(&sliced).unwrap_or_default();
417 }
418 }
419 } else if let Ok(idx) = idx_str.parse::<usize>() {
420 current = current.get(idx).cloned().unwrap_or(serde_json::Value::Null);
421 }
422 }
423 } else {
424 current = current.get(part).cloned().unwrap_or(serde_json::Value::Null);
425 }
426 }
427
428 match current {
429 serde_json::Value::String(s) => s,
430 serde_json::Value::Null => "null".to_string(),
431 other => serde_json::to_string_pretty(&other).unwrap_or_default(),
432 }
433}
434
/// Arguments accepted by the `list_skills` MCP tool.
#[derive(Debug, Deserialize, JsonSchema)]
pub struct ListSkillsRequest {
    #[schemars(description = "Optional skill name to filter tools by")]
    pub skill: Option<String>,

    #[serde(default)]
    #[schemars(description = "Pagination offset (0-based). Use with 'limit' to paginate through large tool lists.")]
    pub offset: Option<usize>,

    #[serde(default)]
    #[schemars(description = "Maximum number of tools to return (default: all). Use with 'offset' for pagination.")]
    pub limit: Option<usize>,
}
452
/// Pagination summary included alongside paginated tool listings.
#[derive(Debug, Serialize)]
pub struct PaginationInfo {
    /// Total number of tools matching the filter, before pagination.
    pub total: usize,
    /// Offset that was applied.
    pub offset: usize,
    /// Limit that was applied, if any.
    pub limit: Option<usize>,
    /// Number of tools actually returned in this page.
    pub returned: usize,
    /// Whether more tools exist beyond this page.
    pub has_more: bool,
}
462
/// MCP server that exposes installed and manifest-declared skills as tools.
/// Cloning is cheap: all shared state is behind `Arc`s.
#[derive(Clone)]
pub struct McpServer {
    /// WASM skill execution engine.
    engine: Arc<SkillEngine>,
    /// Loads/stores per-skill instance configuration.
    instance_manager: Arc<InstanceManager>,
    /// Loads skills (SKILL.md and components) from the local filesystem.
    local_loader: Arc<LocalSkillLoader>,
    /// Cache of discovered tools keyed by "skill@instance:tool".
    tools: Arc<RwLock<HashMap<String, DiscoveredTool>>>,
    /// Optional manifest declaring additional skills and their sources.
    manifest: Option<SkillManifest>,
    /// Lazily initialized search pipeline (not exercised in this chunk — see callers).
    search_pipeline: Arc<RwLock<Option<SearchPipeline>>>,
}
479
480impl McpServer {
481 pub fn new() -> Result<Self> {
483 let engine = Arc::new(SkillEngine::new()?);
484 let instance_manager = Arc::new(InstanceManager::new()?);
485 let local_loader = Arc::new(LocalSkillLoader::new()?);
486
487 Ok(Self {
488 engine,
489 instance_manager,
490 local_loader,
491 tools: Arc::new(RwLock::new(HashMap::new())),
492 manifest: None,
493 search_pipeline: Arc::new(RwLock::new(None)),
494 })
495 }
496
497 pub fn with_manifest(manifest: SkillManifest) -> Result<Self> {
499 let mut server = Self::new()?;
500 server.manifest = Some(manifest);
501 Ok(server)
502 }
503
504 pub async fn discover_tools(&self) -> Result<Vec<DiscoveredTool>> {
506 let mut discovered = Vec::new();
507
508 let home = dirs::home_dir().context("Failed to get home directory")?;
510 let registry_dir = home.join(".skill-engine").join("registry");
511
512 if registry_dir.exists() {
513 for entry in std::fs::read_dir(®istry_dir)? {
514 let entry = entry?;
515 if entry.file_type()?.is_dir() {
516 let skill_name = entry.file_name().to_string_lossy().to_string();
517 if let Ok(tools) = self.discover_skill_tools(&skill_name, None).await {
518 discovered.extend(tools);
519 }
520 }
521 }
522 }
523
524 if let Some(ref manifest) = self.manifest {
526 for skill_name in manifest.skill_names() {
527 if let Ok(resolved) = manifest.resolve_instance(skill_name, None) {
528 if let Ok(tools) = self
529 .discover_skill_tools_from_path(
530 skill_name,
531 &resolved.instance_name,
532 &PathBuf::from(&resolved.source),
533 )
534 .await
535 {
536 discovered.extend(tools);
537 }
538 }
539 }
540 }
541
542 let mut cache = self.tools.write().await;
544 for tool in &discovered {
545 let key = format!("{}@{}:{}", tool.skill_name, tool.instance_name, tool.tool_name);
546 cache.insert(key, tool.clone());
547 }
548
549 Ok(discovered)
550 }
551
552 async fn discover_skill_tools(
554 &self,
555 skill_name: &str,
556 instance_name: Option<&str>,
557 ) -> Result<Vec<DiscoveredTool>> {
558 let instances = self
559 .instance_manager
560 .list_instances(skill_name)
561 .unwrap_or_default();
562
563 let target_instances: Vec<String> = if let Some(name) = instance_name {
564 vec![name.to_string()]
565 } else if instances.is_empty() {
566 vec!["default".to_string()]
567 } else {
568 instances
569 };
570
571 let mut tools = Vec::new();
572
573 for instance in target_instances {
574 let home = dirs::home_dir().context("Failed to get home directory")?;
576 let skill_path = home
577 .join(".skill-engine")
578 .join("registry")
579 .join(skill_name);
580
581 if skill_path.exists() {
582 if let Ok(skill_tools) =
583 self.discover_skill_tools_from_path(skill_name, &instance, &skill_path)
584 .await
585 {
586 tools.extend(skill_tools);
587 }
588 }
589 }
590
591 Ok(tools)
592 }
593
    /// Discover the tools a skill at `skill_path` offers, for one instance.
    ///
    /// Two-phase strategy: first read tool docs from SKILL.md (cheap, no
    /// execution); only if that yields nothing, load the WASM component and
    /// ask the executor for its tool list. Failures in the second phase are
    /// swallowed, yielding an empty list.
    async fn discover_skill_tools_from_path(
        &self,
        skill_name: &str,
        instance_name: &str,
        skill_path: &PathBuf,
    ) -> Result<Vec<DiscoveredTool>> {
        let mut tools = Vec::new();

        // Phase 1: static metadata from SKILL.md.
        if let Some(skill_md) = self.local_loader.load_skill_md(skill_path) {
            for (tool_name, tool_doc) in skill_md.tool_docs {
                // SKILL.md does not carry types, so parameters default to "string".
                let parameters: Vec<ToolParameter> = tool_doc
                    .parameters
                    .iter()
                    .map(|p| ToolParameter {
                        name: p.name.clone(),
                        param_type: "string".to_string(),
                        description: p.description.clone(),
                        required: p.required,
                    })
                    .collect();

                tools.push(DiscoveredTool {
                    skill_name: skill_name.to_string(),
                    instance_name: instance_name.to_string(),
                    tool_name,
                    description: tool_doc.description,
                    parameters,
                    source_path: Some(skill_path.clone()),
                });
            }
        }

        // Phase 2: fall back to loading the WASM component and querying it.
        if tools.is_empty() {
            if let Ok(_component) = self.local_loader.load_skill(skill_path, &self.engine).await {
                let config = self
                    .instance_manager
                    .load_instance(skill_name, instance_name)
                    .unwrap_or_default();

                if let Ok(executor) = SkillExecutor::load(
                    self.engine.clone(),
                    skill_path,
                    skill_name.to_string(),
                    instance_name.to_string(),
                    config,
                )
                .await
                {
                    if let Ok(skill_tools) = executor.get_tools().await {
                        for tool in skill_tools {
                            let parameters: Vec<ToolParameter> = tool
                                .parameters
                                .iter()
                                .map(|p| ToolParameter {
                                    name: p.name.clone(),
                                    // Debug-render the executor's typed parameter kind.
                                    param_type: format!("{:?}", p.param_type),
                                    description: p.description.clone(),
                                    required: p.required,
                                })
                                .collect();

                            tools.push(DiscoveredTool {
                                skill_name: skill_name.to_string(),
                                instance_name: instance_name.to_string(),
                                tool_name: tool.name,
                                description: tool.description,
                                parameters,
                                source_path: Some(skill_path.clone()),
                            });
                        }
                    }
                }
            }
        }

        Ok(tools)
    }
676
    /// Execute `tool_name` on `skill_name`/`instance_name` with JSON args.
    ///
    /// Source resolution: a manifest entry wins (relative sources are resolved
    /// against the manifest base dir); otherwise the local registry at
    /// `~/.skill-engine/registry/<skill>`. If a WASM component is found it is
    /// executed; a successful WASM result whose output starts with
    /// "Command: " is treated as a delegated host command and re-executed
    /// natively. With no WASM component, the skill runs as a native CLI tool.
    pub async fn execute_skill_tool(
        &self,
        skill_name: &str,
        instance_name: &str,
        tool_name: &str,
        args: HashMap<String, serde_json::Value>,
    ) -> Result<skill_runtime::ExecutionResult> {
        let skill_path = if let Some(ref manifest) = self.manifest {
            if let Some(skill) = manifest.get_skill(skill_name) {
                let source = &skill.source;
                // Relative manifest sources are resolved against the manifest dir.
                if source.starts_with("./") || source.starts_with("../") {
                    manifest.base_dir.join(source)
                } else {
                    PathBuf::from(source)
                }
            } else {
                let home = dirs::home_dir().context("Failed to get home directory")?;
                home.join(".skill-engine")
                    .join("registry")
                    .join(skill_name)
            }
        } else {
            let home = dirs::home_dir().context("Failed to get home directory")?;
            home.join(".skill-engine")
                .join("registry")
                .join(skill_name)
        };

        // Flatten JSON values to strings; non-string scalars are rendered and
        // stripped of surrounding quotes.
        let args_vec: Vec<(String, String)> = args
            .iter()
            .map(|(k, v)| {
                let value = match v {
                    serde_json::Value::String(s) => s.clone(),
                    other => other.to_string().trim_matches('"').to_string(),
                };
                (k.clone(), value)
            })
            .collect();

        let wasm_path = self.find_wasm_in_path(&skill_path);

        if let Ok(wasm_file) = wasm_path {
            let config = self
                .instance_manager
                .load_instance(skill_name, instance_name)
                .unwrap_or_default();

            let executor = SkillExecutor::load(
                self.engine.clone(),
                &wasm_file,
                skill_name.to_string(),
                instance_name.to_string(),
                config,
            )
            .await?;

            let result = executor.execute_tool(tool_name, args_vec).await?;

            // WASM skills can delegate a host command via a "Command: " prefix.
            if result.success && result.output.starts_with("Command: ") {
                return self.execute_native_command(&result.output).await;
            }

            Ok(result)
        } else {
            // No WASM component: run the skill as a native CLI tool.
            self.execute_native_skill(skill_name, tool_name, args_vec, &skill_path).await
        }
    }
752
753 fn find_wasm_in_path(&self, path: &PathBuf) -> Result<PathBuf> {
755 if path.extension().map_or(false, |ext| ext == "wasm") && path.exists() {
757 return Ok(path.clone());
758 }
759
760 if path.is_dir() {
762 let candidates = vec![
763 path.join("skill.wasm"),
764 path.join("dist/skill.wasm"),
765 ];
766
767 for candidate in candidates {
768 if candidate.exists() {
769 return Ok(candidate);
770 }
771 }
772 }
773
774 anyhow::bail!("No WASM file found in: {}", path.display())
775 }
776
    /// Execute a skill that has no WASM component by shelling out to a native
    /// CLI tool derived from its SKILL.md.
    ///
    /// The command line is built by `build_native_command`, validated against
    /// a hard allowlist of known binaries, then run with piped stdout/stderr.
    /// Command failures are reported inside the returned `ExecutionResult`
    /// rather than as `Err`; only a missing SKILL.md is a hard error.
    async fn execute_native_skill(
        &self,
        skill_name: &str,
        tool_name: &str,
        args: Vec<(String, String)>,
        skill_path: &PathBuf,
    ) -> Result<skill_runtime::ExecutionResult> {
        use std::process::Stdio;
        use tokio::process::Command;

        // Native skills must describe themselves via SKILL.md.
        let skill_md = self.local_loader.load_skill_md(skill_path)
            .ok_or_else(|| anyhow::anyhow!("No SKILL.md found for native skill: {}", skill_name))?;

        let command_str = self.build_native_command(skill_name, tool_name, &args, &skill_md)?;

        tracing::info!(command = %command_str, "Executing native command");

        // NOTE(review): whitespace splitting means quoted/space-containing
        // arguments are not supported — confirm generated commands never need them.
        let parts: Vec<&str> = command_str.split_whitespace().collect();
        if parts.is_empty() {
            return Ok(skill_runtime::ExecutionResult {
                success: false,
                output: String::new(),
                error_message: Some("Empty command".to_string()),
                metadata: None,
            });
        }

        let program = parts[0];
        let cmd_args = &parts[1..];

        // Security allowlist: only well-known CLIs may be spawned.
        let allowed_commands = ["kubectl", "helm", "git", "curl", "jq", "aws", "gcloud", "az", "docker", "terraform"];
        if !allowed_commands.contains(&program) {
            return Ok(skill_runtime::ExecutionResult {
                success: false,
                output: String::new(),
                error_message: Some(format!(
                    "Command '{}' not allowed. Allowed: {}",
                    program,
                    allowed_commands.join(", ")
                )),
                metadata: None,
            });
        }

        let result = Command::new(program)
            .args(cmd_args)
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .output()
            .await;

        match result {
            Ok(output) => {
                let stdout = String::from_utf8_lossy(&output.stdout).to_string();
                let stderr = String::from_utf8_lossy(&output.stderr).to_string();

                if output.status.success() {
                    Ok(skill_runtime::ExecutionResult {
                        success: true,
                        output: stdout,
                        // stderr may carry warnings even on success.
                        error_message: if stderr.is_empty() {
                            None
                        } else {
                            Some(stderr)
                        },
                        metadata: None,
                    })
                } else {
                    Ok(skill_runtime::ExecutionResult {
                        success: false,
                        output: stdout,
                        error_message: Some(if stderr.is_empty() {
                            format!("Command exited with status: {}", output.status)
                        } else {
                            stderr
                        }),
                        metadata: None,
                    })
                }
            }
            // Spawn failure (e.g. binary not installed) is a soft error too.
            Err(e) => Ok(skill_runtime::ExecutionResult {
                success: false,
                output: String::new(),
                error_message: Some(format!("Failed to execute command: {}", e)),
                metadata: None,
            }),
        }
    }
871
    /// Build a single CLI command line for a native (non-WASM) skill tool.
    ///
    /// Well-known skill names map to their canonical binary; otherwise the
    /// first entry of the SKILL.md `allowed_tools` frontmatter (or the skill
    /// name itself) is used. The "kubernetes" skill gets a dedicated per-tool
    /// mapping from named args to kubectl flags and positionals; all other
    /// skills append argument values positionally after the tool name.
    ///
    /// NOTE(review): parts are joined with single spaces and never quoted, so
    /// argument values containing whitespace will be re-split by the executor —
    /// confirm callers never pass such values.
    fn build_native_command(
        &self,
        skill_name: &str,
        tool_name: &str,
        args: &[(String, String)],
        skill_md: &skill_runtime::SkillMdContent,
    ) -> Result<String> {
        // Pick the binary this skill drives.
        let base_command = match skill_name {
            "kubernetes" => "kubectl",
            "aws" => "aws",
            "docker" => "docker",
            "terraform" => "terraform",
            "helm" => "helm",
            _ => {
                // Fall back to the first allowed tool from SKILL.md, else the skill name.
                if let Some(ref allowed) = skill_md.frontmatter.allowed_tools {
                    allowed.split(',').next().unwrap_or(skill_name).trim()
                } else {
                    skill_name
                }
            }
        };

        let mut cmd_parts = vec![base_command.to_string()];

        // kubectl tools get explicit argument mappings; args are consumed in
        // their given order, so positional values keep their relative order.
        if skill_name == "kubernetes" {
            match tool_name {
                "get" => {
                    cmd_parts.push("get".to_string());
                    for (key, value) in args {
                        match key.as_str() {
                            "resource" => cmd_parts.push(value.clone()),
                            "name" => cmd_parts.push(value.clone()),
                            "namespace" | "n" => {
                                cmd_parts.push("-n".to_string());
                                cmd_parts.push(value.clone());
                            }
                            "output" | "o" => {
                                cmd_parts.push("-o".to_string());
                                cmd_parts.push(value.clone());
                            }
                            "all-namespaces" | "A" => {
                                // Boolean flag: emitted only when explicitly "true".
                                if value == "true" {
                                    cmd_parts.push("-A".to_string());
                                }
                            }
                            "selector" | "l" => {
                                cmd_parts.push("-l".to_string());
                                cmd_parts.push(value.clone());
                            }
                            _ => {}
                        }
                    }
                }
                "describe" => {
                    cmd_parts.push("describe".to_string());
                    for (key, value) in args {
                        match key.as_str() {
                            "resource" => cmd_parts.push(value.clone()),
                            "name" => cmd_parts.push(value.clone()),
                            "namespace" | "n" => {
                                cmd_parts.push("-n".to_string());
                                cmd_parts.push(value.clone());
                            }
                            _ => {}
                        }
                    }
                }
                "logs" => {
                    cmd_parts.push("logs".to_string());
                    for (key, value) in args {
                        match key.as_str() {
                            "pod" | "name" => cmd_parts.push(value.clone()),
                            "container" | "c" => {
                                cmd_parts.push("-c".to_string());
                                cmd_parts.push(value.clone());
                            }
                            "namespace" | "n" => {
                                cmd_parts.push("-n".to_string());
                                cmd_parts.push(value.clone());
                            }
                            "tail" => {
                                cmd_parts.push("--tail".to_string());
                                cmd_parts.push(value.clone());
                            }
                            "follow" | "f" => {
                                if value == "true" {
                                    cmd_parts.push("-f".to_string());
                                }
                            }
                            _ => {}
                        }
                    }
                }
                "cluster-info" => {
                    cmd_parts.push("cluster-info".to_string());
                }
                "config" => {
                    cmd_parts.push("config".to_string());
                    for (key, value) in args {
                        match key.as_str() {
                            "subcommand" => cmd_parts.push(value.clone()),
                            "context" => cmd_parts.push(value.clone()),
                            _ => {}
                        }
                    }
                }
                "create" => {
                    cmd_parts.push("create".to_string());
                    for (key, value) in args {
                        match key.as_str() {
                            "resource" => cmd_parts.push(value.clone()),
                            "name" => cmd_parts.push(value.clone()),
                            "namespace" | "n" => {
                                cmd_parts.push("-n".to_string());
                                cmd_parts.push(value.clone());
                            }
                            "image" => {
                                cmd_parts.push("--image".to_string());
                                cmd_parts.push(value.clone());
                            }
                            _ => {}
                        }
                    }
                }
                "delete" => {
                    cmd_parts.push("delete".to_string());
                    for (key, value) in args {
                        match key.as_str() {
                            "resource" => cmd_parts.push(value.clone()),
                            "name" => cmd_parts.push(value.clone()),
                            "namespace" | "n" => {
                                cmd_parts.push("-n".to_string());
                                cmd_parts.push(value.clone());
                            }
                            _ => {}
                        }
                    }
                }
                "scale" => {
                    cmd_parts.push("scale".to_string());
                    let mut resource_set = false;
                    for (key, value) in args {
                        match key.as_str() {
                            "resource" => {
                                cmd_parts.push(value.clone());
                                resource_set = true;
                            }
                            "name" => {
                                // When a resource was already pushed, merge into
                                // kubectl's "resource/name" form; otherwise push bare.
                                if resource_set {
                                    if let Some(last) = cmd_parts.last_mut() {
                                        last.push('/');
                                        last.push_str(value);
                                    }
                                } else {
                                    cmd_parts.push(value.clone());
                                }
                            }
                            "replicas" => {
                                cmd_parts.push(format!("--replicas={}", value));
                            }
                            "namespace" | "n" => {
                                cmd_parts.push("-n".to_string());
                                cmd_parts.push(value.clone());
                            }
                            _ => {}
                        }
                    }
                }
                "top" => {
                    cmd_parts.push("top".to_string());
                    for (key, value) in args {
                        match key.as_str() {
                            "resource" => cmd_parts.push(value.clone()),
                            "name" => cmd_parts.push(value.clone()),
                            "namespace" | "n" => {
                                cmd_parts.push("-n".to_string());
                                cmd_parts.push(value.clone());
                            }
                            _ => {}
                        }
                    }
                }
                "rollout" => {
                    cmd_parts.push("rollout".to_string());
                    for (key, value) in args {
                        match key.as_str() {
                            "action" => cmd_parts.push(value.clone()),
                            "resource" => cmd_parts.push(value.clone()),
                            "name" => cmd_parts.push(value.clone()),
                            "namespace" | "n" => {
                                cmd_parts.push("-n".to_string());
                                cmd_parts.push(value.clone());
                            }
                            _ => {}
                        }
                    }
                }
                "apply" => {
                    cmd_parts.push("apply".to_string());
                    for (key, value) in args {
                        match key.as_str() {
                            "file" | "f" => {
                                cmd_parts.push("-f".to_string());
                                cmd_parts.push(value.clone());
                            }
                            "namespace" | "n" => {
                                cmd_parts.push("-n".to_string());
                                cmd_parts.push(value.clone());
                            }
                            _ => {}
                        }
                    }
                }
                "exec" => {
                    // exec needs a fixed argument order, so collect first and
                    // assemble afterwards: [-n ns] pod [-c container] -- command...
                    cmd_parts.push("exec".to_string());
                    let mut pod_name = String::new();
                    let mut container = String::new();
                    let mut namespace = String::new();
                    let mut command = String::new();

                    for (key, value) in args {
                        match key.as_str() {
                            "pod" | "name" => pod_name = value.clone(),
                            "container" | "c" => container = value.clone(),
                            "namespace" | "n" => namespace = value.clone(),
                            "command" => command = value.clone(),
                            _ => {}
                        }
                    }

                    if !namespace.is_empty() {
                        cmd_parts.push("-n".to_string());
                        cmd_parts.push(namespace);
                    }
                    cmd_parts.push(pod_name);
                    if !container.is_empty() {
                        cmd_parts.push("-c".to_string());
                        cmd_parts.push(container);
                    }
                    cmd_parts.push("--".to_string());
                    cmd_parts.extend(command.split_whitespace().map(|s| s.to_string()));
                }
                "label" => {
                    cmd_parts.push("label".to_string());
                    for (key, value) in args {
                        match key.as_str() {
                            "resource" => cmd_parts.push(value.clone()),
                            "name" => cmd_parts.push(value.clone()),
                            "labels" => cmd_parts.push(value.clone()),
                            "namespace" | "n" => {
                                cmd_parts.push("-n".to_string());
                                cmd_parts.push(value.clone());
                            }
                            _ => {}
                        }
                    }
                }
                "annotate" => {
                    cmd_parts.push("annotate".to_string());
                    for (key, value) in args {
                        match key.as_str() {
                            "resource" => cmd_parts.push(value.clone()),
                            "name" => cmd_parts.push(value.clone()),
                            "annotations" => cmd_parts.push(value.clone()),
                            "namespace" | "n" => {
                                cmd_parts.push("-n".to_string());
                                cmd_parts.push(value.clone());
                            }
                            _ => {}
                        }
                    }
                }
                "cordon" => {
                    cmd_parts.push("cordon".to_string());
                    for (key, value) in args {
                        if key == "node" || key == "name" {
                            cmd_parts.push(value.clone());
                        }
                    }
                }
                "uncordon" => {
                    cmd_parts.push("uncordon".to_string());
                    for (key, value) in args {
                        if key == "node" || key == "name" {
                            cmd_parts.push(value.clone());
                        }
                    }
                }
                "drain" => {
                    cmd_parts.push("drain".to_string());
                    for (key, value) in args {
                        match key.as_str() {
                            "node" | "name" => cmd_parts.push(value.clone()),
                            "ignore-daemonsets" => {
                                if value == "true" {
                                    cmd_parts.push("--ignore-daemonsets".to_string());
                                }
                            }
                            "delete-emptydir-data" => {
                                if value == "true" {
                                    cmd_parts.push("--delete-emptydir-data".to_string());
                                }
                            }
                            "force" => {
                                if value == "true" {
                                    cmd_parts.push("--force".to_string());
                                }
                            }
                            _ => {}
                        }
                    }
                }
                "taint" => {
                    cmd_parts.push("taint".to_string());
                    cmd_parts.push("nodes".to_string());
                    for (key, value) in args {
                        match key.as_str() {
                            "node" | "name" => cmd_parts.push(value.clone()),
                            "taint" => cmd_parts.push(value.clone()),
                            _ => {}
                        }
                    }
                }
                "raw" => {
                    // Escape hatch: the caller supplies the full command line.
                    // (The cleared parts are discarded — we return directly.)
                    cmd_parts.clear();
                    for (key, value) in args {
                        if key == "command" {
                            return Ok(value.clone());
                        }
                    }
                    return Err(anyhow::anyhow!("raw tool requires 'command' argument"));
                }
                _ => {
                    // Unknown kubectl tool: pass the tool name and raw values through.
                    cmd_parts.push(tool_name.to_string());
                    for (_, value) in args {
                        cmd_parts.push(value.clone());
                    }
                }
            }
        } else {
            // Non-kubernetes skills: tool name followed by values positionally.
            cmd_parts.push(tool_name.to_string());
            for (_, value) in args {
                cmd_parts.push(value.clone());
            }
        }

        Ok(cmd_parts.join(" "))
    }
1230
1231 async fn execute_native_command(
1233 &self,
1234 output: &str,
1235 ) -> Result<skill_runtime::ExecutionResult> {
1236 use std::process::Stdio;
1237 use tokio::process::Command;
1238
1239 let first_line = output.lines().next().unwrap_or("");
1241 let command_str = first_line.strip_prefix("Command: ").unwrap_or(first_line);
1242
1243 let parts: Vec<&str> = command_str.split_whitespace().collect();
1245 if parts.is_empty() {
1246 return Ok(skill_runtime::ExecutionResult {
1247 success: false,
1248 output: String::new(),
1249 error_message: Some("Empty command".to_string()),
1250 metadata: None,
1251 });
1252 }
1253
1254 let program = parts[0];
1255 let cmd_args = &parts[1..];
1256
1257 let allowed_commands = ["kubectl", "helm", "git", "curl", "jq", "aws", "gcloud", "az", "docker", "terraform"];
1259 if !allowed_commands.contains(&program) {
1260 return Ok(skill_runtime::ExecutionResult {
1261 success: false,
1262 output: String::new(),
1263 error_message: Some(format!(
1264 "Command '{}' not allowed. Allowed: {}",
1265 program,
1266 allowed_commands.join(", ")
1267 )),
1268 metadata: None,
1269 });
1270 }
1271
1272 tracing::info!(command = %command_str, "Executing native command");
1273
1274 let result = Command::new(program)
1276 .args(cmd_args)
1277 .stdout(Stdio::piped())
1278 .stderr(Stdio::piped())
1279 .output()
1280 .await;
1281
1282 match result {
1283 Ok(output) => {
1284 let stdout = String::from_utf8_lossy(&output.stdout).to_string();
1285 let stderr = String::from_utf8_lossy(&output.stderr).to_string();
1286
1287 if output.status.success() {
1288 Ok(skill_runtime::ExecutionResult {
1289 success: true,
1290 output: stdout,
1291 error_message: if stderr.is_empty() {
1292 None
1293 } else {
1294 Some(stderr)
1295 },
1296 metadata: None,
1297 })
1298 } else {
1299 Ok(skill_runtime::ExecutionResult {
1300 success: false,
1301 output: stdout,
1302 error_message: Some(if stderr.is_empty() {
1303 format!("Command exited with status: {}", output.status)
1304 } else {
1305 stderr
1306 }),
1307 metadata: None,
1308 })
1309 }
1310 }
1311 Err(e) => Ok(skill_runtime::ExecutionResult {
1312 success: false,
1313 output: String::new(),
1314 error_message: Some(format!("Failed to execute command: {}", e)),
1315 metadata: None,
1316 }),
1317 }
1318 }
1319
1320 pub async fn list_skills_output(
1322 &self,
1323 filter_skill: Option<&str>,
1324 offset: Option<usize>,
1325 limit: Option<usize>,
1326 ) -> String {
1327 let tools = self.tools.read().await;
1328
1329 let mut all_tools: Vec<&DiscoveredTool> = tools.values()
1331 .filter(|tool| {
1332 filter_skill.map_or(true, |filter| tool.skill_name == filter)
1333 })
1334 .collect();
1335
1336 all_tools.sort_by(|a, b| {
1338 (&a.skill_name, &a.tool_name).cmp(&(&b.skill_name, &b.tool_name))
1339 });
1340
1341 let total = all_tools.len();
1342 let offset = offset.unwrap_or(0);
1343
1344 let paginated_tools: Vec<&DiscoveredTool> = if let Some(limit) = limit {
1346 all_tools.into_iter().skip(offset).take(limit).collect()
1347 } else {
1348 all_tools.into_iter().skip(offset).collect()
1349 };
1350
1351 let returned = paginated_tools.len();
1352 let has_more = offset + returned < total;
1353
1354 let mut output = String::new();
1355
1356 if total == 0 {
1357 output.push_str("No skills found. Install skills with `skill install <source>`\n");
1358 return output;
1359 }
1360
1361 output.push_str(&format!(
1363 "📊 **Pagination**: Showing {} of {} tools",
1364 returned, total
1365 ));
1366 if offset > 0 || limit.is_some() {
1367 output.push_str(&format!(" (offset: {}", offset));
1368 if let Some(l) = limit {
1369 output.push_str(&format!(", limit: {}", l));
1370 }
1371 output.push(')');
1372 }
1373 if has_more {
1374 let next_offset = offset + returned;
1375 output.push_str(&format!("\n💡 **Next page**: Use offset={}", next_offset));
1376 }
1377 output.push_str("\n\n");
1378
1379 let mut grouped: HashMap<String, Vec<&DiscoveredTool>> = HashMap::new();
1381 for tool in paginated_tools {
1382 grouped.entry(tool.skill_name.clone()).or_default().push(tool);
1383 }
1384
1385 output.push_str("Available Skills and Tools:\n\n");
1386
1387 let mut skill_names: Vec<_> = grouped.keys().cloned().collect();
1389 skill_names.sort();
1390
1391 for skill_name in skill_names {
1392 let skill_tools = grouped.get(&skill_name).unwrap();
1393 output.push_str(&format!("## {}\n", skill_name));
1394 for tool in skill_tools {
1395 output.push_str(&format!(" - **{}**: {}\n", tool.tool_name, tool.description));
1396 if !tool.parameters.is_empty() {
1397 for param in &tool.parameters {
1398 let req = if param.required { " (required)" } else { "" };
1399 output.push_str(&format!(" - `{}`: {}{}\n", param.name, param.description, req));
1400 }
1401 }
1402 }
1403 output.push('\n');
1404 }
1405
1406 output
1407 }
1408
1409 pub async fn search_skills(&self, query: &str, top_k: usize) -> Result<String> {
1411 let tools = self.tools.read().await;
1412
1413 if tools.is_empty() {
1414 return Ok("No skills installed. Install skills with `skill install <source>`".to_string());
1415 }
1416
1417 let mut pipeline_lock = self.search_pipeline.write().await;
1419 if pipeline_lock.is_none() {
1420 let config = SearchConfig::default();
1421 let pipeline = SearchPipeline::from_config(config).await
1422 .map_err(|e| anyhow::anyhow!("Failed to create search pipeline: {}", e))?;
1423 *pipeline_lock = Some(pipeline);
1424 }
1425 let pipeline = pipeline_lock.as_ref().unwrap();
1426
1427 let index_docs: Vec<IndexDocument> = tools.values().map(|t| {
1429 let param_text = t.parameters.iter()
1431 .map(|p| {
1432 let req = if p.required { "required" } else { "optional" };
1433 format!("{} ({}, {}): {}", p.name, p.param_type, req, p.description)
1434 })
1435 .collect::<Vec<_>>()
1436 .join("; ");
1437
1438 let full_text = format!(
1439 "Tool: {} | Description: {} | Skill: {} | Parameters: {}",
1440 t.tool_name,
1441 t.description,
1442 t.skill_name,
1443 if param_text.is_empty() { "none".to_string() } else { param_text }
1444 );
1445
1446 IndexDocument {
1447 id: format!("{}@{}:{}", t.skill_name, t.instance_name, t.tool_name),
1448 content: full_text,
1449 metadata: DocumentMetadata {
1450 skill_name: Some(t.skill_name.clone()),
1451 instance_name: Some(t.instance_name.clone()),
1452 tool_name: Some(t.tool_name.clone()),
1453 category: None,
1454 tags: Vec::new(),
1455 custom: HashMap::new(),
1456 },
1457 }
1458 }).collect();
1459
1460 pipeline.index_documents(index_docs).await
1462 .map_err(|e| anyhow::anyhow!("Failed to index tools: {}", e))?;
1463
1464 let results = pipeline.search(query, top_k).await
1466 .map_err(|e| anyhow::anyhow!("Search failed: {}", e))?;
1467
1468 let mut output = String::new();
1470 output.push_str(&format!("## 🔍 Search Results for: \"{}\"\n\n", query));
1471
1472 if results.is_empty() {
1473 output.push_str("No matching tools found for your query.\n\n");
1474 output.push_str("**Suggestions:**\n");
1475 output.push_str("- Try different keywords\n");
1476 output.push_str("- Use `list_skills` to see all available tools\n");
1477 output.push_str("- Install more skills with `skill install <source>`\n");
1478 } else {
1479 output.push_str(&format!("Found **{}** relevant tools:\n\n", results.len()));
1480
1481 for (i, result) in results.iter().enumerate() {
1482 let score_pct = (result.score * 100.0) as u32;
1483 let relevance = match score_pct {
1484 80..=100 => "🟢 Excellent match",
1485 60..=79 => "🟡 Good match",
1486 40..=59 => "🟠 Fair match",
1487 _ => "🔴 Partial match",
1488 };
1489
1490 let tool_info = tools.get(&result.id);
1492
1493 let skill_name = result.metadata.skill_name.as_deref().unwrap_or("unknown");
1494 let instance_name = result.metadata.instance_name.as_deref().unwrap_or("default");
1495 let tool_name = result.metadata.tool_name.as_deref().unwrap_or("unknown");
1496
1497 output.push_str(&format!("---\n\n### {}. **{}** ({}% - {})\n\n",
1498 i + 1, tool_name, score_pct, relevance));
1499
1500 if let Some(t) = tool_info {
1502 output.push_str(&format!("**Description:** {}\n\n", t.description));
1503
1504 if !t.parameters.is_empty() {
1506 output.push_str("**Parameters:**\n");
1507 for param in &t.parameters {
1508 let req_badge = if param.required { "🔴 required" } else { "⚪ optional" };
1509 output.push_str(&format!(
1510 "- `{}` ({}) - {} [{}]\n",
1511 param.name, param.param_type, param.description, req_badge
1512 ));
1513 }
1514 output.push('\n');
1515 }
1516
1517 output.push_str("**How to Execute:**\n");
1519 output.push_str("```json\n");
1520 output.push_str("{\n");
1521 output.push_str(&format!(" \"skill\": \"{}\",\n", skill_name));
1522 output.push_str(&format!(" \"tool\": \"{}\",\n", tool_name));
1523 output.push_str(&format!(" \"instance\": \"{}\",\n", instance_name));
1524
1525 if !t.parameters.is_empty() {
1526 output.push_str(" \"args\": {\n");
1527 for (idx, param) in t.parameters.iter().enumerate() {
1528 let comma = if idx < t.parameters.len() - 1 { "," } else { "" };
1529 let placeholder = match param.param_type.as_str() {
1530 "string" => "\"<value>\"",
1531 "number" | "integer" => "0",
1532 "boolean" => "true",
1533 _ => "\"<value>\"",
1534 };
1535 let comment = if param.required { " // required" } else { " // optional" };
1536 output.push_str(&format!(" \"{}\": {}{}{}\n",
1537 param.name, placeholder, comma, comment));
1538 }
1539 output.push_str(" }\n");
1540 } else {
1541 output.push_str(" \"args\": {}\n");
1542 }
1543 output.push_str("}\n");
1544 output.push_str("```\n\n");
1545 } else {
1546 output.push_str(&format!("**Skill:** {} | **Instance:** {}\n\n", skill_name, instance_name));
1548 output.push_str(&format!(
1549 "**Execute with:** `execute(skill='{}', tool='{}', instance='{}')`\n\n",
1550 skill_name, tool_name, instance_name
1551 ));
1552 }
1553 }
1554
1555 output.push_str("---\n\n");
1557 output.push_str("**💡 Tips:**\n");
1558 output.push_str("- Use `execute` tool with the JSON structure shown above\n");
1559 output.push_str("- Required parameters must be provided\n");
1560 output.push_str("- Use `list_skills` to see all available tools\n");
1561 }
1562
1563 Ok(output)
1564 }
1565
    /// Generate AI-powered usage examples for a skill's tools (only when the
    /// crate is built with the `ai-ingestion` feature).
    ///
    /// Reads the skill's `SKILL.md` from `~/.skill-engine/registry/<skill>`,
    /// optionally narrows to a single tool, then streams generation events
    /// from the search pipeline into a markdown report.
    ///
    /// NOTE(review): `_count` is currently ignored — the number of examples
    /// per tool appears to be decided by the pipeline; confirm whether the
    /// parameter should be wired through.
    ///
    /// # Errors
    /// Fails when the skill or tool is missing, `SKILL.md` cannot be parsed,
    /// AI ingestion is disabled in the config, the pipeline cannot be
    /// created, or no LLM provider is available.
    #[cfg(feature = "ai-ingestion")]
    pub async fn generate_examples(
        &self,
        skill_name: &str,
        tool_name: Option<&str>,
        _count: usize,
    ) -> Result<String> {
        use skill_runtime::{SearchConfig, SearchPipeline, GenerationEvent, IndexDocument, DocumentMetadata, parse_skill_md};
        use tokio_stream::StreamExt;

        // Locate the installed skill in the on-disk registry.
        let home = dirs::home_dir().context("Failed to get home directory")?;
        let skill_dir = home.join(".skill-engine").join("registry").join(skill_name);

        if !skill_dir.exists() {
            anyhow::bail!("Skill '{}' not found in registry", skill_name);
        }

        let skill_md_path = skill_dir.join("SKILL.md");
        if !skill_md_path.exists() {
            anyhow::bail!("No SKILL.md found for skill '{}'", skill_name);
        }

        let skill_md = parse_skill_md(&skill_md_path)
            .context("Failed to parse SKILL.md")?;

        // Keep either all documented tools or only the one requested.
        let tools: Vec<_> = skill_md.tool_docs.into_values()
            .filter(|t| tool_name.map_or(true, |name| t.name == name))
            .collect();

        if tools.is_empty() {
            if let Some(name) = tool_name {
                anyhow::bail!("Tool '{}' not found in skill '{}'", name, skill_name);
            }
            anyhow::bail!("No tools found in skill '{}'", skill_name);
        }

        // User config overrides the built-in defaults when present.
        let config_path = home.join(".skill-engine").join("search.toml");
        let config = if config_path.exists() {
            SearchConfig::from_toml_file(&config_path)?
        } else {
            SearchConfig::default()
        };

        if !config.ai_ingestion.enabled {
            anyhow::bail!(
                "AI ingestion not enabled. Enable it with `skill setup` or \
                edit ~/.skill-engine/search.toml"
            );
        }

        let pipeline = SearchPipeline::from_config(config).await
            .context("Failed to create search pipeline")?;

        if !pipeline.has_example_generator() {
            anyhow::bail!("LLM provider not available. Check your AI ingestion configuration.");
        }

        // One index document per tool; parameters are summarized inline so
        // the generator sees the full call signature.
        let documents: Vec<IndexDocument> = tools.iter()
            .map(|t| IndexDocument {
                id: format!("{}:{}", skill_name, t.name),
                content: format!(
                    "Tool: {}\nDescription: {}\nParameters: {}",
                    t.name, t.description,
                    t.parameters.iter()
                        .map(|p| format!("{} ({})", p.name, p.param_type))
                        .collect::<Vec<_>>()
                        .join(", ")
                ),
                metadata: DocumentMetadata {
                    skill_name: Some(skill_name.to_string()),
                    tool_name: Some(t.name.clone()),
                    ..Default::default()
                },
            })
            .collect();

        // Stream generation events and fold them into a markdown report.
        let mut stream = Box::pin(pipeline.index_documents_stream(documents, tools.clone()));
        // NOTE(review): `all_examples` is collected but never read after the
        // loop — possibly kept for future use; confirm or drop.
        let mut all_examples = Vec::new();
        let mut output = String::new();

        output.push_str(&format!("## Generated Examples for {}\n\n", skill_name));

        while let Some(event) = stream.next().await {
            match event {
                GenerationEvent::Started { tool_name, .. } => {
                    output.push_str(&format!("### {}\n\n", tool_name));
                }
                GenerationEvent::Example { example } => {
                    all_examples.push(example.clone());
                    output.push_str(&format!(
                        "**Command:** `{}`\n**Explanation:** {}\n\n",
                        example.command, example.explanation
                    ));
                }
                GenerationEvent::ToolCompleted { examples_generated, valid_examples, .. } => {
                    output.push_str(&format!(
                        "_Generated {} examples ({} valid)_\n\n",
                        examples_generated, valid_examples
                    ));
                }
                GenerationEvent::Error { message, tool_name, .. } => {
                    // Errors are reported inline rather than aborting the run.
                    let prefix = tool_name.map(|n| format!("[{}] ", n)).unwrap_or_default();
                    output.push_str(&format!("⚠️ {}Error: {}\n\n", prefix, message));
                }
                GenerationEvent::Completed { total_examples, total_valid, total_tools, .. } => {
                    output.push_str(&format!(
                        "---\n\n**Summary:** {} examples ({} valid) for {} tools\n",
                        total_examples, total_valid, total_tools
                    ));
                }
                _ => {}
            }
        }

        Ok(output)
    }
1690
1691 #[cfg(not(feature = "ai-ingestion"))]
1693 pub async fn generate_examples(
1694 &self,
1695 _skill_name: &str,
1696 _tool_name: Option<&str>,
1697 _count: usize,
1698 ) -> Result<String> {
1699 anyhow::bail!(
1700 "AI example generation not available. \
1701 Rebuild with --features ai-ingestion"
1702 )
1703 }
1704
    /// Serve the MCP server over stdio until the session ends.
    ///
    /// Runs tool discovery once at startup, registers the four static tool
    /// routes (`execute`, `list_skills`, `search_skills`,
    /// `generate_examples`), then blocks on the stdio transport.
    ///
    /// # Errors
    /// Fails if tool discovery fails or the transport errors while serving.
    pub async fn run(self) -> Result<()> {
        tracing::info!("Starting MCP server with stdio transport");

        // NOTE(review): the returned list is only used for logging here —
        // presumably `discover_tools` also records tools on `self`; confirm.
        let discovered = self.discover_tools().await?;
        tracing::info!("Discovered {} tools from skills", discovered.len());

        let router = Router::new(self)
            .with_tool(execute_tool_route())
            .with_tool(list_skills_tool_route())
            .with_tool(search_skills_tool_route())
            .with_tool(generate_examples_tool_route());

        // Serve on stdin/stdout and wait for the session to finish.
        router.serve(stdio())
            .await?
            .waiting()
            .await?;

        Ok(())
    }
1729
    /// Serve the MCP server over streamable HTTP at `host:port`, with the
    /// protocol endpoint mounted at `/mcp`.
    ///
    /// A fresh `McpServer` is built per session by the factory closure,
    /// seeded from the optional `manifest`. Shuts down gracefully on Ctrl-C.
    ///
    /// # Errors
    /// Fails if a per-session server cannot be constructed, the TCP listener
    /// cannot bind, or the HTTP server errors.
    pub async fn run_http(host: &str, port: u16, manifest: Option<SkillManifest>) -> Result<()> {
        use rmcp::transport::streamable_http_server::{
            StreamableHttpService, session::local::LocalSessionManager,
        };

        tracing::info!("Starting MCP server with HTTP streaming at {}:{}", host, port);

        // The factory runs once per HTTP session; clone the manifest so each
        // session gets its own server seeded from the same data.
        let manifest_clone = manifest.clone();
        let server_factory = move || -> std::result::Result<McpServer, std::io::Error> {
            let manifest = manifest_clone.clone();
            let server = if let Some(m) = manifest {
                McpServer::with_manifest(m)
                    .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?
            } else {
                McpServer::new()
                    .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?
            };
            Ok(server)
        };

        let service = StreamableHttpService::new(
            server_factory,
            LocalSessionManager::default().into(),
            Default::default(),
        );

        let router = axum::Router::new().nest_service("/mcp", service);

        let addr = format!("{}:{}", host, port);
        let tcp_listener = tokio::net::TcpListener::bind(&addr).await
            .map_err(|e| anyhow::anyhow!("Failed to bind to {}: {}", addr, e))?;

        tracing::info!("MCP HTTP server ready at http://{}/mcp", addr);

        // Serve until Ctrl-C; a signal-handler error is ignored so shutdown
        // still proceeds.
        axum::serve(tcp_listener, router)
            .with_graceful_shutdown(async {
                tokio::signal::ctrl_c().await.ok();
            })
            .await
            .map_err(|e| anyhow::anyhow!("HTTP server error: {}", e))?;

        Ok(())
    }
1778}
1779
1780impl ServerHandler for McpServer {
1782 fn get_info(&self) -> ServerInfo {
1783 ServerInfo {
1784 protocol_version: ProtocolVersion::V_2024_11_05,
1785 capabilities: ServerCapabilities::builder().enable_tools().build(),
1786 server_info: Implementation::from_build_env(),
1787 instructions: Some(
1788 "Skill Engine MCP Server - Execute installed skills and their tools. \
1789 Use `list_skills` to discover available skills, then `execute` to run tools. \
1790 Example: execute(skill='kubernetes', tool='get', args={resource: 'pods'})"
1791 .to_string(),
1792 ),
1793 }
1794 }
1795}
1796
/// Builds the `execute` tool route: runs a skill tool and post-processes its
/// output (size limits, truncation strategy, grep, head/tail, formatting,
/// JSONPath extraction, optional execution metadata).
fn execute_tool_route() -> ToolRoute<McpServer> {
    use futures::FutureExt;
    use rmcp::handler::server::tool::ToolCallContext;

    // JSON schema advertised to MCP clients for the `execute` tool's input.
    // Only "skill" and "tool" are required; the rest are output-shaping knobs.
    let execute_schema: serde_json::Map<String, serde_json::Value> = serde_json::from_value(serde_json::json!({
        "type": "object",
        "properties": {
            "skill": {
                "type": "string",
                "description": "The skill name to execute (e.g., 'kubernetes', 'aws')"
            },
            "tool": {
                "type": "string",
                "description": "The tool name within the skill (e.g., 'get', 'describe')"
            },
            "instance": {
                "type": "string",
                "description": "The instance name (default: 'default')",
                "default": "default"
            },
            "args": {
                "type": "object",
                "description": "Tool arguments as key-value pairs",
                "additionalProperties": true
            },
            "max_output": {
                "type": "integer",
                "description": "Maximum characters in output to prevent context overflow (e.g., 4000 for ~1000 tokens)"
            },
            "truncate": {
                "type": "string",
                "enum": ["head", "tail", "middle", "smart"],
                "description": "Truncation strategy: 'head' (keep start), 'tail' (keep end), 'middle' (keep both ends), 'smart' (preserve structure, default)"
            },
            "grep": {
                "type": "string",
                "description": "Regex pattern to filter output lines. Only matching lines are returned. Example: 'error|warning'"
            },
            "grep_invert": {
                "type": "boolean",
                "description": "Invert grep match - return lines that DON'T match the pattern"
            },
            "head": {
                "type": "integer",
                "description": "Return only first N lines of output"
            },
            "tail": {
                "type": "integer",
                "description": "Return only last N lines of output"
            },
            "format": {
                "type": "string",
                "enum": ["json", "lines", "count", "compact"],
                "description": "Transform output: 'json' (pretty-print), 'lines' (array), 'count' (line count), 'compact' (remove whitespace)"
            },
            "jq": {
                "type": "string",
                "description": "JSONPath to extract from JSON output. Examples: '.items[].name', '.metadata', '.items[0:5]'"
            },
            "include_metadata": {
                "type": "boolean",
                "description": "Include execution metadata (timing, truncation info, original size)"
            }
        },
        "required": ["skill", "tool"]
    })).unwrap();

    let tool = Tool {
        name: Cow::Borrowed("execute"),
        title: None,
        description: Some(Cow::Borrowed(
            "Execute a skill tool with context engineering features. \
            Use max_output to limit response size, grep to filter, jq to extract JSON fields. \
            Examples:\n\
            - Basic: execute(skill='k8s', tool='get', args={resource:'pods'})\n\
            - With filter: execute(skill='k8s', tool='get', args={...}, grep='Running', head=10)\n\
            - JSON extract: execute(skill='k8s', tool='get', args={...}, jq='.items[].metadata.name')\n\
            - Size limit: execute(skill='k8s', tool='logs', args={...}, max_output=4000, truncate='tail')"
        )),
        input_schema: Arc::new(execute_schema),
        output_schema: None,
        annotations: None,
        icons: None,
        meta: None,
    };

    ToolRoute::new_dyn(tool, |ctx: ToolCallContext<'_, McpServer>| {
        async move {
            let start_time = std::time::Instant::now();
            // Deserialize the raw argument map into the typed request;
            // malformed input is reported as an invalid-params MCP error.
            let args = ctx.arguments.clone().unwrap_or_default();
            let request: ExecuteSkillRequest = serde_json::from_value(serde_json::Value::Object(args))
                .map_err(|e| McpError::invalid_params(format!("Invalid parameters: {}", e), None))?;

            let result = ctx.service
                .execute_skill_tool(&request.skill, &request.instance, &request.tool, request.args)
                .await
                .map_err(|e| McpError::internal_error(format!("Skill execution failed: {}", e), None))?;

            let elapsed = start_time.elapsed();

            if result.success {
                // Apply the requested output-shaping pipeline (truncate,
                // grep, head/tail, format, jq) to the raw tool output.
                let processed = process_output(
                    &result.output,
                    request.max_output,
                    request.truncate.as_deref(),
                    request.grep.as_deref(),
                    request.grep_invert.unwrap_or(false),
                    request.head,
                    request.tail,
                    request.format.as_deref(),
                    request.jq.as_deref(),
                );

                // Optionally prepend an execution-metadata header; it is only
                // rendered when something was actually truncated/processed.
                let output = if request.include_metadata.unwrap_or(false) {
                    let mut response = String::new();

                    if processed.truncated || !processed.processing.is_empty() {
                        response.push_str("📊 **Execution Metadata**\n");
                        response.push_str(&format!("- Execution time: {:?}\n", elapsed));
                        response.push_str(&format!("- Original size: {} chars\n", processed.original_length));
                        response.push_str(&format!("- Final size: {} chars\n", processed.final_length));

                        if processed.truncated {
                            response.push_str("- ⚠️ Output was truncated\n");
                        }

                        if let Some(matches) = processed.grep_matches {
                            response.push_str(&format!("- Grep matches: {} lines\n", matches));
                        }

                        if !processed.processing.is_empty() {
                            response.push_str(&format!("- Processing: {}\n", processed.processing.join(" → ")));
                        }

                        response.push_str("\n---\n\n");
                    }

                    response.push_str(&processed.content);
                    response
                } else {
                    processed.content
                };

                Ok(CallToolResult::success(vec![Content::text(output)]))
            } else {
                // Tool-level failure: render a diagnostic message with
                // recovery tips instead of surfacing a protocol error.
                let error_msg = result.error_message.unwrap_or_else(|| "Unknown error".to_string());
                let error_output = format!(
                    "❌ **Execution Failed**\n\n\
                    **Skill:** {} | **Tool:** {} | **Instance:** {}\n\n\
                    **Error:** {}\n\n\
                    💡 **Tips:**\n\
                    - Use `list_skills` to verify the skill/tool exists\n\
                    - Use `search_skills` to find the right tool for your task\n\
                    - Check that required arguments are provided",
                    request.skill, request.tool, request.instance, error_msg
                );
                Ok(CallToolResult::error(vec![Content::text(error_output)]))
            }
        }.boxed()
    })
}
1967
1968fn list_skills_tool_route() -> ToolRoute<McpServer> {
1970 use futures::FutureExt;
1971 use rmcp::handler::server::tool::ToolCallContext;
1972
1973 let list_schema: serde_json::Map<String, serde_json::Value> = serde_json::from_value(serde_json::json!({
1974 "type": "object",
1975 "properties": {
1976 "skill": {
1977 "type": "string",
1978 "description": "Optional skill name to filter tools by"
1979 },
1980 "offset": {
1981 "type": "integer",
1982 "description": "Pagination offset (0-based index). Use with 'limit' to paginate through large tool lists.",
1983 "minimum": 0
1984 },
1985 "limit": {
1986 "type": "integer",
1987 "description": "Maximum number of tools to return. Use with 'offset' for pagination.",
1988 "minimum": 1
1989 }
1990 }
1991 })).unwrap();
1992
1993 let tool = Tool {
1994 name: Cow::Borrowed("list_skills"),
1995 title: None,
1996 description: Some(Cow::Borrowed("List all available skills and their tools. Supports pagination with offset/limit parameters.")),
1997 input_schema: Arc::new(list_schema),
1998 output_schema: None,
1999 annotations: None,
2000 icons: None,
2001 meta: None,
2002 };
2003
2004 ToolRoute::new_dyn(tool, |ctx: ToolCallContext<'_, McpServer>| {
2005 async move {
2006 let args = ctx.arguments.clone().unwrap_or_default();
2007 let request: ListSkillsRequest = serde_json::from_value(serde_json::Value::Object(args))
2008 .unwrap_or(ListSkillsRequest { skill: None, offset: None, limit: None });
2009 let output = ctx.service.list_skills_output(
2010 request.skill.as_deref(),
2011 request.offset,
2012 request.limit,
2013 ).await;
2014 Ok(CallToolResult::success(vec![Content::text(output)]))
2015 }.boxed()
2016 })
2017}
2018
/// Input payload for the `search_skills` MCP tool.
#[derive(Debug, Deserialize, JsonSchema)]
pub struct SearchSkillsRequest {
    /// Free-text search query matched against indexed tool descriptions.
    #[schemars(description = "Natural language query (e.g., 'list running pods', 'get aws s3 buckets')")]
    pub query: String,

    /// Result cap; defaults to 5 via [`default_top_k`] when omitted.
    #[serde(default = "default_top_k")]
    #[schemars(description = "Maximum number of results to return (default: 5)")]
    pub top_k: usize,
}
2031
/// Serde default for [`SearchSkillsRequest::top_k`]: return five results.
fn default_top_k() -> usize {
    5
}
2035
/// Input payload for the `generate_examples` MCP tool.
#[derive(Debug, Deserialize, JsonSchema)]
pub struct GenerateExamplesRequest {
    /// Skill whose tools should get generated examples.
    #[schemars(description = "The skill name (e.g., 'kubernetes', 'aws')")]
    pub skill: String,

    /// Optional single-tool filter; `None` means all tools in the skill.
    #[schemars(description = "Optional tool name within the skill. If not provided, generates for all tools.")]
    pub tool: Option<String>,

    /// Examples per tool; defaults to 5 via [`default_example_count`].
    #[serde(default = "default_example_count")]
    #[schemars(description = "Number of examples to generate per tool (default: 5)")]
    pub count: usize,
}
2052
/// Serde default for [`GenerateExamplesRequest::count`]: five examples per tool.
fn default_example_count() -> usize {
    5
}
2056
2057fn search_skills_tool_route() -> ToolRoute<McpServer> {
2059 use futures::FutureExt;
2060 use rmcp::handler::server::tool::ToolCallContext;
2061
2062 let search_schema: serde_json::Map<String, serde_json::Value> = serde_json::from_value(serde_json::json!({
2063 "type": "object",
2064 "properties": {
2065 "query": {
2066 "type": "string",
2067 "description": "Natural language query describing what you want to do (e.g., 'list running pods', 'get aws s3 buckets')"
2068 },
2069 "top_k": {
2070 "type": "integer",
2071 "description": "Maximum number of results to return (default: 5)",
2072 "default": 5
2073 }
2074 },
2075 "required": ["query"]
2076 })).unwrap();
2077
2078 let tool = Tool {
2079 name: Cow::Borrowed("search_skills"),
2080 title: None,
2081 description: Some(Cow::Borrowed("Search for relevant skills and tools using natural language. Uses semantic vector search to find the best matching tools for your task.")),
2082 input_schema: Arc::new(search_schema),
2083 output_schema: None,
2084 annotations: None,
2085 icons: None,
2086 meta: None,
2087 };
2088
2089 ToolRoute::new_dyn(tool, |ctx: ToolCallContext<'_, McpServer>| {
2090 async move {
2091 let args = ctx.arguments.clone().unwrap_or_default();
2092 let request: SearchSkillsRequest = serde_json::from_value(serde_json::Value::Object(args))
2093 .map_err(|e| McpError::invalid_params(format!("Invalid parameters: {}", e), None))?;
2094
2095 let output = ctx.service.search_skills(&request.query, request.top_k).await
2096 .map_err(|e| McpError::internal_error(format!("Search failed: {}", e), None))?;
2097
2098 Ok(CallToolResult::success(vec![Content::text(output)]))
2099 }.boxed()
2100 })
2101}
2102
/// Builds the `generate_examples` tool route: AI-generated usage examples for
/// a skill's tools (requires AI ingestion to be configured and enabled).
fn generate_examples_tool_route() -> ToolRoute<McpServer> {
    use futures::FutureExt;
    use rmcp::handler::server::tool::ToolCallContext;

    // Input schema advertised to MCP clients; only "skill" is required.
    let schema: serde_json::Map<String, serde_json::Value> = serde_json::from_value(serde_json::json!({
        "type": "object",
        "properties": {
            "skill": {
                "type": "string",
                "description": "The skill name to generate examples for (e.g., 'kubernetes', 'aws')"
            },
            "tool": {
                "type": "string",
                "description": "Optional tool name within the skill. If not provided, generates for all tools."
            },
            "count": {
                "type": "integer",
                "description": "Number of examples to generate per tool (default: 5)",
                "default": 5
            }
        },
        "required": ["skill"]
    })).unwrap();

    let tool = Tool {
        name: Cow::Borrowed("generate_examples"),
        title: None,
        description: Some(Cow::Borrowed(
            "Generate AI-powered usage examples for a skill's tools. \
            Uses LLMs to create realistic command examples with explanations. \
            Requires AI ingestion to be enabled (use `skill setup` to configure)."
        )),
        input_schema: Arc::new(schema),
        output_schema: None,
        annotations: None,
        icons: None,
        meta: None,
    };

    ToolRoute::new_dyn(tool, |ctx: ToolCallContext<'_, McpServer>| {
        async move {
            // Malformed input is reported as an invalid-params MCP error.
            let args = ctx.arguments.clone().unwrap_or_default();
            let request: GenerateExamplesRequest = serde_json::from_value(serde_json::Value::Object(args))
                .map_err(|e| McpError::invalid_params(format!("Invalid parameters: {}", e), None))?;

            // Delegates to McpServer::generate_examples; a stub variant is
            // used when the `ai-ingestion` feature is compiled out.
            let output = ctx.service.generate_examples(
                &request.skill,
                request.tool.as_deref(),
                request.count,
            ).await
                .map_err(|e| McpError::internal_error(format!("Example generation failed: {}", e), None))?;

            Ok(CallToolResult::success(vec![Content::text(output)]))
        }.boxed()
    })
}