1use std::env;
10use std::time::Duration;
11
12use serde_json::{json, Value};
13use thiserror::Error;
14use tracing::warn;
15
16use apcore::module::ModuleAnnotations;
17
18use crate::types::ScannedModule;
19
/// Default OpenAI-compatible chat endpoint (port 11434 is Ollama's default).
const DEFAULT_ENDPOINT: &str = "http://localhost:11434/v1";
/// Default small language model used for metadata generation.
const DEFAULT_MODEL: &str = "qwen:0.6b";
/// Minimum confidence score at which an AI-suggested value is accepted.
const DEFAULT_THRESHOLD: f64 = 0.7;
/// Modules processed per chunk in `Enhancer::enhance`.
const DEFAULT_BATCH_SIZE: usize = 5;
/// HTTP timeout for SLM requests, in seconds.
const DEFAULT_TIMEOUT: u64 = 30;
25
/// Errors produced while configuring or calling the AI enhancer.
#[derive(Debug, Error)]
pub enum AIEnhancerError {
    /// A resolved configuration value (threshold, batch size, timeout) was invalid.
    #[error("invalid config: {0}")]
    Config(String),
    /// The SLM endpoint could not be reached.
    #[error("connection failed: {0}")]
    Connection(String),
    /// The SLM replied, but the payload was unparseable or malformed.
    #[error("bad response: {0}")]
    Response(String),
}
39
/// Fills metadata gaps in scanned modules.
pub trait Enhancer {
    /// Consumes `modules` and returns them in the same order; modules whose
    /// enhancement fails are returned unchanged (see the `AIEnhancer` impl).
    fn enhance(&self, modules: Vec<ScannedModule>) -> Vec<ScannedModule>;
}
45
/// Enhancer backed by a local/remote SLM speaking the OpenAI chat API.
#[derive(Debug)]
pub struct AIEnhancer {
    /// Base URL of the OpenAI-compatible API (no trailing `/chat/completions`).
    pub endpoint: String,
    /// Model name passed in each request payload.
    pub model: String,
    /// Minimum confidence in `[0.0, 1.0]` required to accept a suggestion.
    pub threshold: f64,
    /// Number of pending modules per processing chunk.
    pub batch_size: usize,
    /// Per-request HTTP timeout in seconds.
    pub timeout: u64,
}
63
64impl AIEnhancer {
65 pub fn new(
69 endpoint: Option<String>,
70 model: Option<String>,
71 threshold: Option<f64>,
72 batch_size: Option<usize>,
73 timeout: Option<u64>,
74 ) -> Result<Self, AIEnhancerError> {
75 let endpoint = endpoint.unwrap_or_else(|| {
76 env::var("APCORE_AI_ENDPOINT").unwrap_or_else(|_| DEFAULT_ENDPOINT.into())
77 });
78 let model = model.unwrap_or_else(|| {
79 env::var("APCORE_AI_MODEL").unwrap_or_else(|_| DEFAULT_MODEL.into())
80 });
81 let threshold =
82 threshold.unwrap_or_else(|| parse_float_env("APCORE_AI_THRESHOLD", DEFAULT_THRESHOLD));
83 let batch_size = batch_size
84 .unwrap_or_else(|| parse_usize_env("APCORE_AI_BATCH_SIZE", DEFAULT_BATCH_SIZE));
85 let timeout =
86 timeout.unwrap_or_else(|| parse_u64_env("APCORE_AI_TIMEOUT", DEFAULT_TIMEOUT));
87
88 if !(0.0..=1.0).contains(&threshold) {
89 return Err(AIEnhancerError::Config(
90 "APCORE_AI_THRESHOLD must be between 0.0 and 1.0".into(),
91 ));
92 }
93 if batch_size == 0 {
94 return Err(AIEnhancerError::Config(
95 "APCORE_AI_BATCH_SIZE must be a positive integer".into(),
96 ));
97 }
98 if timeout == 0 {
99 return Err(AIEnhancerError::Config(
100 "APCORE_AI_TIMEOUT must be a positive integer".into(),
101 ));
102 }
103
104 Ok(Self {
105 endpoint,
106 model,
107 threshold,
108 batch_size,
109 timeout,
110 })
111 }
112
113 pub fn is_enabled() -> bool {
115 env::var("APCORE_AI_ENABLED")
116 .map(|v| matches!(v.to_lowercase().as_str(), "true" | "1" | "yes"))
117 .unwrap_or(false)
118 }
119
120 fn identify_gaps(&self, module: &ScannedModule) -> Vec<String> {
122 let mut gaps: Vec<String> = Vec::new();
123
124 if module.description.is_empty() || module.description == module.module_id {
125 gaps.push("description".into());
126 }
127 if module.documentation.is_none() {
128 gaps.push("documentation".into());
129 }
130 if module.annotations.is_none()
131 || module
132 .annotations
133 .as_ref()
134 .is_some_and(is_default_annotations)
135 {
136 gaps.push("annotations".into());
137 }
138 if module
139 .input_schema
140 .get("properties")
141 .and_then(|p| p.as_object())
142 .map(|o| o.is_empty())
143 .unwrap_or(true)
144 {
145 gaps.push("input_schema".into());
146 }
147
148 gaps
149 }
150
    /// Builds the instruction prompt asking the SLM to fill exactly the
    /// fields listed in `gaps`, answering with a single bare JSON object
    /// that carries per-field confidence scores.
    ///
    /// NOTE(review): the trailing "confidence" template only names
    /// `description`/`documentation`, yet `enhance_module` also reads
    /// confidences for annotations and input_schema — confirm smaller
    /// models still emit those keys.
    fn build_prompt(&self, module: &ScannedModule, gaps: &[String]) -> String {
        let mut parts = vec![
            "You are analyzing a function to generate metadata for an AI-perceivable module system.".into(),
            String::new(),
            format!("Module ID: {}", module.module_id),
            format!("Target: {}", module.target),
        ];

        // Give the model the existing description as context when there is one.
        if !module.description.is_empty() {
            parts.push(format!("Current description: {}", module.description));
        }

        parts.push(String::new());
        parts.push("Please provide the following missing metadata as JSON:".into());
        parts.push("{".into());

        // Emit one JSON-template fragment per requested gap, in gap order.
        for gap in gaps {
            match gap.as_str() {
                "description" => {
                    parts.push(
                        r#"  "description": "<≤200 chars, what this function does>","#.into(),
                    );
                }
                "documentation" => {
                    parts.push(r#"  "documentation": "<detailed Markdown explanation>","#.into());
                }
                "annotations" => {
                    parts.push(r#"  "annotations": {"#.into());
                    parts.push(r#"    "readonly": <true if no side effects>,"#.into());
                    parts.push(r#"    "destructive": <true if deletes/overwrites data>,"#.into());
                    parts.push(r#"    "idempotent": <true if safe to retry>,"#.into());
                    parts.push(r#"    "requires_approval": <true if dangerous operation>,"#.into());
                    parts.push(r#"    "open_world": <true if calls external systems>,"#.into());
                    parts.push(r#"    "streaming": <true if yields results incrementally>,"#.into());
                    parts.push(r#"    "cacheable": <true if results can be cached>,"#.into());
                    parts.push(r#"    "cache_ttl": <seconds, 0 for no expiry>,"#.into());
                    parts.push(r#"    "cache_key_fields": <list of input field names for cache key, or null for all>,"#.into());
                    parts.push(r#"    "paginated": <true if supports pagination>,"#.into());
                    parts.push(r#"    "pagination_style": <"cursor" or "offset" or "page">"#.into());
                    parts.push("  },".into());
                }
                "input_schema" => {
                    parts.push(
                        r#"  "input_schema": <JSON Schema object for function parameters>,"#.into(),
                    );
                }
                // Unknown gap names are silently skipped.
                _ => {}
            }
        }

        parts.push(r#"  "confidence": {"#.into());
        parts.push(r#"    "description": 0.0, "documentation": 0.0"#.into());
        parts.push("  }".into());
        parts.push("}".into());
        parts.push(String::new());
        parts.push("Respond with ONLY valid JSON, no markdown fences or explanation.".into());

        parts.join("\n")
    }
213
    /// Sends `prompt` to the configured OpenAI-compatible chat endpoint and
    /// returns the assistant message text.
    ///
    /// # Errors
    /// `Connection` when the HTTP request fails; `Response` when the body is
    /// not JSON or lacks `choices[0].message.content`.
    fn call_llm(&self, prompt: &str) -> Result<String, AIEnhancerError> {
        // The endpoint may or may not end with '/'; normalize before joining.
        let url = format!("{}/chat/completions", self.endpoint.trim_end_matches('/'));
        let payload = json!({
            "model": self.model,
            "messages": [{"role": "user", "content": prompt}],
            // Low temperature: metadata extraction should be near-deterministic.
            "temperature": 0.1,
        });

        // A fresh agent per call, with one global timeout covering the whole
        // request (connect + transfer) — ureq 3.x builder API.
        let agent = ureq::Agent::config_builder()
            .timeout_global(Some(Duration::from_secs(self.timeout)))
            .build()
            .new_agent();

        let body: Value = agent
            .post(&url)
            .header("Content-Type", "application/json")
            .send_json(&payload)
            .map_err(|e| AIEnhancerError::Connection(format!("Failed to reach SLM at {url}: {e}")))?
            .body_mut()
            .read_json()
            .map_err(|e| AIEnhancerError::Response(format!("Failed to parse SLM response: {e}")))?;

        // `Value` indexing yields `Null` for missing paths, so a malformed
        // body falls through to the error below instead of panicking.
        body["choices"][0]["message"]["content"]
            .as_str()
            .map(|s| s.to_string())
            .ok_or_else(|| AIEnhancerError::Response("Unexpected API response structure".into()))
    }
242
243 fn parse_response(response: &str) -> Result<Value, AIEnhancerError> {
245 let mut text = response.trim().to_string();
246
247 if text.starts_with("```") {
249 let lines: Vec<&str> = text.split('\n').collect();
250 let start = if lines[0].starts_with("```") { 1 } else { 0 };
251 let end = if lines.last().map(|l| l.trim()) == Some("```") {
252 lines.len() - 1
253 } else {
254 lines.len()
255 };
256 text = lines[start..end].join("\n");
257 }
258
259 serde_json::from_str(&text)
260 .map_err(|e| AIEnhancerError::Response(format!("SLM returned invalid JSON: {e}")))
261 }
262
    /// Runs one SLM round-trip for `module` and merges accepted suggestions
    /// into a copy of it.
    ///
    /// Only fields named in `gaps` are considered. Each suggestion is gated
    /// by its reported confidence: values at or above `self.threshold` are
    /// applied; lower ones are dropped and a warning is appended to the
    /// module. Every examined confidence is recorded in the module metadata
    /// under `x-ai-confidence`, alongside an `x-generated-by: "slm"` marker.
    ///
    /// # Errors
    /// Propagates `Connection`/`Response` errors from the LLM call and from
    /// response parsing; in that case `module` is left untouched.
    fn enhance_module(
        &self,
        module: &ScannedModule,
        gaps: &[String],
    ) -> Result<ScannedModule, AIEnhancerError> {
        let prompt = self.build_prompt(module, gaps);
        let response = self.call_llm(&prompt)?;
        let parsed = Self::parse_response(&response)?;

        // Work on a clone so an error cannot leave the input half-updated.
        let mut result = module.clone();
        let mut confidence: serde_json::Map<String, Value> = serde_json::Map::new();

        // --- description: plain string gated by confidence.description ---
        if gaps.iter().any(|g| g == "description") {
            if let Some(desc) = parsed.get("description").and_then(|v| v.as_str()) {
                // Missing confidence counts as 0.0, i.e. never accepted.
                let conf = parsed
                    .get("confidence")
                    .and_then(|c| c.get("description"))
                    .and_then(|v| v.as_f64())
                    .unwrap_or(0.0);
                confidence.insert("description".into(), json!(conf));
                if conf >= self.threshold {
                    result.description = desc.to_string();
                } else {
                    result.warnings.push(format!(
                        "Low confidence ({conf:.2}) for description — skipped. Review manually."
                    ));
                }
            }
        }

        // --- documentation: same gating as description ---
        if gaps.iter().any(|g| g == "documentation") {
            if let Some(doc) = parsed.get("documentation").and_then(|v| v.as_str()) {
                let conf = parsed
                    .get("confidence")
                    .and_then(|c| c.get("documentation"))
                    .and_then(|v| v.as_f64())
                    .unwrap_or(0.0);
                confidence.insert("documentation".into(), json!(conf));
                if conf >= self.threshold {
                    result.documentation = Some(doc.to_string());
                } else {
                    result.warnings.push(format!(
                        "Low confidence ({conf:.2}) for documentation — skipped. Review manually."
                    ));
                }
            }
        }

        // --- annotations: gated per-field; merged over the module's existing
        //     annotations and written back only if at least one field passed ---
        if gaps.iter().any(|g| g == "annotations") {
            if let Some(ann_data) = parsed.get("annotations").and_then(|v| v.as_object()) {
                // The whole confidence map; per-field lookup happens in
                // get_annotation_confidence (qualified name first, then bare).
                let ann_conf = parsed
                    .get("confidence")
                    .and_then(|v| v.as_object())
                    .cloned()
                    .unwrap_or_default();
                let mut base = module.annotations.clone().unwrap_or_default();
                let mut any_accepted = false;

                let bool_fields = [
                    "readonly",
                    "destructive",
                    "idempotent",
                    "requires_approval",
                    "open_world",
                    "streaming",
                    "cacheable",
                    "paginated",
                ];
                for field in &bool_fields {
                    if let Some(val) = ann_data.get(*field).and_then(|v| v.as_bool()) {
                        let field_conf = get_annotation_confidence(&ann_conf, field);
                        confidence.insert(format!("annotations.{field}"), json!(field_conf));
                        if field_conf >= self.threshold {
                            set_bool_annotation(&mut base, field, val);
                            any_accepted = true;
                        } else {
                            result.warnings.push(format!(
                                "Low confidence ({field_conf:.2}) for annotations.{field} — skipped. Review manually."
                            ));
                        }
                    }
                }

                // cache_ttl: non-boolean, handled separately (u64 seconds).
                if let Some(val) = ann_data.get("cache_ttl").and_then(|v| v.as_u64()) {
                    let field_conf = get_annotation_confidence(&ann_conf, "cache_ttl");
                    confidence.insert("annotations.cache_ttl".into(), json!(field_conf));
                    if field_conf >= self.threshold {
                        base.cache_ttl = val;
                        any_accepted = true;
                    } else {
                        result.warnings.push(format!(
                            "Low confidence ({field_conf:.2}) for annotations.cache_ttl — skipped. Review manually."
                        ));
                    }
                }

                if let Some(val) = ann_data.get("pagination_style").and_then(|v| v.as_str()) {
                    let field_conf = get_annotation_confidence(&ann_conf, "pagination_style");
                    confidence.insert("annotations.pagination_style".into(), json!(field_conf));
                    if field_conf >= self.threshold {
                        base.pagination_style = val.to_string();
                        any_accepted = true;
                    } else {
                        result.warnings.push(format!(
                            "Low confidence ({field_conf:.2}) for annotations.pagination_style — skipped. Review manually."
                        ));
                    }
                }

                // cache_key_fields: non-string array entries are silently dropped.
                if let Some(arr) = ann_data.get("cache_key_fields").and_then(|v| v.as_array()) {
                    let field_conf = get_annotation_confidence(&ann_conf, "cache_key_fields");
                    confidence.insert("annotations.cache_key_fields".into(), json!(field_conf));
                    if field_conf >= self.threshold {
                        let keys: Vec<String> = arr
                            .iter()
                            .filter_map(|v| v.as_str().map(|s| s.to_string()))
                            .collect();
                        base.cache_key_fields = Some(keys);
                        any_accepted = true;
                    } else {
                        result.warnings.push(format!(
                            "Low confidence ({field_conf:.2}) for annotations.cache_key_fields — skipped. Review manually."
                        ));
                    }
                }

                if any_accepted {
                    result.annotations = Some(base);
                }
            }
        }

        // --- input_schema: the raw JSON value is adopted wholesale; no shape
        //     validation is performed here ---
        if gaps.iter().any(|g| g == "input_schema") {
            if let Some(schema) = parsed.get("input_schema") {
                let conf = parsed
                    .get("confidence")
                    .and_then(|c| c.get("input_schema"))
                    .and_then(|v| v.as_f64())
                    .unwrap_or(0.0);
                confidence.insert("input_schema".into(), json!(conf));
                if conf >= self.threshold {
                    result.input_schema = schema.clone();
                } else {
                    result.warnings.push(format!(
                        "Low confidence ({conf:.2}) for input_schema — skipped. Review manually."
                    ));
                }
            }
        }

        // Stamp provenance metadata only when at least one field was examined.
        if !confidence.is_empty() {
            result
                .metadata
                .insert("x-generated-by".into(), Value::String("slm".into()));
            result
                .metadata
                .insert("x-ai-confidence".into(), Value::Object(confidence));
        }

        Ok(result)
    }
434}
435
436impl Enhancer for AIEnhancer {
437 fn enhance(&self, modules: Vec<ScannedModule>) -> Vec<ScannedModule> {
438 let mut results: Vec<ScannedModule> = Vec::with_capacity(modules.len());
439
440 let mut pending: Vec<(usize, Vec<String>)> = Vec::new();
441 for (idx, module) in modules.iter().enumerate() {
442 let gaps = self.identify_gaps(module);
443 results.push(module.clone());
444 if !gaps.is_empty() {
445 pending.push((idx, gaps));
446 }
447 }
448
449 for batch in pending.chunks(self.batch_size) {
450 for (idx, gaps) in batch {
451 match self.enhance_module(&modules[*idx], gaps) {
452 Ok(enhanced) => results[*idx] = enhanced,
453 Err(e) => {
454 warn!("AI enhancement failed for {}: {e}", modules[*idx].module_id);
455 }
456 }
457 }
458 }
459
460 results
461 }
462}
463
464fn is_default_annotations(ann: &ModuleAnnotations) -> bool {
466 let d = ModuleAnnotations::default();
467 ann.readonly == d.readonly
468 && ann.destructive == d.destructive
469 && ann.idempotent == d.idempotent
470 && ann.requires_approval == d.requires_approval
471 && ann.open_world == d.open_world
472 && ann.streaming == d.streaming
473 && ann.cacheable == d.cacheable
474 && ann.cache_ttl == d.cache_ttl
475 && ann.cache_key_fields == d.cache_key_fields
476 && ann.paginated == d.paginated
477 && ann.pagination_style == d.pagination_style
478}
479
480fn get_annotation_confidence(conf: &serde_json::Map<String, Value>, field: &str) -> f64 {
482 conf.get(&format!("annotations.{field}"))
483 .or_else(|| conf.get(field))
484 .and_then(|v| v.as_f64())
485 .unwrap_or(0.0)
486}
487
488fn set_bool_annotation(ann: &mut ModuleAnnotations, field: &str, value: bool) {
490 match field {
491 "readonly" => ann.readonly = value,
492 "destructive" => ann.destructive = value,
493 "idempotent" => ann.idempotent = value,
494 "requires_approval" => ann.requires_approval = value,
495 "open_world" => ann.open_world = value,
496 "streaming" => ann.streaming = value,
497 "cacheable" => ann.cacheable = value,
498 "paginated" => ann.paginated = value,
499 _ => {}
500 }
501}
502
/// Reads `name` from the environment and parses it as `T`, falling back to
/// `default` when the variable is unset or fails to parse. Consolidates the
/// previously triplicated float/usize/u64 helpers.
fn parse_env<T: std::str::FromStr>(name: &str, default: T) -> T {
    env::var(name)
        .ok()
        .and_then(|v| v.parse().ok())
        .unwrap_or(default)
}

/// `f64` env lookup with fallback (see `parse_env`).
fn parse_float_env(name: &str, default: f64) -> f64 {
    parse_env(name, default)
}

/// `usize` env lookup with fallback (see `parse_env`).
fn parse_usize_env(name: &str, default: usize) -> usize {
    parse_env(name, default)
}

/// `u64` env lookup with fallback (see `parse_env`).
fn parse_u64_env(name: &str, default: u64) -> u64 {
    parse_env(name, default)
}
523
524#[cfg(test)]
525mod tests {
526 use super::*;
527 use apcore::module::ModuleAnnotations;
528 use serde_json::json;
529
    // `new(None, ...)` falls back to the compiled-in defaults.
    // NOTE(review): this assumes the APCORE_AI_* env vars are unset in the
    // test environment; a stray APCORE_AI_ENDPOINT would fail this test.
    #[test]
    fn test_ai_enhancer_new_defaults() {
        let enhancer = AIEnhancer::new(None, None, None, None, None).unwrap();
        assert_eq!(enhancer.endpoint, DEFAULT_ENDPOINT);
        assert_eq!(enhancer.model, DEFAULT_MODEL);
        assert!((enhancer.threshold - DEFAULT_THRESHOLD).abs() < f64::EPSILON);
        assert_eq!(enhancer.batch_size, DEFAULT_BATCH_SIZE);
        assert_eq!(enhancer.timeout, DEFAULT_TIMEOUT);
    }

    // Explicit arguments take precedence over env vars and defaults.
    #[test]
    fn test_ai_enhancer_new_with_overrides() {
        let enhancer = AIEnhancer::new(
            Some("http://custom:8080".into()),
            Some("llama3".into()),
            Some(0.5),
            Some(10),
            Some(60),
        )
        .unwrap();
        assert_eq!(enhancer.endpoint, "http://custom:8080");
        assert_eq!(enhancer.model, "llama3");
        assert!((enhancer.threshold - 0.5).abs() < f64::EPSILON);
    }

    // A threshold outside [0.0, 1.0] is rejected at construction time.
    #[test]
    fn test_ai_enhancer_threshold_validation() {
        let result = AIEnhancer::new(None, None, Some(1.5), None, None);
        assert!(result.is_err());
    }

    // A zero batch size is rejected at construction time.
    #[test]
    fn test_ai_enhancer_batch_size_validation() {
        let result = AIEnhancer::new(None, None, None, Some(0), None);
        assert!(result.is_err());
    }
566
    // A module with description, docs, non-default annotations, and a
    // non-empty properties object has no gaps at all.
    #[test]
    fn test_identify_gaps_complete_module() {
        let enhancer = AIEnhancer::new(None, None, None, None, None).unwrap();
        let mut module = ScannedModule::new(
            "test".into(),
            "A real description".into(),
            json!({"type": "object", "properties": {"x": {"type": "string"}}}),
            json!({}),
            vec![],
            "app:func".into(),
        );
        module.documentation = Some("Full docs".into());
        // Any non-default field makes the annotations count as present.
        module.annotations = Some(ModuleAnnotations {
            readonly: true,
            ..Default::default()
        });
        let gaps = enhancer.identify_gaps(&module);
        assert!(gaps.is_empty());
    }

    // An empty description, no docs, no annotations, and a schema without
    // "properties" should flag all four gap kinds.
    #[test]
    fn test_identify_gaps_missing_fields() {
        let enhancer = AIEnhancer::new(None, None, None, None, None).unwrap();
        let module = ScannedModule::new(
            "test".into(),
            String::new(),
            json!({"type": "object"}),
            json!({}),
            vec![],
            "app:func".into(),
        );
        let gaps = enhancer.identify_gaps(&module);
        assert!(gaps.iter().any(|g| g == "description"));
        assert!(gaps.iter().any(|g| g == "documentation"));
        assert!(gaps.iter().any(|g| g == "annotations"));
        assert!(gaps.iter().any(|g| g == "input_schema"));
    }
604
    // Plain JSON passes straight through parse_response.
    #[test]
    fn test_parse_response_valid_json() {
        let response = r#"{"description": "hello", "confidence": {"description": 0.9}}"#;
        let result = AIEnhancer::parse_response(response).unwrap();
        assert_eq!(result["description"], "hello");
    }

    // Markdown code fences (with a language tag) are stripped before parsing.
    #[test]
    fn test_parse_response_with_fences() {
        let response = "```json\n{\"key\": \"value\"}\n```";
        let result = AIEnhancer::parse_response(response).unwrap();
        assert_eq!(result["key"], "value");
    }

    // Non-JSON text surfaces as a Response error rather than a panic.
    #[test]
    fn test_parse_response_invalid() {
        let result = AIEnhancer::parse_response("not json");
        assert!(result.is_err());
    }
624
625 #[test]
626 fn test_is_enabled_default() {
627 env::remove_var("APCORE_AI_ENABLED");
629 assert!(!AIEnhancer::is_enabled());
630 }
631
    // The prompt must carry the module id, the target, and the requested gap
    // name so the SLM knows exactly what to fill in.
    #[test]
    fn test_build_prompt_contains_module_info() {
        let enhancer = AIEnhancer::new(None, None, None, None, None).unwrap();
        let module = ScannedModule::new(
            "users.get".into(),
            "Get user".into(),
            json!({}),
            json!({}),
            vec![],
            "app:get_user".into(),
        );
        let prompt = enhancer.build_prompt(&module, &["description".into()]);
        assert!(prompt.contains("users.get"));
        assert!(prompt.contains("app:get_user"));
        assert!(prompt.contains("description"));
    }
648
    // A description that merely repeats the module id is a placeholder and
    // must still be flagged as a description gap.
    #[test]
    fn test_identify_gaps_description_equals_module_id() {
        let enhancer = AIEnhancer::new(None, None, None, None, None).unwrap();
        let module = ScannedModule::new(
            "my_module".into(),
            "my_module".into(),
            json!({"type": "object", "properties": {"x": {"type": "string"}}}),
            json!({}),
            vec![],
            "app:func".into(),
        );
        let gaps = enhancer.identify_gaps(&module);
        assert!(
            gaps.iter().any(|g| g == "description"),
            "description matching module_id should be identified as a gap"
        );
    }

    // A zero timeout is rejected with the specific config error message.
    #[test]
    fn test_ai_enhancer_timeout_validation() {
        let result = AIEnhancer::new(None, None, None, None, Some(0));
        assert!(result.is_err());
        let err = result.unwrap_err();
        assert!(err
            .to_string()
            .contains("APCORE_AI_TIMEOUT must be a positive integer"));
    }
676
677 #[test]
680 fn test_is_enabled_variants() {
681 use std::sync::Mutex;
682 static ENV_LOCK: Mutex<()> = Mutex::new(());
683 let _guard = ENV_LOCK.lock().unwrap();
684
685 unsafe { env::remove_var("APCORE_AI_ENABLED") };
687 assert!(!AIEnhancer::is_enabled(), "should be disabled by default");
688
689 unsafe { env::set_var("APCORE_AI_ENABLED", "true") };
691 assert!(AIEnhancer::is_enabled(), "\"true\" should enable");
692
693 unsafe { env::set_var("APCORE_AI_ENABLED", "yes") };
695 assert!(AIEnhancer::is_enabled(), "\"yes\" should enable");
696
697 unsafe { env::set_var("APCORE_AI_ENABLED", "1") };
699 assert!(AIEnhancer::is_enabled(), "\"1\" should enable");
700
701 unsafe { env::set_var("APCORE_AI_ENABLED", "false") };
703 assert!(!AIEnhancer::is_enabled(), "\"false\" should disable");
704
705 unsafe { env::remove_var("APCORE_AI_ENABLED") };
707 }
708
    // NOTE(review): duplicates test_parse_response_with_fences above (same
    // fence-stripping path, different payload); consider removing one.
    #[test]
    fn test_parse_response_strips_json_fence() {
        let response = "```json\n{\"description\": \"hello world\"}\n```";
        let result = AIEnhancer::parse_response(response).unwrap();
        assert_eq!(result["description"], "hello world");
    }

    // The annotations gap expands into the full per-field JSON template.
    #[test]
    fn test_build_prompt_requests_annotations() {
        let enhancer = AIEnhancer::new(None, None, None, None, None).unwrap();
        let module = ScannedModule::new(
            "test".into(),
            "desc".into(),
            json!({}),
            json!({}),
            vec![],
            "app:func".into(),
        );
        let prompt = enhancer.build_prompt(&module, &["annotations".into()]);
        assert!(
            prompt.contains("readonly"),
            "prompt should mention annotations fields"
        );
        assert!(prompt.contains("destructive"));
        assert!(prompt.contains("idempotent"));
    }

    // The input_schema gap asks for a JSON Schema object.
    #[test]
    fn test_build_prompt_requests_input_schema() {
        let enhancer = AIEnhancer::new(None, None, None, None, None).unwrap();
        let module = ScannedModule::new(
            "test".into(),
            "desc".into(),
            json!({}),
            json!({}),
            vec![],
            "app:func".into(),
        );
        let prompt = enhancer.build_prompt(&module, &["input_schema".into()]);
        assert!(
            prompt.contains("input_schema"),
            "prompt should mention input_schema"
        );
        assert!(prompt.contains("JSON Schema"));
    }

    // The documentation gap asks for a Markdown explanation.
    #[test]
    fn test_build_prompt_requests_documentation() {
        let enhancer = AIEnhancer::new(None, None, None, None, None).unwrap();
        let module = ScannedModule::new(
            "test".into(),
            "desc".into(),
            json!({}),
            json!({}),
            vec![],
            "app:func".into(),
        );
        let prompt = enhancer.build_prompt(&module, &["documentation".into()]);
        assert!(
            prompt.contains("documentation"),
            "prompt should mention documentation"
        );
        assert!(prompt.contains("Markdown"));
    }
773}