use std::collections::HashMap;
use std::future::Future;
use std::path::{Component, Path, PathBuf};
use std::sync::Arc;
use std::sync::atomic::{AtomicU64, Ordering};
use tokio::sync::RwLock;
use tower_lsp::jsonrpc::Result;
use tower_lsp::lsp_types::*;
use tower_lsp::{Client, LanguageServer};
use crate::code_actions::fixes_to_code_actions_with_diagnostic;
use crate::completion_provider::completion_items_for_document;
use crate::diagnostic_mapper::{deserialize_fixes, to_lsp_diagnostics};
use crate::hover_provider::hover_at_position;
use crate::vscode_config::VsCodeConfig;
/// Build a file-level error diagnostic anchored at position 0:0.
///
/// Used to surface validation failures and internal task failures to the
/// editor when there is no meaningful source range to point at.
fn create_error_diagnostic(code: &str, message: String) -> Diagnostic {
    // Both ends of the range collapse onto the start of the file.
    let origin = Position {
        line: 0,
        character: 0,
    };
    Diagnostic {
        range: Range {
            start: origin.clone(),
            end: origin,
        },
        severity: Some(DiagnosticSeverity::ERROR),
        code: Some(NumberOrString::String(code.to_string())),
        code_description: None,
        source: Some("agnix".to_string()),
        message,
        related_information: None,
        tags: None,
        data: None,
    }
}
/// Lexically normalize a path without touching the filesystem: drop `.`
/// segments and resolve `..` against a preceding normal segment.
///
/// `..` directly under a root or prefix is discarded (it cannot go higher),
/// while leading `..` segments of a relative path are preserved.
fn normalize_path(path: &Path) -> PathBuf {
    let mut stack: Vec<Component<'_>> = Vec::new();
    for part in path.components() {
        match part {
            // `.` never changes the location.
            Component::CurDir => continue,
            Component::ParentDir => match stack.last() {
                // `..` cancels the most recent normal segment.
                Some(Component::Normal(_)) => {
                    stack.pop();
                }
                // `..` at a root/prefix boundary is a no-op.
                Some(Component::RootDir) | Some(Component::Prefix(_)) => {}
                // Leading `..` of a relative path must be kept.
                _ => stack.push(part),
            },
            other => stack.push(other),
        }
    }
    stack.iter().collect()
}
/// Hard cap on concurrent revalidation tasks after a configuration change.
const MAX_CONFIG_REVALIDATION_CONCURRENCY: usize = 8;

/// Choose how many revalidation tasks to run concurrently for
/// `document_count` open documents.
///
/// Returns 0 when there is nothing to revalidate; otherwise the value is
/// bounded by available CPU parallelism (defaulting to 4 when unknown),
/// clamped to `[1, MAX_CONFIG_REVALIDATION_CONCURRENCY]`, and never exceeds
/// the number of documents.
fn config_revalidation_concurrency(document_count: usize) -> usize {
    if document_count == 0 {
        return 0;
    }
    let parallelism = std::thread::available_parallelism()
        .map(std::num::NonZeroUsize::get)
        .unwrap_or(4);
    let cap = parallelism.clamp(1, MAX_CONFIG_REVALIDATION_CONCURRENCY);
    document_count.min(cap)
}
/// Run `operation` over every item with at most `max_concurrency` tasks in
/// flight at once, returning any `JoinError`s (panicked/cancelled tasks).
///
/// A `max_concurrency` of 0 is treated as 1 so the pool always makes
/// progress. Each completed task frees a slot for the next pending item.
async fn for_each_bounded<T, I, F, Fut>(
    items: I,
    max_concurrency: usize,
    operation: F,
) -> Vec<tokio::task::JoinError>
where
    T: Send + 'static,
    I: IntoIterator<Item = T>,
    F: Fn(T) -> Fut + Send + Sync + 'static,
    Fut: Future<Output = ()> + Send + 'static,
{
    let operation = Arc::new(operation);
    let mut remaining = items.into_iter();
    let mut in_flight = tokio::task::JoinSet::new();
    // Prime the pool with up to `max_concurrency` tasks (at least one).
    for item in remaining.by_ref().take(max_concurrency.max(1)) {
        let op = Arc::clone(&operation);
        in_flight.spawn(async move {
            op(item).await;
        });
    }
    let mut failures = Vec::new();
    // Each completion opens a slot; refill it from the pending iterator so
    // the in-flight count stays bounded.
    while let Some(outcome) = in_flight.join_next().await {
        if let Err(join_error) = outcome {
            failures.push(join_error);
        }
        if let Some(item) = remaining.next() {
            let op = Arc::clone(&operation);
            in_flight.spawn(async move {
                op(item).await;
            });
        }
    }
    failures
}
/// Shared state for the agnix language server; cheap to clone because every
/// field is behind an `Arc`.
#[derive(Clone)]
pub struct Backend {
    // Handle for pushing diagnostics and log messages back to the editor.
    client: Client,
    // Active lint configuration. The inner `Arc` lets a validation hold a
    // snapshot while a reload swaps in a new config under the lock.
    config: Arc<RwLock<Arc<agnix_core::LintConfig>>>,
    // Workspace root as reported by the client at initialize time.
    workspace_root: Arc<RwLock<Option<PathBuf>>>,
    // Canonicalized (or lexically normalized) workspace root, cached so the
    // per-file boundary check does not re-canonicalize on every validation.
    workspace_root_canonical: Arc<RwLock<Option<PathBuf>>>,
    // In-memory document contents keyed by URI; `Arc<String>` snapshots let
    // staleness be detected via pointer identity.
    documents: Arc<RwLock<HashMap<Url, Arc<String>>>>,
    // Monotonic counter bumped on each config change; used to discard
    // revalidation results produced under an older configuration.
    config_generation: Arc<AtomicU64>,
    // Validator registry, built once and shared across validations.
    registry: Arc<agnix_core::ValidatorRegistry>,
}
impl Backend {
    /// Create a backend with the default lint configuration and the built-in
    /// validator registry.
    pub fn new(client: Client) -> Self {
        Self {
            client,
            config: Arc::new(RwLock::new(Arc::new(agnix_core::LintConfig::default()))),
            workspace_root: Arc::new(RwLock::new(None)),
            workspace_root_canonical: Arc::new(RwLock::new(None)),
            documents: Arc::new(RwLock::new(HashMap::new())),
            config_generation: Arc::new(AtomicU64::new(0)),
            registry: Arc::new(agnix_core::ValidatorRegistry::with_defaults()),
        }
    }

    /// Validate `path` by reading it from disk on a blocking thread and map
    /// the outcome to LSP diagnostics. Validation failures and task failures
    /// are reported as synthetic file-level diagnostics rather than dropped.
    async fn validate_file(&self, path: PathBuf) -> Vec<Diagnostic> {
        // Snapshot config/registry so the blocking task owns its inputs.
        let config = Arc::clone(&*self.config.read().await);
        let registry = Arc::clone(&self.registry);
        let result = tokio::task::spawn_blocking(move || {
            // FIX: was `®istry` (HTML-entity mojibake for `&registry`).
            agnix_core::validate_file_with_registry(&path, &config, &registry)
        })
        .await;
        match result {
            Ok(Ok(diagnostics)) => to_lsp_diagnostics(diagnostics),
            Ok(Err(e)) => vec![create_error_diagnostic(
                "agnix::validation-error",
                format!("Validation error: {}", e),
            )],
            // JoinError: the blocking task panicked or was cancelled.
            Err(e) => vec![create_error_diagnostic(
                "agnix::internal-error",
                format!("Internal error: {}", e),
            )],
        }
    }

    /// Validate the document at `uri` (preferring the in-memory content over
    /// disk) and publish the resulting diagnostics.
    ///
    /// `expected_config_generation`, when set, marks this run as a
    /// config-change revalidation: results are discarded if the config has
    /// changed again or the document was closed in the meantime.
    async fn validate_from_content_and_publish(
        &self,
        uri: Url,
        expected_config_generation: Option<u64>,
    ) {
        let file_path = match uri.to_file_path() {
            Ok(p) => p,
            Err(()) => {
                self.client
                    .log_message(MessageType::WARNING, format!("Invalid file URI: {}", uri))
                    .await;
                return;
            }
        };
        // Workspace boundary check: refuse to validate files outside the
        // workspace root reported by the client.
        if let Some(ref workspace_root) = *self.workspace_root.read().await {
            let (canonical_path, canonical_root) = match file_path.canonicalize() {
                Ok(path) => {
                    let root = self
                        .workspace_root_canonical
                        .read()
                        .await
                        .clone()
                        .unwrap_or_else(|| normalize_path(workspace_root));
                    (path, root)
                }
                // File may not exist on disk yet (unsaved buffer): fall back
                // to lexical normalization for both sides of the comparison.
                Err(_) => (normalize_path(&file_path), normalize_path(workspace_root)),
            };
            if !canonical_path.starts_with(&canonical_root) {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("File outside workspace boundary: {}", uri),
                    )
                    .await;
                return;
            }
        }
        // Prefer the cached in-memory content; remember the exact Arc we
        // validated so stale results can be detected by pointer identity.
        let (content, expected_content) = {
            let docs = self.documents.read().await;
            match docs.get(&uri) {
                Some(cached) => {
                    let snapshot = Arc::clone(cached);
                    (Arc::clone(&snapshot), Some(snapshot))
                }
                None => {
                    // No cached content (e.g. didSave without didOpen):
                    // validate from disk instead.
                    drop(docs);
                    let diagnostics = self.validate_file(file_path).await;
                    if !self
                        .should_publish_diagnostics(&uri, expected_config_generation, None)
                        .await
                    {
                        return;
                    }
                    self.client
                        .publish_diagnostics(uri, diagnostics, None)
                        .await;
                    return;
                }
            }
        };
        let config = Arc::clone(&*self.config.read().await);
        let registry = Arc::clone(&self.registry);
        let result = tokio::task::spawn_blocking(move || {
            let file_type = agnix_core::resolve_file_type(&file_path, &config);
            // Unknown file types produce no diagnostics by design.
            if file_type == agnix_core::FileType::Unknown {
                return Ok(vec![]);
            }
            let validators = registry.validators_for(file_type);
            let mut diagnostics = Vec::new();
            for validator in validators {
                diagnostics.extend(validator.validate(&file_path, content.as_str(), &config));
            }
            Ok::<_, agnix_core::LintError>(diagnostics)
        })
        .await;
        let diagnostics = match result {
            Ok(Ok(diagnostics)) => to_lsp_diagnostics(diagnostics),
            Ok(Err(e)) => vec![create_error_diagnostic(
                "agnix::validation-error",
                format!("Validation error: {}", e),
            )],
            Err(e) => vec![create_error_diagnostic(
                "agnix::internal-error",
                format!("Internal error: {}", e),
            )],
        };
        // Drop the results if the document changed or config moved on while
        // we were validating, so fresher diagnostics are never overwritten.
        if !self
            .should_publish_diagnostics(&uri, expected_config_generation, expected_content.as_ref())
            .await
        {
            return;
        }
        self.client
            .publish_diagnostics(uri, diagnostics, None)
            .await;
    }

    /// Decide whether diagnostics computed earlier are still current.
    ///
    /// Returns `false` when the cached content is no longer the exact Arc we
    /// validated (document edited/reopened), or — for config revalidations —
    /// when the config generation advanced or the document was closed.
    async fn should_publish_diagnostics(
        &self,
        uri: &Url,
        expected_config_generation: Option<u64>,
        expected_content: Option<&Arc<String>>,
    ) -> bool {
        let docs = self.documents.read().await;
        let current_content = docs.get(uri);
        if let Some(expected) = expected_content {
            let Some(current) = current_content else {
                return false;
            };
            // Pointer identity: any edit replaces the Arc, so ptr_eq detects
            // staleness without comparing string contents.
            if !Arc::ptr_eq(current, expected) {
                return false;
            }
        }
        if let Some(expected_generation) = expected_config_generation {
            if self.config_generation.load(Ordering::SeqCst) != expected_generation {
                return false;
            }
            // A closed document should not receive revalidation results.
            if current_content.is_none() {
                return false;
            }
        }
        true
    }

    /// Fetch the cached in-memory content for `uri`, if the document is open.
    async fn get_document_content(&self, uri: &Url) -> Option<Arc<String>> {
        self.documents.read().await.get(uri).cloned()
    }
}
#[tower_lsp::async_trait]
impl LanguageServer for Backend {
async fn initialize(&self, params: InitializeParams) -> Result<InitializeResult> {
if let Some(root_uri) = params.root_uri {
if let Ok(root_path) = root_uri.to_file_path() {
*self.workspace_root.write().await = Some(root_path.clone());
*self.workspace_root_canonical.write().await = Some(
root_path
.canonicalize()
.unwrap_or_else(|_| normalize_path(&root_path)),
);
let config_path = root_path.join(".agnix.toml");
if config_path.exists() {
match agnix_core::LintConfig::load(&config_path) {
Ok(loaded_config) => {
if let Some(ref config_locale) = loaded_config.locale {
crate::locale::init_from_config(config_locale);
}
let mut config_with_root = loaded_config;
config_with_root.root_dir = Some(root_path.clone());
*self.config.write().await = Arc::new(config_with_root);
}
Err(e) => {
self.client
.log_message(
MessageType::WARNING,
format!("Failed to load .agnix.toml: {}", e),
)
.await;
}
}
}
}
}
Ok(InitializeResult {
capabilities: ServerCapabilities {
text_document_sync: Some(TextDocumentSyncCapability::Kind(
TextDocumentSyncKind::FULL,
)),
code_action_provider: Some(CodeActionProviderCapability::Simple(true)),
hover_provider: Some(HoverProviderCapability::Simple(true)),
completion_provider: Some(CompletionOptions {
resolve_provider: Some(false),
trigger_characters: Some(vec![":".to_string(), "\"".to_string()]),
..Default::default()
}),
..Default::default()
},
server_info: Some(ServerInfo {
name: "agnix-lsp".to_string(),
version: Some(env!("CARGO_PKG_VERSION").to_string()),
}),
})
}
async fn initialized(&self, _: InitializedParams) {
self.client
.log_message(MessageType::INFO, "agnix-lsp initialized")
.await;
}
async fn shutdown(&self) -> Result<()> {
Ok(())
}
async fn did_open(&self, params: DidOpenTextDocumentParams) {
let uri = params.text_document.uri;
let text = params.text_document.text;
{
let mut docs = self.documents.write().await;
docs.insert(uri.clone(), Arc::new(text));
}
self.validate_from_content_and_publish(uri, None).await;
}
async fn did_change(&self, params: DidChangeTextDocumentParams) {
let uri = params.text_document.uri;
if let Some(change) = params.content_changes.into_iter().next() {
{
let mut docs = self.documents.write().await;
docs.insert(uri.clone(), Arc::new(change.text));
}
self.validate_from_content_and_publish(uri, None).await;
}
}
async fn did_save(&self, params: DidSaveTextDocumentParams) {
self.validate_from_content_and_publish(params.text_document.uri, None)
.await;
}
async fn did_close(&self, params: DidCloseTextDocumentParams) {
{
let mut docs = self.documents.write().await;
docs.remove(¶ms.text_document.uri);
}
self.client
.publish_diagnostics(params.text_document.uri, vec![], None)
.await;
}
async fn code_action(&self, params: CodeActionParams) -> Result<Option<CodeActionResponse>> {
let uri = ¶ms.text_document.uri;
let content = match self.get_document_content(uri).await {
Some(c) => c,
None => return Ok(None),
};
let mut actions = Vec::new();
for diag in ¶ms.context.diagnostics {
let diag_range = &diag.range;
let req_range = ¶ms.range;
let overlaps = diag_range.start.line <= req_range.end.line
&& diag_range.end.line >= req_range.start.line;
if !overlaps {
continue;
}
let fixes = deserialize_fixes(diag.data.as_ref());
if !fixes.is_empty() {
actions.extend(fixes_to_code_actions_with_diagnostic(
uri,
&fixes,
content.as_str(),
diag,
));
}
}
if actions.is_empty() {
Ok(None)
} else {
Ok(Some(
actions
.into_iter()
.map(CodeActionOrCommand::CodeAction)
.collect(),
))
}
}
async fn hover(&self, params: HoverParams) -> Result<Option<Hover>> {
let uri = ¶ms.text_document_position_params.text_document.uri;
let position = params.text_document_position_params.position;
let content = match self.get_document_content(uri).await {
Some(c) => c,
None => return Ok(None),
};
let config = self.config.read().await;
let file_type = uri
.to_file_path()
.ok()
.map(|path| agnix_core::resolve_file_type(&path, &config))
.unwrap_or(agnix_core::FileType::Unknown);
if matches!(file_type, agnix_core::FileType::Unknown) {
return Ok(None);
}
Ok(hover_at_position(file_type, content.as_str(), position))
}
async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
let uri = ¶ms.text_document_position.text_document.uri;
let position = params.text_document_position.position;
let path = match uri.to_file_path() {
Ok(path) => path,
Err(_) => return Ok(None),
};
let content = match self.get_document_content(uri).await {
Some(c) => c,
None => return Ok(None),
};
let config = self.config.read().await;
let items = completion_items_for_document(&path, content.as_str(), position, &config);
if items.is_empty() {
Ok(None)
} else {
Ok(Some(CompletionResponse::Array(items)))
}
}
async fn did_change_configuration(&self, params: DidChangeConfigurationParams) {
let vscode_config: VsCodeConfig = match serde_json::from_value(params.settings) {
Ok(c) => c,
Err(e) => {
self.client
.log_message(
MessageType::WARNING,
format!("Failed to parse VS Code settings: {}", e),
)
.await;
return;
}
};
self.client
.log_message(
MessageType::INFO,
"Received configuration update from VS Code",
)
.await;
let revalidation_generation = self.config_generation.fetch_add(1, Ordering::SeqCst) + 1;
{
let mut config_guard = self.config.write().await;
let mut new_config = (**config_guard).clone();
vscode_config.merge_into_lint_config(&mut new_config);
if let Some(ref root) = *self.workspace_root.read().await {
new_config.root_dir = Some(root.clone());
}
*config_guard = Arc::new(new_config);
}
let documents: Vec<Url> = {
let docs = self.documents.read().await;
docs.keys().cloned().collect()
};
if documents.is_empty() {
return;
}
let max_concurrency = config_revalidation_concurrency(documents.len());
let backend = self.clone();
let join_errors = for_each_bounded(documents, max_concurrency, move |uri| {
let backend = backend.clone();
async move {
backend
.validate_from_content_and_publish(uri, Some(revalidation_generation))
.await;
}
})
.await;
for error in join_errors {
self.client
.log_message(
MessageType::ERROR,
format!("Revalidation task failed after config change: {}", error),
)
.await;
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use tower_lsp::LspService;
#[tokio::test]
async fn test_backend_new_creates_valid_instance() {
let (service, _socket) = LspService::new(Backend::new);
let init_params = InitializeParams::default();
let result = service.inner().initialize(init_params).await;
assert!(result.is_ok());
}
#[tokio::test]
async fn test_initialize_returns_correct_capabilities() {
let (service, _socket) = LspService::new(Backend::new);
let init_params = InitializeParams::default();
let result = service.inner().initialize(init_params).await;
let init_result = result.expect("initialize should succeed");
match init_result.capabilities.text_document_sync {
Some(TextDocumentSyncCapability::Kind(kind)) => {
assert_eq!(kind, TextDocumentSyncKind::FULL);
}
_ => panic!("Expected FULL text document sync capability"),
}
assert!(
init_result.capabilities.completion_provider.is_some(),
"Expected completion provider capability"
);
let server_info = init_result
.server_info
.expect("server_info should be present");
assert_eq!(server_info.name, "agnix-lsp");
assert!(server_info.version.is_some());
}
#[tokio::test]
async fn test_completion_returns_skill_frontmatter_candidates() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
let content = "---\nna\n---\n";
std::fs::write(&skill_path, content).unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: uri.clone(),
language_id: "markdown".to_string(),
version: 1,
text: content.to_string(),
},
})
.await;
let completion = service
.inner()
.completion(CompletionParams {
text_document_position: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri },
position: Position {
line: 1,
character: 1,
},
},
work_done_progress_params: WorkDoneProgressParams::default(),
partial_result_params: PartialResultParams::default(),
context: None,
})
.await
.unwrap();
let items = match completion {
Some(CompletionResponse::Array(items)) => items,
_ => panic!("Expected completion items"),
};
assert!(items.iter().any(|item| item.label == "name"));
}
#[tokio::test]
async fn test_shutdown_returns_ok() {
let (service, _socket) = LspService::new(Backend::new);
let result = service.inner().shutdown().await;
assert!(result.is_ok());
}
#[test]
fn test_validation_error_diagnostic_structure() {
let error_message = "Failed to parse file";
let diagnostic = Diagnostic {
range: Range {
start: Position {
line: 0,
character: 0,
},
end: Position {
line: 0,
character: 0,
},
},
severity: Some(DiagnosticSeverity::ERROR),
code: Some(NumberOrString::String(
"agnix::validation-error".to_string(),
)),
code_description: None,
source: Some("agnix".to_string()),
message: format!("Validation error: {}", error_message),
related_information: None,
tags: None,
data: None,
};
assert_eq!(
diagnostic.code,
Some(NumberOrString::String(
"agnix::validation-error".to_string()
))
);
assert_eq!(diagnostic.source, Some("agnix".to_string()));
assert_eq!(diagnostic.severity, Some(DiagnosticSeverity::ERROR));
assert!(diagnostic.message.contains("Validation error:"));
}
#[test]
fn test_internal_error_diagnostic_structure() {
let error_message = "task panicked";
let diagnostic = Diagnostic {
range: Range {
start: Position {
line: 0,
character: 0,
},
end: Position {
line: 0,
character: 0,
},
},
severity: Some(DiagnosticSeverity::ERROR),
code: Some(NumberOrString::String("agnix::internal-error".to_string())),
code_description: None,
source: Some("agnix".to_string()),
message: format!("Internal error: {}", error_message),
related_information: None,
tags: None,
data: None,
};
assert_eq!(
diagnostic.code,
Some(NumberOrString::String("agnix::internal-error".to_string()))
);
assert_eq!(diagnostic.source, Some("agnix".to_string()));
assert_eq!(diagnostic.severity, Some(DiagnosticSeverity::ERROR));
assert!(diagnostic.message.contains("Internal error:"));
}
#[test]
fn test_invalid_uri_detection() {
let http_uri = Url::parse("http://example.com/file.md").unwrap();
assert!(http_uri.to_file_path().is_err());
let data_uri = Url::parse("data:text/plain;base64,SGVsbG8=").unwrap();
assert!(data_uri.to_file_path().is_err());
#[cfg(windows)]
let file_uri = Url::parse("file:///C:/tmp/test.md").unwrap();
#[cfg(not(windows))]
let file_uri = Url::parse("file:///tmp/test.md").unwrap();
assert!(file_uri.to_file_path().is_ok());
}
#[tokio::test]
async fn test_validate_file_valid_skill() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(
&skill_path,
r#"---
name: test-skill
version: 1.0.0
model: sonnet
---
# Test Skill
This is a valid skill.
"#,
)
.unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: String::new(), },
})
.await;
}
#[tokio::test]
async fn test_validate_file_invalid_skill() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(
&skill_path,
r#"---
name: Invalid Name With Spaces
version: 1.0.0
model: sonnet
---
# Invalid Skill
This skill has an invalid name.
"#,
)
.unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: String::new(),
},
})
.await;
}
#[tokio::test]
async fn test_did_save_triggers_validation() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(
&skill_path,
r#"---
name: test-skill
version: 1.0.0
model: sonnet
---
# Test Skill
"#,
)
.unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_save(DidSaveTextDocumentParams {
text_document: TextDocumentIdentifier { uri },
text: None,
})
.await;
}
#[tokio::test]
async fn test_did_close_clears_diagnostics() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(&skill_path, "# Test").unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_close(DidCloseTextDocumentParams {
text_document: TextDocumentIdentifier { uri },
})
.await;
}
#[tokio::test]
async fn test_initialized_completes() {
let (service, _socket) = LspService::new(Backend::new);
service.inner().initialized(InitializedParams {}).await;
}
#[tokio::test]
async fn test_non_file_uri_handled_gracefully() {
let (service, _socket) = LspService::new(Backend::new);
let http_uri = Url::parse("http://example.com/test.md").unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: http_uri,
language_id: "markdown".to_string(),
version: 1,
text: String::new(),
},
})
.await;
}
#[tokio::test]
async fn test_validate_nonexistent_file() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let nonexistent_path = temp_dir.path().join("nonexistent.md");
let uri = Url::from_file_path(&nonexistent_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: String::new(),
},
})
.await;
}
#[tokio::test]
async fn test_server_info_version() {
let (service, _socket) = LspService::new(Backend::new);
let init_params = InitializeParams::default();
let result = service.inner().initialize(init_params).await.unwrap();
let server_info = result.server_info.unwrap();
let version = server_info.version.unwrap();
assert!(!version.is_empty());
assert!(version.contains('.'));
}
#[tokio::test]
async fn test_initialize_captures_workspace_root() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
let init_params = InitializeParams {
root_uri: Some(root_uri),
..Default::default()
};
let result = service.inner().initialize(init_params).await;
assert!(result.is_ok());
}
#[tokio::test]
async fn test_initialize_loads_config_from_file() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let config_path = temp_dir.path().join(".agnix.toml");
std::fs::write(
&config_path,
r#"
severity = "Warning"
target = "ClaudeCode"
exclude = []
[rules]
skills = false
"#,
)
.unwrap();
let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
let init_params = InitializeParams {
root_uri: Some(root_uri),
..Default::default()
};
let result = service.inner().initialize(init_params).await;
assert!(result.is_ok());
}
#[tokio::test]
async fn test_initialize_handles_invalid_config() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let config_path = temp_dir.path().join(".agnix.toml");
std::fs::write(&config_path, "this is not valid toml [[[").unwrap();
let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
let init_params = InitializeParams {
root_uri: Some(root_uri),
..Default::default()
};
let result = service.inner().initialize(init_params).await;
assert!(result.is_ok());
}
#[tokio::test]
async fn test_file_within_workspace_validated() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(
&skill_path,
r#"---
name: test-skill
version: 1.0.0
model: sonnet
---
# Test Skill
"#,
)
.unwrap();
let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
let init_params = InitializeParams {
root_uri: Some(root_uri),
..Default::default()
};
service.inner().initialize(init_params).await.unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: String::new(),
},
})
.await;
}
#[tokio::test]
async fn test_file_outside_workspace_rejected() {
let (service, _socket) = LspService::new(Backend::new);
let workspace_dir = tempfile::tempdir().unwrap();
let outside_dir = tempfile::tempdir().unwrap();
let outside_file = outside_dir.path().join("SKILL.md");
std::fs::write(
&outside_file,
r#"---
name: outside-skill
version: 1.0.0
model: sonnet
---
# Outside Skill
"#,
)
.unwrap();
let root_uri = Url::from_file_path(workspace_dir.path()).unwrap();
let init_params = InitializeParams {
root_uri: Some(root_uri),
..Default::default()
};
service.inner().initialize(init_params).await.unwrap();
let uri = Url::from_file_path(&outside_file).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: String::new(),
},
})
.await;
}
#[tokio::test]
async fn test_validation_without_workspace_root() {
let (service, _socket) = LspService::new(Backend::new);
let init_params = InitializeParams::default();
service.inner().initialize(init_params).await.unwrap();
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(
&skill_path,
r#"---
name: test-skill
version: 1.0.0
model: sonnet
---
# Test Skill
"#,
)
.unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: String::new(),
},
})
.await;
}
#[tokio::test]
async fn test_cached_config_used_for_multiple_validations() {
let (service, _socket) = LspService::new(Backend::new);
service
.inner()
.initialize(InitializeParams::default())
.await
.unwrap();
let temp_dir = tempfile::tempdir().unwrap();
for i in 0..3 {
let skill_path = temp_dir.path().join(format!("skill{}/SKILL.md", i));
std::fs::create_dir_all(skill_path.parent().unwrap()).unwrap();
std::fs::write(
&skill_path,
format!(
r#"---
name: test-skill-{}
version: 1.0.0
model: sonnet
---
# Test Skill {}
"#,
i, i
),
)
.unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: String::new(),
},
})
.await;
}
}
#[tokio::test]
async fn test_cached_registry_used_for_multiple_validations() {
let (service, _socket) = LspService::new(Backend::new);
service
.inner()
.initialize(InitializeParams::default())
.await
.unwrap();
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(
&skill_path,
r#"---
name: test-skill
version: 1.0.0
model: sonnet
---
# Test Skill
"#,
)
.unwrap();
let claude_path = temp_dir.path().join("CLAUDE.md");
std::fs::write(
&claude_path,
r#"# Project Memory
This is a test project.
"#,
)
.unwrap();
for path in [&skill_path, &claude_path] {
let uri = Url::from_file_path(path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: String::new(),
},
})
.await;
}
}
#[tokio::test]
async fn test_document_cache_cleared_on_close() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(
&skill_path,
"---\nname: test\ndescription: Test\n---\n# Test",
)
.unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: uri.clone(),
language_id: "markdown".to_string(),
version: 1,
text: "---\nname: test\ndescription: Test\n---\n# Test".to_string(),
},
})
.await;
let hover_before = service
.inner()
.hover(HoverParams {
text_document_position_params: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri: uri.clone() },
position: Position {
line: 1,
character: 0,
},
},
work_done_progress_params: WorkDoneProgressParams::default(),
})
.await;
assert!(hover_before.is_ok());
assert!(hover_before.unwrap().is_some());
service
.inner()
.did_close(DidCloseTextDocumentParams {
text_document: TextDocumentIdentifier { uri: uri.clone() },
})
.await;
let hover_after = service
.inner()
.hover(HoverParams {
text_document_position_params: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri },
position: Position {
line: 1,
character: 0,
},
},
work_done_progress_params: WorkDoneProgressParams::default(),
})
.await;
assert!(hover_after.is_ok());
assert!(hover_after.unwrap().is_none());
}
#[tokio::test]
async fn test_document_cache_updated_on_change() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(&skill_path, "# Initial").unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: uri.clone(),
language_id: "markdown".to_string(),
version: 1,
text: "# Initial".to_string(),
},
})
.await;
service
.inner()
.did_change(DidChangeTextDocumentParams {
text_document: VersionedTextDocumentIdentifier {
uri: uri.clone(),
version: 2,
},
content_changes: vec![TextDocumentContentChangeEvent {
range: None,
range_length: None,
text: "---\nname: updated\ndescription: Updated\n---\n# Updated".to_string(),
}],
})
.await;
let hover = service
.inner()
.hover(HoverParams {
text_document_position_params: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri },
position: Position {
line: 1,
character: 0,
},
},
work_done_progress_params: WorkDoneProgressParams::default(),
})
.await;
assert!(hover.is_ok());
assert!(hover.unwrap().is_some());
}
#[tokio::test]
async fn test_get_document_content_returns_shared_arc() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(&skill_path, "# Shared").unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: uri.clone(),
language_id: "markdown".to_string(),
version: 1,
text: "# Shared".to_string(),
},
})
.await;
let first = service
.inner()
.get_document_content(&uri)
.await
.expect("cached content should exist");
let second = service
.inner()
.get_document_content(&uri)
.await
.expect("cached content should exist");
assert!(Arc::ptr_eq(&first, &second));
}
#[tokio::test]
async fn test_multiple_documents_independent_caches() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill1_path = temp_dir.path().join("skill1").join("SKILL.md");
let skill2_path = temp_dir.path().join("skill2").join("SKILL.md");
std::fs::create_dir_all(skill1_path.parent().unwrap()).unwrap();
std::fs::create_dir_all(skill2_path.parent().unwrap()).unwrap();
std::fs::write(
&skill1_path,
"---\nname: skill-one\ndescription: First\n---\n# One",
)
.unwrap();
std::fs::write(
&skill2_path,
"---\nname: skill-two\ndescription: Second\n---\n# Two",
)
.unwrap();
let uri1 = Url::from_file_path(&skill1_path).unwrap();
let uri2 = Url::from_file_path(&skill2_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: uri1.clone(),
language_id: "markdown".to_string(),
version: 1,
text: "---\nname: skill-one\ndescription: First\n---\n# One".to_string(),
},
})
.await;
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: uri2.clone(),
language_id: "markdown".to_string(),
version: 1,
text: "---\nname: skill-two\ndescription: Second\n---\n# Two".to_string(),
},
})
.await;
service
.inner()
.did_close(DidCloseTextDocumentParams {
text_document: TextDocumentIdentifier { uri: uri1.clone() },
})
.await;
let hover1 = service
.inner()
.hover(HoverParams {
text_document_position_params: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri: uri1 },
position: Position {
line: 1,
character: 0,
},
},
work_done_progress_params: WorkDoneProgressParams::default(),
})
.await;
assert!(hover1.is_ok());
assert!(hover1.unwrap().is_none());
let hover2 = service
.inner()
.hover(HoverParams {
text_document_position_params: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri: uri2 },
position: Position {
line: 1,
character: 0,
},
},
work_done_progress_params: WorkDoneProgressParams::default(),
})
.await;
assert!(hover2.is_ok());
assert!(hover2.unwrap().is_some());
}
#[tokio::test]
async fn test_did_change_configuration_valid_settings() {
    // A fully-populated, well-formed settings object must be accepted
    // without panicking. Smoke test: no assertions.
    let (service, _socket) = LspService::new(Backend::new);
    let backend = service.inner();
    backend.initialize(InitializeParams::default()).await.unwrap();
    backend
        .did_change_configuration(DidChangeConfigurationParams {
            settings: serde_json::json!({
                "severity": "Error",
                "target": "ClaudeCode",
                "rules": {
                    "skills": false,
                    "hooks": true
                }
            }),
        })
        .await;
}
#[tokio::test]
async fn test_did_change_configuration_partial_settings() {
    // Settings with only a subset of fields present must still be accepted.
    let (service, _socket) = LspService::new(Backend::new);
    let backend = service.inner();
    backend.initialize(InitializeParams::default()).await.unwrap();
    backend
        .did_change_configuration(DidChangeConfigurationParams {
            settings: serde_json::json!({ "severity": "Info" }),
        })
        .await;
}
#[tokio::test]
async fn test_did_change_configuration_invalid_json() {
    // A settings payload that is not a JSON object (here: a bare string)
    // must be tolerated without panicking.
    let (service, _socket) = LspService::new(Backend::new);
    let backend = service.inner();
    backend.initialize(InitializeParams::default()).await.unwrap();
    backend
        .did_change_configuration(DidChangeConfigurationParams {
            settings: serde_json::json!("not an object"),
        })
        .await;
}
#[test]
fn test_config_revalidation_concurrency_bounds() {
    // Zero documents means zero workers.
    assert_eq!(config_revalidation_concurrency(0), 0);
    // A single document never gets more than one worker.
    assert_eq!(config_revalidation_concurrency(1), 1);
    // Large document counts are capped by host parallelism, itself clamped
    // to MAX_CONFIG_REVALIDATION_CONCURRENCY (mirrors the helper's logic).
    let expected_cap = std::thread::available_parallelism()
        .map_or(4, |count| count.get())
        .clamp(1, MAX_CONFIG_REVALIDATION_CONCURRENCY);
    assert_eq!(
        config_revalidation_concurrency(MAX_CONFIG_REVALIDATION_CONCURRENCY * 4),
        expected_cap
    );
}
#[tokio::test]
async fn test_for_each_bounded_empty_input() {
    // An empty item set must complete immediately with no join errors.
    let items: Vec<usize> = Vec::new();
    let errors = for_each_bounded(items, 3, |_| async {}).await;
    assert_eq!(errors.len(), 0);
}
#[tokio::test]
async fn test_for_each_bounded_collects_join_errors() {
    // One of three tasks panics; the panic must be surfaced as exactly one
    // JoinError while the other tasks still complete.
    let errors = for_each_bounded(vec![0usize, 1, 2], 2, |value| async move {
        match value {
            1 => panic!("intentional panic for join error coverage"),
            _ => {}
        }
    })
    .await;
    assert_eq!(errors.len(), 1);
    assert!(errors[0].is_panic());
}
#[tokio::test]
async fn test_should_publish_diagnostics_guard() {
// Exercises the staleness guard: diagnostics may only be published when the
// config generation they were computed under is still current AND the
// document snapshot they were computed from is still the stored one.
let (service, _socket) = LspService::new(Backend::new);
let backend = service.inner();
let temp_dir = tempfile::tempdir().unwrap();
let path = temp_dir.path().join("SKILL.md");
std::fs::write(&path, "# test").unwrap();
let uri = Url::from_file_path(&path).unwrap();
let snapshot = Arc::new("# test".to_string());
// Register the snapshot as the currently-open text for this URI.
backend
.documents
.write()
.await
.insert(uri.clone(), Arc::clone(&snapshot));
// Pin the generation the hypothetical validation ran under.
backend.config_generation.store(7, Ordering::SeqCst);
// Current generation + current snapshot => publish.
assert!(
backend
.should_publish_diagnostics(&uri, Some(7), Some(&snapshot))
.await
);
// Stale generation (6 vs stored 7) => suppress.
assert!(
!backend
.should_publish_diagnostics(&uri, Some(6), Some(&snapshot))
.await
);
// Replace the stored text for the same URI; the original snapshot is now
// stale => suppress even though the generation still matches.
backend
.documents
.write()
.await
.insert(uri.clone(), Arc::new("# updated".to_string()));
assert!(
!backend
.should_publish_diagnostics(&uri, Some(7), Some(&snapshot))
.await
);
// Document removed (closed) => nothing to publish against => suppress.
backend.documents.write().await.remove(&uri);
assert!(
!backend
.should_publish_diagnostics(&uri, Some(7), Some(&snapshot))
.await
);
// With no expectations supplied (None, None) the guard does not apply.
assert!(backend.should_publish_diagnostics(&uri, None, None).await);
}
#[tokio::test]
async fn test_did_change_configuration_concurrency_bound_helper() {
// Proves for_each_bounded never runs more than `max_concurrency` operations
// at once: the first wave of tasks parks on barriers while the test samples
// the in-flight peak, then is released so the remaining items can drain.
use std::sync::atomic::{AtomicUsize, Ordering};
use std::time::Duration;
use tokio::sync::Barrier;
let max_concurrency = 3usize;
// Counters shared with every worker closure.
let in_flight = Arc::new(AtomicUsize::new(0));
let peak_in_flight = Arc::new(AtomicUsize::new(0));
let completed = Arc::new(AtomicUsize::new(0));
// Each barrier admits the first `max_concurrency` workers plus this test.
let ready = Arc::new(Barrier::new(max_concurrency + 1));
let release = Arc::new(Barrier::new(max_concurrency + 1));
let total_items = 12usize;
let run = tokio::spawn(for_each_bounded(0..total_items, max_concurrency, {
let in_flight = Arc::clone(&in_flight);
let peak_in_flight = Arc::clone(&peak_in_flight);
let completed = Arc::clone(&completed);
let ready = Arc::clone(&ready);
let release = Arc::clone(&release);
move |idx| {
// Clone again per invocation: each returned future owns its handles.
let in_flight = Arc::clone(&in_flight);
let peak_in_flight = Arc::clone(&peak_in_flight);
let completed = Arc::clone(&completed);
let ready = Arc::clone(&ready);
let release = Arc::clone(&release);
async move {
// Track the number of operations running concurrently right now.
let current = in_flight.fetch_add(1, Ordering::SeqCst) + 1;
peak_in_flight.fetch_max(current, Ordering::SeqCst);
if idx < max_concurrency {
// First wave: hold the concurrency slots until the test has
// observed the peak, then wait for the explicit release.
ready.wait().await;
release.wait().await;
} else {
// Later items just yield once so the scheduler can interleave.
tokio::task::yield_now().await;
}
in_flight.fetch_sub(1, Ordering::SeqCst);
completed.fetch_add(1, Ordering::SeqCst);
}
}
}));
// Block until all first-wave workers are parked on `ready`.
tokio::time::timeout(Duration::from_secs(2), ready.wait())
.await
.expect("timed out waiting for first wave");
// With the first wave parked, the observed peak must equal the bound.
assert_eq!(peak_in_flight.load(Ordering::SeqCst), max_concurrency);
// Unblock the first wave so the remaining items can run.
tokio::time::timeout(Duration::from_secs(2), release.wait())
.await
.expect("timed out releasing first wave");
let join_errors = tokio::time::timeout(Duration::from_secs(2), run)
.await
.expect("timed out waiting for bounded worker completion")
.unwrap();
// No task panicked and every item was processed.
assert!(join_errors.is_empty());
assert_eq!(completed.load(Ordering::SeqCst), total_items);
}
#[tokio::test]
async fn test_did_change_configuration_triggers_revalidation() {
    // Opening a document and then pushing new settings should drive a
    // revalidation pass over it. Smoke test: no assertions.
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    let skill_path = temp_dir.path().join("SKILL.md");
    let contents = r#"---
name: test-skill
version: 1.0.0
model: sonnet
---
# Test Skill
"#;
    std::fs::write(&skill_path, contents).unwrap();
    let backend = service.inner();
    backend
        .initialize(InitializeParams {
            root_uri: Some(Url::from_file_path(temp_dir.path()).unwrap()),
            ..Default::default()
        })
        .await
        .unwrap();
    let uri = Url::from_file_path(&skill_path).unwrap();
    backend
        .did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "markdown".to_string(),
                version: 1,
                // Same bytes we just wrote to disk.
                text: contents.to_string(),
            },
        })
        .await;
    backend
        .did_change_configuration(DidChangeConfigurationParams {
            settings: serde_json::json!({
                "severity": "Error",
                "rules": {
                    "skills": false
                }
            }),
        })
        .await;
}
#[tokio::test]
async fn test_did_change_configuration_triggers_revalidation_for_multiple_documents() {
    // Several open documents must all survive a configuration push; the
    // backend should still be tracking every one of them afterwards.
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    let backend = service.inner();
    backend
        .initialize(InitializeParams {
            root_uri: Some(Url::from_file_path(temp_dir.path()).unwrap()),
            ..Default::default()
        })
        .await
        .unwrap();
    let document_count = 6usize;
    for i in 0..document_count {
        let skill_path = temp_dir.path().join(format!("skill-{i}/SKILL.md"));
        std::fs::create_dir_all(skill_path.parent().unwrap()).unwrap();
        let body = format!(
            r#"---
name: test-skill-{i}
version: 1.0.0
model: sonnet
---
# Test Skill {i}
"#
        );
        std::fs::write(&skill_path, &body).unwrap();
        backend
            .did_open(DidOpenTextDocumentParams {
                text_document: TextDocumentItem {
                    uri: Url::from_file_path(&skill_path).unwrap(),
                    language_id: "markdown".to_string(),
                    version: 1,
                    // Same bytes we just wrote to disk.
                    text: body,
                },
            })
            .await;
    }
    backend
        .did_change_configuration(DidChangeConfigurationParams {
            settings: serde_json::json!({
                "severity": "Error",
                "rules": {
                    "skills": false
                }
            }),
        })
        .await;
    // Every opened document must still be tracked.
    assert_eq!(backend.documents.read().await.len(), document_count);
}
#[tokio::test]
async fn test_did_change_configuration_empty_settings() {
    // An empty settings object is valid and must be handled without panic.
    let (service, _socket) = LspService::new(Backend::new);
    let backend = service.inner();
    backend.initialize(InitializeParams::default()).await.unwrap();
    backend
        .did_change_configuration(DidChangeConfigurationParams {
            settings: serde_json::json!({}),
        })
        .await;
}
#[tokio::test]
async fn test_did_change_configuration_with_versions() {
    // Per-tool version pins in the settings payload must be accepted.
    let (service, _socket) = LspService::new(Backend::new);
    let backend = service.inner();
    backend.initialize(InitializeParams::default()).await.unwrap();
    backend
        .did_change_configuration(DidChangeConfigurationParams {
            settings: serde_json::json!({
                "versions": {
                    "claude_code": "1.0.0",
                    "codex": "0.1.0",
                    "cursor": "0.45.0",
                    "copilot": "1.2.0"
                }
            }),
        })
        .await;
}
#[tokio::test]
async fn test_did_change_configuration_with_specs() {
    // Spec-version overrides in the settings payload must be accepted.
    let (service, _socket) = LspService::new(Backend::new);
    let backend = service.inner();
    backend.initialize(InitializeParams::default()).await.unwrap();
    backend
        .did_change_configuration(DidChangeConfigurationParams {
            settings: serde_json::json!({
                "specs": {
                    "mcp_protocol": "2025-06-18",
                    "agent_skills_spec": "1.0",
                    "agents_md_spec": "1.0"
                }
            }),
        })
        .await;
}
#[tokio::test]
async fn test_did_change_configuration_with_tools_array() {
    // A `tools` array in the settings payload must be accepted.
    let (service, _socket) = LspService::new(Backend::new);
    let backend = service.inner();
    backend.initialize(InitializeParams::default()).await.unwrap();
    backend
        .did_change_configuration(DidChangeConfigurationParams {
            settings: serde_json::json!({
                "tools": ["claude-code", "cursor", "github-copilot"]
            }),
        })
        .await;
}
#[tokio::test]
async fn test_did_change_configuration_with_disabled_rules() {
    // A list of disabled rule IDs must be accepted without panic.
    let (service, _socket) = LspService::new(Backend::new);
    let backend = service.inner();
    backend.initialize(InitializeParams::default()).await.unwrap();
    backend
        .did_change_configuration(DidChangeConfigurationParams {
            settings: serde_json::json!({
                "rules": {
                    "disabled_rules": ["AS-001", "PE-003", "MCP-008"]
                }
            }),
        })
        .await;
}
#[tokio::test]
async fn test_did_change_configuration_with_locale() {
    // A `locale` field in the settings must switch the process-wide
    // rust_i18n locale; verified below, then restored to "en".
    let (service, _socket) = LspService::new(Backend::new);
    service
        .inner()
        .initialize(InitializeParams::default())
        .await
        .unwrap();
    let settings = serde_json::json!({
        "severity": "Warning",
        "locale": "es"
    });
    service
        .inner()
        .did_change_configuration(DidChangeConfigurationParams { settings })
        .await;
    // Capture the active locale and restore the default BEFORE asserting:
    // if the assert fires first (as in the original ordering), the global
    // locale stays "es" and poisons other tests in the same process.
    let active = rust_i18n::locale().to_string();
    rust_i18n::set_locale("en");
    assert_eq!(active, "es");
}
#[test]
fn test_normalize_path_resolves_parent() {
    // A single ".." consumes the preceding normal component.
    assert_eq!(normalize_path(Path::new("/a/b/../c")), Path::new("/a/c"));
}
#[test]
fn test_normalize_path_removes_curdir() {
    // "." components are dropped outright.
    assert_eq!(normalize_path(Path::new("/a/./b/./c")), Path::new("/a/b/c"));
}
#[test]
fn test_normalize_path_multiple_parent() {
    // Consecutive ".." components unwind one normal component each.
    assert_eq!(normalize_path(Path::new("/a/b/../../c")), Path::new("/c"));
}
#[test]
fn test_normalize_path_already_clean() {
    // A path with no special components passes through unchanged.
    assert_eq!(normalize_path(Path::new("/a/b/c")), Path::new("/a/b/c"));
}
#[test]
fn test_normalize_path_cannot_escape_root() {
    // ".." directly under the root is ignored rather than escaping it.
    assert_eq!(normalize_path(Path::new("/../a")), Path::new("/a"));
}
#[test]
fn test_normalize_path_root_only() {
    // The bare root normalizes to itself.
    assert_eq!(normalize_path(Path::new("/")), Path::new("/"));
}
#[test]
fn test_normalize_path_excessive_parent_traversal() {
    // More ".." than components: the surplus is clamped at the root.
    assert_eq!(normalize_path(Path::new("/a/../../../b")), Path::new("/b"));
}
#[test]
fn test_normalize_path_mixed_special_components() {
    // "." and ".." interleaved with normal components resolve together.
    assert_eq!(
        normalize_path(Path::new("/a/./b/../c/./d")),
        Path::new("/a/c/d")
    );
}
#[tokio::test]
async fn test_path_traversal_outside_workspace_rejected() {
    // A did_open whose path climbs out of the workspace root via ".." must
    // be rejected by the traversal guard. Smoke test: no assertions, just
    // no panic.
    let (service, _socket) = LspService::new(Backend::new);
    let workspace_dir = tempfile::tempdir().unwrap();
    let outside_dir = tempfile::tempdir().unwrap();
    let outside_name = outside_dir
        .path()
        .file_name()
        .expect("should have a file name")
        .to_str()
        .expect("should be valid UTF-8");
    let root_uri = Url::from_file_path(workspace_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    // Both tempdirs are siblings under the same temp parent, so a single
    // ".." from the workspace root reaches `outside_dir`. (The previous
    // version used two ".." components, which resolved above the shared
    // parent and never actually referenced `outside_dir`; the test only
    // passed because that path was outside the workspace too.)
    let traversal_path = workspace_dir
        .path()
        .join("..")
        .join(outside_name)
        .join("SKILL.md");
    let uri = Url::from_file_path(&traversal_path).unwrap();
    service
        .inner()
        .did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri,
                language_id: "markdown".to_string(),
                version: 1,
                text: "---\nname: evil\n---\n# Evil".to_string(),
            },
        })
        .await;
}
#[tokio::test]
async fn test_path_traversal_inside_workspace_accepted() {
    // "subdir/.." normalizes back inside the workspace, so this did_open
    // must be accepted. Smoke test: no assertions.
    let (service, _socket) = LspService::new(Backend::new);
    let workspace_dir = tempfile::tempdir().unwrap();
    std::fs::create_dir(workspace_dir.path().join("subdir")).unwrap();
    let skill_path = workspace_dir.path().join("SKILL.md");
    let body = "---\nname: test-skill\nversion: 1.0.0\nmodel: sonnet\n---\n\n# Test Skill\n";
    std::fs::write(&skill_path, body).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(Url::from_file_path(workspace_dir.path()).unwrap()),
            ..Default::default()
        })
        .await
        .unwrap();
    // Take a detour through the subdirectory and back up again.
    let traversal_path = workspace_dir
        .path()
        .join("subdir")
        .join("..")
        .join("SKILL.md");
    service
        .inner()
        .did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: Url::from_file_path(&traversal_path).unwrap(),
                language_id: "markdown".to_string(),
                version: 1,
                // Same bytes we just wrote to disk.
                text: body.to_string(),
            },
        })
        .await;
}
#[tokio::test]
async fn test_nonexistent_file_in_workspace_accepted() {
    // A did_open for an in-workspace path with no file on disk must still be
    // handled (the buffer text arrives in the notification itself).
    let (service, _socket) = LspService::new(Backend::new);
    let workspace_dir = tempfile::tempdir().unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(Url::from_file_path(workspace_dir.path()).unwrap()),
            ..Default::default()
        })
        .await
        .unwrap();
    let missing_path = workspace_dir.path().join("SKILL.md");
    service
        .inner()
        .did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: Url::from_file_path(&missing_path).unwrap(),
                language_id: "markdown".to_string(),
                version: 1,
                text: "---\nname: ghost\n---\n# Ghost".to_string(),
            },
        })
        .await;
}
#[tokio::test]
async fn test_dot_components_in_path_accepted() {
    // A file URI containing a literal "./" component should still resolve
    // to a document inside the workspace. Smoke test: no assertions.
    // NOTE(review): URL parsers normalize dot segments per RFC 3986, so the
    // "." may already be stripped inside Url::parse before the server sees
    // it — confirm this exercises dot handling end to end.
    let (service, _socket) = LspService::new(Backend::new);
    let workspace_dir = tempfile::tempdir().unwrap();
    let skill_path = workspace_dir.path().join("SKILL.md");
    let body = "---\nname: test-skill\nversion: 1.0.0\nmodel: sonnet\n---\n\n# Test Skill\n";
    std::fs::write(&skill_path, body).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(Url::from_file_path(workspace_dir.path()).unwrap()),
            ..Default::default()
        })
        .await
        .unwrap();
    // Build the URI by hand so the "." survives into the raw string form.
    let dot_path = format!("{}/./SKILL.md", workspace_dir.path().display());
    let uri = Url::parse(&format!("file://{}", dot_path)).unwrap();
    service
        .inner()
        .did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri,
                language_id: "markdown".to_string(),
                version: 1,
                // Same bytes we just wrote to disk.
                text: body.to_string(),
            },
        })
        .await;
}
}