use super::*;
use tower_lsp::LspService;
// A freshly constructed Backend must accept a default `initialize` request.
#[tokio::test]
async fn test_backend_new_creates_valid_instance() {
    let (service, _socket) = LspService::new(Backend::new);
    let response = service
        .inner()
        .initialize(InitializeParams::default())
        .await;
    assert!(response.is_ok());
}
// Verifies the capabilities advertised by `initialize`: FULL text-document
// sync, a completion provider, and server_info naming "agnix-lsp" with a
// version string.
#[tokio::test]
async fn test_initialize_returns_correct_capabilities() {
let (service, _socket) = LspService::new(Backend::new);
let init_params = InitializeParams::default();
let result = service.inner().initialize(init_params).await;
let init_result = result.expect("initialize should succeed");
// Sync capability must be the Kind variant set to FULL (whole-document sync).
match init_result.capabilities.text_document_sync {
Some(TextDocumentSyncCapability::Kind(kind)) => {
assert_eq!(kind, TextDocumentSyncKind::FULL);
}
_ => panic!("Expected FULL text document sync capability"),
}
assert!(
init_result.capabilities.completion_provider.is_some(),
"Expected completion provider capability"
);
let server_info = init_result
.server_info
.expect("server_info should be present");
assert_eq!(server_info.name, "agnix-lsp");
assert!(server_info.version.is_some());
}
// Opens a SKILL.md whose frontmatter contains the partial key "na" and asks
// for completion with the cursor inside that key; the returned items must
// include the "name" frontmatter field.
#[tokio::test]
async fn test_completion_returns_skill_frontmatter_candidates() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
// Frontmatter block with an incomplete key "na" on document line 1.
let content = "---\nna\n---\n";
std::fs::write(&skill_path, content).unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: uri.clone(),
language_id: "markdown".to_string(),
version: 1,
text: content.to_string(),
},
})
.await;
let completion = service
.inner()
.completion(CompletionParams {
text_document_position: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri },
// Cursor at line 1, char 1 — just after the "n" of "na".
position: Position {
line: 1,
character: 1,
},
},
work_done_progress_params: WorkDoneProgressParams::default(),
partial_result_params: PartialResultParams::default(),
context: None,
})
.await
.unwrap();
let items = match completion {
Some(CompletionResponse::Array(items)) => items,
_ => panic!("Expected completion items"),
};
assert!(items.iter().any(|item| item.label == "name"));
}
// `shutdown` must succeed even without a prior `initialize`.
#[tokio::test]
async fn test_shutdown_returns_ok() {
    let (service, _socket) = LspService::new(Backend::new);
    assert!(service.inner().shutdown().await.is_ok());
}
// Structure-only test (no Backend involved): builds the Diagnostic shape used
// for validation failures and checks code, source, severity, and message
// prefix. NOTE(review): assumed to mirror the diagnostic the server publishes
// on parse failure — confirm against the Backend's validation path.
#[test]
fn test_validation_error_diagnostic_structure() {
let error_message = "Failed to parse file";
let diagnostic = Diagnostic {
// Zero-width range at the start of the document.
range: Range {
start: Position {
line: 0,
character: 0,
},
end: Position {
line: 0,
character: 0,
},
},
severity: Some(DiagnosticSeverity::ERROR),
code: Some(NumberOrString::String(
"agnix::validation-error".to_string(),
)),
code_description: None,
source: Some("agnix".to_string()),
message: format!("Validation error: {}", error_message),
related_information: None,
tags: None,
data: None,
};
assert_eq!(
diagnostic.code,
Some(NumberOrString::String(
"agnix::validation-error".to_string()
))
);
assert_eq!(diagnostic.source, Some("agnix".to_string()));
assert_eq!(diagnostic.severity, Some(DiagnosticSeverity::ERROR));
assert!(diagnostic.message.contains("Validation error:"));
}
// Structure-only counterpart for internal errors (e.g. a panicked validation
// task): code "agnix::internal-error", source "agnix", ERROR severity, and an
// "Internal error:" message prefix.
#[test]
fn test_internal_error_diagnostic_structure() {
let error_message = "task panicked";
let diagnostic = Diagnostic {
// Zero-width range at the start of the document.
range: Range {
start: Position {
line: 0,
character: 0,
},
end: Position {
line: 0,
character: 0,
},
},
severity: Some(DiagnosticSeverity::ERROR),
code: Some(NumberOrString::String("agnix::internal-error".to_string())),
code_description: None,
source: Some("agnix".to_string()),
message: format!("Internal error: {}", error_message),
related_information: None,
tags: None,
data: None,
};
assert_eq!(
diagnostic.code,
Some(NumberOrString::String("agnix::internal-error".to_string()))
);
assert_eq!(diagnostic.source, Some("agnix".to_string()));
assert_eq!(diagnostic.severity, Some(DiagnosticSeverity::ERROR));
assert!(diagnostic.message.contains("Internal error:"));
}
// Non-file schemes (http, data) must fail `to_file_path`, while a
// platform-appropriate file:// URI must succeed.
#[test]
fn test_invalid_uri_detection() {
let http_uri = Url::parse("http://example.com/file.md").unwrap();
assert!(http_uri.to_file_path().is_err());
let data_uri = Url::parse("data:text/plain;base64,SGVsbG8=").unwrap();
assert!(data_uri.to_file_path().is_err());
// Windows file URIs need a drive letter; Unix paths start at "/".
#[cfg(windows)]
let file_uri = Url::parse("file:///C:/tmp/test.md").unwrap();
#[cfg(not(windows))]
let file_uri = Url::parse("file:///tmp/test.md").unwrap();
assert!(file_uri.to_file_path().is_ok());
}
// Opening a well-formed SKILL.md must be handled without panicking; any
// diagnostics go out over the (unread) socket.
#[tokio::test]
async fn test_validate_file_valid_skill() {
    let (service, _socket) = LspService::new(Backend::new);
    let dir = tempfile::tempdir().unwrap();
    let skill_path = dir.path().join("SKILL.md");
    let body = "---\nname: test-skill\nversion: 1.0.0\nmodel: sonnet\n---\n# Test Skill\nThis is a valid skill.\n";
    std::fs::write(&skill_path, body).unwrap();
    let text_document = TextDocumentItem {
        uri: Url::from_file_path(&skill_path).unwrap(),
        language_id: "markdown".to_string(),
        version: 1,
        text: String::new(),
    };
    service
        .inner()
        .did_open(DidOpenTextDocumentParams { text_document })
        .await;
}
// Opening a SKILL.md with an invalid frontmatter name (contains spaces) must
// be handled without panicking; diagnostics are published over the socket and
// not inspected here.
#[tokio::test]
async fn test_validate_file_invalid_skill() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(
&skill_path,
r#"---
name: Invalid Name With Spaces
version: 1.0.0
model: sonnet
---
# Invalid Skill
This skill has an invalid name.
"#,
)
.unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
// Empty in-memory text: validation reads the file from disk.
text: String::new(),
},
})
.await;
}
// `did_save` on an on-disk SKILL.md must complete cleanly (the triggered
// validation is not observed directly here).
#[tokio::test]
async fn test_did_save_triggers_validation() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(
&skill_path,
r#"---
name: test-skill
version: 1.0.0
model: sonnet
---
# Test Skill
"#,
)
.unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_save(DidSaveTextDocumentParams {
text_document: TextDocumentIdentifier { uri },
// No text payload: the server must read the saved file itself.
text: None,
})
.await;
}
// Saving AGENTS.md (a project-trigger file) must bump the backend's
// project_validation_generation counter, proving a project-level
// revalidation pass was started.
#[tokio::test]
async fn test_did_save_project_trigger_starts_project_revalidation() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let agents_path = temp_dir.path().join("AGENTS.md");
std::fs::write(&agents_path, "# Root AGENTS").unwrap();
let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
service
.inner()
.initialize(InitializeParams {
root_uri: Some(root_uri),
..Default::default()
})
.await
.unwrap();
// Snapshot the generation counter before the save.
let before = service
.inner()
.project_validation_generation
.load(Ordering::SeqCst);
let uri = Url::from_file_path(&agents_path).unwrap();
service
.inner()
.did_save(DidSaveTextDocumentParams {
text_document: TextDocumentIdentifier { uri },
text: None,
})
.await;
// Revalidation runs asynchronously; poll up to 40 x 25ms = 1s for the
// counter to advance past the pre-save snapshot.
let mut observed_increment = false;
for _ in 0..40 {
let current = service
.inner()
.project_validation_generation
.load(Ordering::SeqCst);
if current > before {
observed_increment = true;
break;
}
tokio::time::sleep(std::time::Duration::from_millis(25)).await;
}
assert!(
observed_increment,
"did_save on AGENTS.md should trigger project-level revalidation"
);
}
// `did_close` must complete cleanly, even for a document that was never
// opened through the server.
#[tokio::test]
async fn test_did_close_clears_diagnostics() {
    let (service, _socket) = LspService::new(Backend::new);
    let dir = tempfile::tempdir().unwrap();
    let path = dir.path().join("SKILL.md");
    std::fs::write(&path, "# Test").unwrap();
    let text_document = TextDocumentIdentifier {
        uri: Url::from_file_path(&path).unwrap(),
    };
    service
        .inner()
        .did_close(DidCloseTextDocumentParams { text_document })
        .await;
}
// The `initialized` notification must return without side effects we can
// observe failing (smoke test).
#[tokio::test]
async fn test_initialized_completes() {
    let (service, _socket) = LspService::new(Backend::new);
    let backend = service.inner();
    backend.initialized(InitializedParams {}).await;
}
// `did_open` with a non-file:// URI must be ignored gracefully (no panic).
#[tokio::test]
async fn test_non_file_uri_handled_gracefully() {
    let (service, _socket) = LspService::new(Backend::new);
    let text_document = TextDocumentItem {
        uri: Url::parse("http://example.com/test.md").unwrap(),
        language_id: "markdown".to_string(),
        version: 1,
        text: String::new(),
    };
    service
        .inner()
        .did_open(DidOpenTextDocumentParams { text_document })
        .await;
}
// `did_open` for a URI whose file does not exist on disk must be handled
// gracefully (no panic).
#[tokio::test]
async fn test_validate_nonexistent_file() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
// Path inside the temp dir, but never written to disk.
let nonexistent_path = temp_dir.path().join("nonexistent.md");
let uri = Url::from_file_path(&nonexistent_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: String::new(),
},
})
.await;
}
// The version advertised in server_info must be a non-empty dotted string.
#[tokio::test]
async fn test_server_info_version() {
    let (service, _socket) = LspService::new(Backend::new);
    let init = service
        .inner()
        .initialize(InitializeParams::default())
        .await
        .unwrap();
    let version = init
        .server_info
        .and_then(|info| info.version)
        .expect("server version should be present");
    assert!(!version.is_empty());
    assert!(version.contains('.'));
}
// `initialize` must accept an explicit workspace root URI.
#[tokio::test]
async fn test_initialize_captures_workspace_root() {
    let (service, _socket) = LspService::new(Backend::new);
    let dir = tempfile::tempdir().unwrap();
    let params = InitializeParams {
        root_uri: Some(Url::from_file_path(dir.path()).unwrap()),
        ..Default::default()
    };
    assert!(service.inner().initialize(params).await.is_ok());
}
// A valid .agnix.toml in the workspace root must be loaded during
// `initialize` without failing the handshake.
#[tokio::test]
async fn test_initialize_loads_config_from_file() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let config_path = temp_dir.path().join(".agnix.toml");
std::fs::write(
&config_path,
r#"
severity = "Warning"
target = "ClaudeCode"
exclude = []
[rules]
skills = false
"#,
)
.unwrap();
let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
let init_params = InitializeParams {
root_uri: Some(root_uri),
..Default::default()
};
let result = service.inner().initialize(init_params).await;
assert!(result.is_ok());
}
// A syntactically broken .agnix.toml must not fail `initialize` — the server
// falls back rather than erroring the handshake.
#[tokio::test]
async fn test_initialize_handles_invalid_config() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let config_path = temp_dir.path().join(".agnix.toml");
std::fs::write(&config_path, "this is not valid toml [[[").unwrap();
let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
let init_params = InitializeParams {
root_uri: Some(root_uri),
..Default::default()
};
let result = service.inner().initialize(init_params).await;
assert!(result.is_ok());
}
// A file located inside the initialized workspace root must be accepted by
// `did_open` (smoke test; diagnostics not inspected).
#[tokio::test]
async fn test_file_within_workspace_validated() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(
&skill_path,
r#"---
name: test-skill
version: 1.0.0
model: sonnet
---
# Test Skill
"#,
)
.unwrap();
let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
let init_params = InitializeParams {
root_uri: Some(root_uri),
..Default::default()
};
service.inner().initialize(init_params).await.unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: String::new(),
},
})
.await;
}
// A file outside the initialized workspace root must be handled without
// panicking when opened (expected to be rejected; not directly observable
// here, diagnostics go to the socket).
#[tokio::test]
async fn test_file_outside_workspace_rejected() {
let (service, _socket) = LspService::new(Backend::new);
// Two distinct temp dirs: one is the workspace, the other holds the file.
let workspace_dir = tempfile::tempdir().unwrap();
let outside_dir = tempfile::tempdir().unwrap();
let outside_file = outside_dir.path().join("SKILL.md");
std::fs::write(
&outside_file,
r#"---
name: outside-skill
version: 1.0.0
model: sonnet
---
# Outside Skill
"#,
)
.unwrap();
let root_uri = Url::from_file_path(workspace_dir.path()).unwrap();
let init_params = InitializeParams {
root_uri: Some(root_uri),
..Default::default()
};
service.inner().initialize(init_params).await.unwrap();
let uri = Url::from_file_path(&outside_file).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: String::new(),
},
})
.await;
}
// With no root_uri provided at initialize, opening any file must still be
// handled gracefully.
#[tokio::test]
async fn test_validation_without_workspace_root() {
let (service, _socket) = LspService::new(Backend::new);
let init_params = InitializeParams::default();
service.inner().initialize(init_params).await.unwrap();
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(
&skill_path,
r#"---
name: test-skill
version: 1.0.0
model: sonnet
---
# Test Skill
"#,
)
.unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: String::new(),
},
})
.await;
}
// Opens three skills in sequence; all must be handled cleanly. NOTE(review):
// config-cache reuse is not asserted directly — this is a smoke test over
// repeated validations.
#[tokio::test]
async fn test_cached_config_used_for_multiple_validations() {
let (service, _socket) = LspService::new(Backend::new);
service
.inner()
.initialize(InitializeParams::default())
.await
.unwrap();
let temp_dir = tempfile::tempdir().unwrap();
// Three skills in sibling directories, opened one after another.
for i in 0..3 {
let skill_path = temp_dir.path().join(format!("skill{}/SKILL.md", i));
std::fs::create_dir_all(skill_path.parent().unwrap()).unwrap();
std::fs::write(
&skill_path,
format!(
r#"---
name: test-skill-{}
version: 1.0.0
model: sonnet
---
# Test Skill {}
"#,
i, i
),
)
.unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: String::new(),
},
})
.await;
}
}
// Opens two different file kinds (SKILL.md and CLAUDE.md) back to back; both
// must be handled cleanly. NOTE(review): registry-cache reuse is not asserted
// directly — smoke test only.
#[tokio::test]
async fn test_cached_registry_used_for_multiple_validations() {
let (service, _socket) = LspService::new(Backend::new);
service
.inner()
.initialize(InitializeParams::default())
.await
.unwrap();
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(
&skill_path,
r#"---
name: test-skill
version: 1.0.0
model: sonnet
---
# Test Skill
"#,
)
.unwrap();
let claude_path = temp_dir.path().join("CLAUDE.md");
std::fs::write(
&claude_path,
r#"# Project Memory
This is a test project.
"#,
)
.unwrap();
for path in [&skill_path, &claude_path] {
let uri = Url::from_file_path(path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: String::new(),
},
})
.await;
}
}
// Hover must succeed while the document is open (content cached), and return
// None after did_close removes the document from the cache.
#[tokio::test]
async fn test_document_cache_cleared_on_close() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(
&skill_path,
"---\nname: test\ndescription: Test\n---\n# Test",
)
.unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: uri.clone(),
language_id: "markdown".to_string(),
version: 1,
text: "---\nname: test\ndescription: Test\n---\n# Test".to_string(),
},
})
.await;
// Hover over line 1 ("name: test") while the document is cached.
let hover_before = service
.inner()
.hover(HoverParams {
text_document_position_params: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri: uri.clone() },
position: Position {
line: 1,
character: 0,
},
},
work_done_progress_params: WorkDoneProgressParams::default(),
})
.await;
assert!(hover_before.is_ok());
assert!(hover_before.unwrap().is_some());
service
.inner()
.did_close(DidCloseTextDocumentParams {
text_document: TextDocumentIdentifier { uri: uri.clone() },
})
.await;
// Same hover after close: the cache entry is gone, so None is expected.
let hover_after = service
.inner()
.hover(HoverParams {
text_document_position_params: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri },
position: Position {
line: 1,
character: 0,
},
},
work_done_progress_params: WorkDoneProgressParams::default(),
})
.await;
assert!(hover_after.is_ok());
assert!(hover_after.unwrap().is_none());
}
// After a full-document did_change replaces "# Initial" with frontmatter
// content, hover over the new frontmatter line must return Some — proving
// the cache reflects the changed text, not the original.
#[tokio::test]
async fn test_document_cache_updated_on_change() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(&skill_path, "# Initial").unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: uri.clone(),
language_id: "markdown".to_string(),
version: 1,
text: "# Initial".to_string(),
},
})
.await;
service
.inner()
.did_change(DidChangeTextDocumentParams {
text_document: VersionedTextDocumentIdentifier {
uri: uri.clone(),
version: 2,
},
// range: None means full-document replacement (FULL sync mode).
content_changes: vec![TextDocumentContentChangeEvent {
range: None,
range_length: None,
text: "---\nname: updated\ndescription: Updated\n---\n# Updated".to_string(),
}],
})
.await;
let hover = service
.inner()
.hover(HoverParams {
text_document_position_params: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri },
position: Position {
line: 1,
character: 0,
},
},
work_done_progress_params: WorkDoneProgressParams::default(),
})
.await;
assert!(hover.is_ok());
assert!(hover.unwrap().is_some());
}
// Two consecutive get_document_content calls must return the same Arc
// allocation (pointer equality), i.e. the cache shares rather than clones.
#[tokio::test]
async fn test_get_document_content_returns_shared_arc() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(&skill_path, "# Shared").unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: uri.clone(),
language_id: "markdown".to_string(),
version: 1,
text: "# Shared".to_string(),
},
})
.await;
let first = service
.inner()
.get_document_content(&uri)
.await
.expect("cached content should exist");
let second = service
.inner()
.get_document_content(&uri)
.await
.expect("cached content should exist");
// ptr_eq: both handles point at the one cached allocation.
assert!(Arc::ptr_eq(&first, &second));
}
// Closing one document must evict only its own cache entry: hover on the
// closed doc returns None while hover on the still-open doc returns Some.
#[tokio::test]
async fn test_multiple_documents_independent_caches() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill1_path = temp_dir.path().join("skill1").join("SKILL.md");
let skill2_path = temp_dir.path().join("skill2").join("SKILL.md");
std::fs::create_dir_all(skill1_path.parent().unwrap()).unwrap();
std::fs::create_dir_all(skill2_path.parent().unwrap()).unwrap();
std::fs::write(
&skill1_path,
"---\nname: skill-one\ndescription: First\n---\n# One",
)
.unwrap();
std::fs::write(
&skill2_path,
"---\nname: skill-two\ndescription: Second\n---\n# Two",
)
.unwrap();
let uri1 = Url::from_file_path(&skill1_path).unwrap();
let uri2 = Url::from_file_path(&skill2_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: uri1.clone(),
language_id: "markdown".to_string(),
version: 1,
text: "---\nname: skill-one\ndescription: First\n---\n# One".to_string(),
},
})
.await;
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: uri2.clone(),
language_id: "markdown".to_string(),
version: 1,
text: "---\nname: skill-two\ndescription: Second\n---\n# Two".to_string(),
},
})
.await;
// Close only the first document.
service
.inner()
.did_close(DidCloseTextDocumentParams {
text_document: TextDocumentIdentifier { uri: uri1.clone() },
})
.await;
// Closed doc: cache entry gone, hover yields None.
let hover1 = service
.inner()
.hover(HoverParams {
text_document_position_params: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri: uri1 },
position: Position {
line: 1,
character: 0,
},
},
work_done_progress_params: WorkDoneProgressParams::default(),
})
.await;
assert!(hover1.is_ok());
assert!(hover1.unwrap().is_none());
// Open doc: unaffected by the sibling's close, hover yields Some.
let hover2 = service
.inner()
.hover(HoverParams {
text_document_position_params: TextDocumentPositionParams {
text_document: TextDocumentIdentifier { uri: uri2 },
position: Position {
line: 1,
character: 0,
},
},
work_done_progress_params: WorkDoneProgressParams::default(),
})
.await;
assert!(hover2.is_ok());
assert!(hover2.unwrap().is_some());
}
// A fully populated, well-formed settings object must be accepted by
// did_change_configuration without panicking.
#[tokio::test]
async fn test_did_change_configuration_valid_settings() {
    let (service, _socket) = LspService::new(Backend::new);
    service
        .inner()
        .initialize(InitializeParams::default())
        .await
        .unwrap();
    let params = DidChangeConfigurationParams {
        settings: serde_json::json!({
            "severity": "Error",
            "target": "ClaudeCode",
            "rules": {
                "skills": false,
                "hooks": true
            }
        }),
    };
    service.inner().did_change_configuration(params).await;
}
// A settings object carrying only `severity` must be accepted; unspecified
// fields keep their previous values.
#[tokio::test]
async fn test_did_change_configuration_partial_settings() {
    let (service, _socket) = LspService::new(Backend::new);
    service
        .inner()
        .initialize(InitializeParams::default())
        .await
        .unwrap();
    let params = DidChangeConfigurationParams {
        settings: serde_json::json!({
            "severity": "Info"
        }),
    };
    service.inner().did_change_configuration(params).await;
}
// Settings that are not a JSON object must be ignored without panicking.
#[tokio::test]
async fn test_did_change_configuration_invalid_json() {
    let (service, _socket) = LspService::new(Backend::new);
    service
        .inner()
        .initialize(InitializeParams::default())
        .await
        .unwrap();
    let params = DidChangeConfigurationParams {
        settings: serde_json::json!("not an object"),
    };
    service.inner().did_change_configuration(params).await;
}
// Pins the concurrency helper's bounds: 0 documents -> 0 workers, 1 -> 1,
// and a large document count is capped by available_parallelism clamped to
// [1, MAX_CONFIG_REVALIDATION_CONCURRENCY].
#[test]
fn test_config_revalidation_concurrency_bounds() {
// Mirror of the cap the helper is expected to apply (default 4 when the
// parallelism query fails).
let expected_cap = std::thread::available_parallelism()
.map(|count| count.get())
.unwrap_or(4)
.clamp(1, MAX_CONFIG_REVALIDATION_CONCURRENCY);
assert_eq!(config_revalidation_concurrency(0), 0);
assert_eq!(config_revalidation_concurrency(1), 1);
assert_eq!(
config_revalidation_concurrency(MAX_CONFIG_REVALIDATION_CONCURRENCY * 4),
expected_cap
);
}
// Zero items means zero spawned tasks and therefore zero join errors.
#[tokio::test]
async fn test_for_each_bounded_empty_input() {
    let items: Vec<usize> = Vec::new();
    let errors = for_each_bounded(items, 3, |_| async {}).await;
    assert!(errors.is_empty());
}
// One of three tasks panics; for_each_bounded must surface exactly one
// JoinError flagged as a panic while the other tasks complete.
#[tokio::test]
async fn test_for_each_bounded_collects_join_errors() {
let errors = for_each_bounded(vec![0usize, 1, 2], 2, |idx| async move {
if idx == 1 {
panic!("intentional panic for join error coverage");
}
})
.await;
assert_eq!(errors.len(), 1);
assert!(errors[0].is_panic());
}
// Exercises the stale-result guard: diagnostics publish only when both the
// config generation and the document snapshot still match current state.
#[tokio::test]
async fn test_should_publish_diagnostics_guard() {
let (service, _socket) = LspService::new(Backend::new);
let backend = service.inner();
let temp_dir = tempfile::tempdir().unwrap();
let path = temp_dir.path().join("SKILL.md");
std::fs::write(&path, "# test").unwrap();
let uri = Url::from_file_path(&path).unwrap();
// Seed the document cache and config generation directly.
let snapshot = Arc::new("# test".to_string());
backend
.documents
.write()
.await
.insert(uri.clone(), Arc::clone(&snapshot));
backend.config_generation.store(7, Ordering::SeqCst);
// Matching generation + matching snapshot: publish allowed.
assert!(
backend
.should_publish_diagnostics(&uri, Some(7), Some(&snapshot))
.await
);
// Stale config generation: suppressed.
assert!(
!backend
.should_publish_diagnostics(&uri, Some(6), Some(&snapshot))
.await
);
// Document replaced since the snapshot was taken: suppressed.
backend
.documents
.write()
.await
.insert(uri.clone(), Arc::new("# updated".to_string()));
assert!(
!backend
.should_publish_diagnostics(&uri, Some(7), Some(&snapshot))
.await
);
// Document no longer open: suppressed.
backend.documents.write().await.remove(&uri);
assert!(
!backend
.should_publish_diagnostics(&uri, Some(7), Some(&snapshot))
.await
);
// No expectations supplied: publish is unconditional.
assert!(backend.should_publish_diagnostics(&uri, None, None).await);
}
// Proves for_each_bounded never runs more than `max_concurrency` tasks at
// once: the first wave is parked on barriers while the peak counter is read,
// then released so all 12 items can finish.
#[tokio::test]
async fn test_did_change_configuration_concurrency_bound_helper() {
use std::sync::atomic::{AtomicUsize, Ordering};
use std::time::Duration;
use tokio::sync::Barrier;
let max_concurrency = 3usize;
let in_flight = Arc::new(AtomicUsize::new(0));
let peak_in_flight = Arc::new(AtomicUsize::new(0));
let completed = Arc::new(AtomicUsize::new(0));
// Barriers sized max_concurrency + 1: the first wave plus this test body.
let ready = Arc::new(Barrier::new(max_concurrency + 1));
let release = Arc::new(Barrier::new(max_concurrency + 1));
let total_items = 12usize;
let run = tokio::spawn(for_each_bounded(0..total_items, max_concurrency, {
let in_flight = Arc::clone(&in_flight);
let peak_in_flight = Arc::clone(&peak_in_flight);
let completed = Arc::clone(&completed);
let ready = Arc::clone(&ready);
let release = Arc::clone(&release);
move |idx| {
let in_flight = Arc::clone(&in_flight);
let peak_in_flight = Arc::clone(&peak_in_flight);
let completed = Arc::clone(&completed);
let ready = Arc::clone(&ready);
let release = Arc::clone(&release);
async move {
let current = in_flight.fetch_add(1, Ordering::SeqCst) + 1;
peak_in_flight.fetch_max(current, Ordering::SeqCst);
// First `max_concurrency` items block on the barriers so the
// test can observe peak concurrency; the rest just yield.
if idx < max_concurrency {
ready.wait().await;
release.wait().await;
} else {
tokio::task::yield_now().await;
}
in_flight.fetch_sub(1, Ordering::SeqCst);
completed.fetch_add(1, Ordering::SeqCst);
}
}
}));
// Wait until the full first wave is in flight, then check the peak.
tokio::time::timeout(Duration::from_secs(2), ready.wait())
.await
.expect("timed out waiting for first wave");
assert_eq!(peak_in_flight.load(Ordering::SeqCst), max_concurrency);
tokio::time::timeout(Duration::from_secs(2), release.wait())
.await
.expect("timed out releasing first wave");
let join_errors = tokio::time::timeout(Duration::from_secs(2), run)
.await
.expect("timed out waiting for bounded worker completion")
.unwrap();
assert!(join_errors.is_empty());
assert_eq!(completed.load(Ordering::SeqCst), total_items);
}
// A configuration change while one document is open must complete without
// panicking (revalidation results go out over the unread socket).
#[tokio::test]
async fn test_did_change_configuration_triggers_revalidation() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let skill_path = temp_dir.path().join("SKILL.md");
std::fs::write(
&skill_path,
r#"---
name: test-skill
version: 1.0.0
model: sonnet
---
# Test Skill
"#,
)
.unwrap();
let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
service
.inner()
.initialize(InitializeParams {
root_uri: Some(root_uri),
..Default::default()
})
.await
.unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: uri.clone(),
language_id: "markdown".to_string(),
version: 1,
text: std::fs::read_to_string(&skill_path).unwrap(),
},
})
.await;
let settings = serde_json::json!({
"severity": "Error",
"rules": {
"skills": false
}
});
service
.inner()
.did_change_configuration(DidChangeConfigurationParams { settings })
.await;
}
// A configuration change with six open documents must complete cleanly and
// leave all six documents still tracked in the cache.
#[tokio::test]
async fn test_did_change_configuration_triggers_revalidation_for_multiple_documents() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
service
.inner()
.initialize(InitializeParams {
root_uri: Some(root_uri),
..Default::default()
})
.await
.unwrap();
let document_count = 6usize;
for i in 0..document_count {
let skill_path = temp_dir.path().join(format!("skill-{i}/SKILL.md"));
std::fs::create_dir_all(skill_path.parent().unwrap()).unwrap();
std::fs::write(
&skill_path,
format!(
r#"---
name: test-skill-{i}
version: 1.0.0
model: sonnet
---
# Test Skill {i}
"#
),
)
.unwrap();
let uri = Url::from_file_path(&skill_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: std::fs::read_to_string(&skill_path).unwrap(),
},
})
.await;
}
let settings = serde_json::json!({
"severity": "Error",
"rules": {
"skills": false
}
});
service
.inner()
.did_change_configuration(DidChangeConfigurationParams { settings })
.await;
// Revalidation must not evict any open document from the cache.
let open_documents = service.inner().documents.read().await.len();
assert_eq!(open_documents, document_count);
}
// An empty settings object must be accepted without panicking.
#[tokio::test]
async fn test_did_change_configuration_empty_settings() {
    let (service, _socket) = LspService::new(Backend::new);
    service
        .inner()
        .initialize(InitializeParams::default())
        .await
        .unwrap();
    let params = DidChangeConfigurationParams {
        settings: serde_json::json!({}),
    };
    service.inner().did_change_configuration(params).await;
}
// Settings carrying a `versions` map (per-tool version pins) must be
// accepted without panicking.
#[tokio::test]
async fn test_did_change_configuration_with_versions() {
let (service, _socket) = LspService::new(Backend::new);
service
.inner()
.initialize(InitializeParams::default())
.await
.unwrap();
let settings = serde_json::json!({
"versions": {
"claude_code": "1.0.0",
"codex": "0.1.0",
"cursor": "0.45.0",
"copilot": "1.2.0"
}
});
service
.inner()
.did_change_configuration(DidChangeConfigurationParams { settings })
.await;
}
// Settings carrying a `specs` map (spec version pins) must be accepted
// without panicking.
#[tokio::test]
async fn test_did_change_configuration_with_specs() {
let (service, _socket) = LspService::new(Backend::new);
service
.inner()
.initialize(InitializeParams::default())
.await
.unwrap();
let settings = serde_json::json!({
"specs": {
"mcp_protocol": "2025-11-25",
"agent_skills_spec": "1.0",
"agents_md_spec": "1.0"
}
});
service
.inner()
.did_change_configuration(DidChangeConfigurationParams { settings })
.await;
}
// Settings carrying a `tools` array must be accepted without panicking.
#[tokio::test]
async fn test_did_change_configuration_with_tools_array() {
let (service, _socket) = LspService::new(Backend::new);
service
.inner()
.initialize(InitializeParams::default())
.await
.unwrap();
let settings = serde_json::json!({
"tools": ["claude-code", "cursor", "github-copilot"]
});
service
.inner()
.did_change_configuration(DidChangeConfigurationParams { settings })
.await;
}
// Settings disabling specific rule IDs must be accepted without panicking.
#[tokio::test]
async fn test_did_change_configuration_with_disabled_rules() {
let (service, _socket) = LspService::new(Backend::new);
service
.inner()
.initialize(InitializeParams::default())
.await
.unwrap();
let settings = serde_json::json!({
"rules": {
"disabled_rules": ["AS-001", "PE-003", "MCP-008"]
}
});
service
.inner()
.did_change_configuration(DidChangeConfigurationParams { settings })
.await;
}
// A `locale` setting must switch the global rust_i18n locale to "es".
//
// Fix: the original reset the locale back to "en" with a trailing statement
// that was skipped whenever the assertion panicked, leaking "es" into other
// locale-sensitive tests. The reset now lives in a Drop guard so it runs on
// both normal exit and panic unwind.
#[tokio::test]
async fn test_did_change_configuration_with_locale() {
    // Restores the process-global locale when dropped (including on panic).
    struct LocaleReset;
    impl Drop for LocaleReset {
        fn drop(&mut self) {
            rust_i18n::set_locale("en");
        }
    }
    let _reset = LocaleReset;
    // Construct the backend under the locale mutex, as the original did.
    let (service, _socket) = {
        let _guard = crate::locale::LOCALE_MUTEX.lock().unwrap();
        LspService::new(Backend::new)
    };
    service
        .inner()
        .initialize(InitializeParams::default())
        .await
        .unwrap();
    let settings = serde_json::json!({
        "severity": "Warning",
        "locale": "es"
    });
    service
        .inner()
        .did_change_configuration(DidChangeConfigurationParams { settings })
        .await;
    {
        let _guard = crate::locale::LOCALE_MUTEX.lock().unwrap();
        assert_eq!(&*rust_i18n::locale(), "es");
    }
    // `_reset` restores "en" here, even if the assertion above panicked.
}
// `..` consumes the preceding normal component.
#[test]
fn test_normalize_path_resolves_parent() {
    let normalized = normalize_path(Path::new("/a/b/../c"));
    assert_eq!(normalized, PathBuf::from("/a/c"));
}
// `.` components are dropped outright.
#[test]
fn test_normalize_path_removes_curdir() {
    let normalized = normalize_path(Path::new("/a/./b/./c"));
    assert_eq!(normalized, PathBuf::from("/a/b/c"));
}
// Consecutive `..` components unwind multiple levels.
#[test]
fn test_normalize_path_multiple_parent() {
    let normalized = normalize_path(Path::new("/a/b/../../c"));
    assert_eq!(normalized, PathBuf::from("/c"));
}
// A path with no special components is returned unchanged.
#[test]
fn test_normalize_path_already_clean() {
    let normalized = normalize_path(Path::new("/a/b/c"));
    assert_eq!(normalized, PathBuf::from("/a/b/c"));
}
// A leading `..` at the root is discarded — the root cannot be escaped.
#[test]
fn test_normalize_path_cannot_escape_root() {
    let normalized = normalize_path(Path::new("/../a"));
    assert_eq!(normalized, PathBuf::from("/a"));
}
// The bare root normalizes to itself.
#[test]
fn test_normalize_path_root_only() {
    let normalized = normalize_path(Path::new("/"));
    assert_eq!(normalized, PathBuf::from("/"));
}
// More `..` components than depth still cannot climb above the root.
#[test]
fn test_normalize_path_excessive_parent_traversal() {
    let normalized = normalize_path(Path::new("/a/../../../b"));
    assert_eq!(normalized, PathBuf::from("/b"));
}
// `.` and `..` interleaved with normal components all resolve correctly.
#[test]
fn test_normalize_path_mixed_special_components() {
    let normalized = normalize_path(Path::new("/a/./b/../c/./d"));
    assert_eq!(normalized, PathBuf::from("/a/c/d"));
}
// A URI whose path uses `..` to traverse out of the workspace into a sibling
// temp dir must be handled without panicking (expected to be rejected by the
// workspace boundary check; not directly observable here).
#[tokio::test]
async fn test_path_traversal_outside_workspace_rejected() {
let (service, _socket) = LspService::new(Backend::new);
let workspace_dir = tempfile::tempdir().unwrap();
let outside_dir = tempfile::tempdir().unwrap();
// The sibling temp dir's final path component, used to rebuild its
// location via `..` traversal from inside the workspace.
let outside_name = outside_dir
.path()
.file_name()
.expect("should have a file name")
.to_str()
.expect("should be valid UTF-8");
let root_uri = Url::from_file_path(workspace_dir.path()).unwrap();
service
.inner()
.initialize(InitializeParams {
root_uri: Some(root_uri),
..Default::default()
})
.await
.unwrap();
// workspace/../../<outside>/SKILL.md — normalizes to a path outside root.
let traversal_path = workspace_dir
.path()
.join("..")
.join("..")
.join(outside_name)
.join("SKILL.md");
let uri = Url::from_file_path(&traversal_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: "---\nname: evil\n---\n# Evil".to_string(),
},
})
.await;
}
// A URI containing `..` that still normalizes to a path inside the workspace
// (subdir/../SKILL.md) must be accepted and handled without panicking.
#[tokio::test]
async fn test_path_traversal_inside_workspace_accepted() {
let (service, _socket) = LspService::new(Backend::new);
let workspace_dir = tempfile::tempdir().unwrap();
let subdir = workspace_dir.path().join("subdir");
std::fs::create_dir(&subdir).unwrap();
let skill_path = workspace_dir.path().join("SKILL.md");
std::fs::write(
&skill_path,
"---\nname: test-skill\nversion: 1.0.0\nmodel: sonnet\n---\n\n# Test Skill\n",
)
.unwrap();
let root_uri = Url::from_file_path(workspace_dir.path()).unwrap();
service
.inner()
.initialize(InitializeParams {
root_uri: Some(root_uri),
..Default::default()
})
.await
.unwrap();
// workspace/subdir/../SKILL.md — normalizes back inside the root.
let traversal_path = workspace_dir
.path()
.join("subdir")
.join("..")
.join("SKILL.md");
let uri = Url::from_file_path(&traversal_path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: std::fs::read_to_string(&skill_path).unwrap(),
},
})
.await;
}
#[tokio::test]
async fn test_nonexistent_file_in_workspace_accepted() {
    // A path inside the workspace is accepted even when no file exists on
    // disk (editors open unsaved buffers all the time).
    let (service, _socket) = LspService::new(Backend::new);
    let workspace = tempfile::tempdir().unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(Url::from_file_path(workspace.path()).unwrap()),
            ..Default::default()
        })
        .await
        .unwrap();
    let missing = workspace.path().join("SKILL.md");
    service
        .inner()
        .did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: Url::from_file_path(&missing).unwrap(),
                language_id: "markdown".to_string(),
                version: 1,
                text: "---\nname: ghost\n---\n# Ghost".to_string(),
            },
        })
        .await;
}
#[tokio::test]
async fn test_dot_components_in_path_accepted() {
    // A harmless `.` component in an otherwise in-workspace path must not
    // cause the document to be rejected.
    let (service, _socket) = LspService::new(Backend::new);
    let workspace_dir = tempfile::tempdir().unwrap();
    let skill_path = workspace_dir.path().join("SKILL.md");
    std::fs::write(
        &skill_path,
        "---\nname: test-skill\nversion: 1.0.0\nmodel: sonnet\n---\n\n# Test Skill\n",
    )
    .unwrap();
    let root_uri = Url::from_file_path(workspace_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    // The URL is built by hand, presumably so the literal `.` survives into
    // the URI (Url::from_file_path would likely normalize it — TODO confirm).
    // NOTE(review): "file://{}" performs no percent-encoding and assumes a
    // Unix-style absolute path; fine for tempdir paths on Unix, not portable
    // to Windows.
    let dot_path = format!("{}/./SKILL.md", workspace_dir.path().display());
    let uri = Url::parse(&format!("file://{}", dot_path)).unwrap();
    service
        .inner()
        .did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri,
                language_id: "markdown".to_string(),
                version: 1,
                text: std::fs::read_to_string(&skill_path).unwrap(),
            },
        })
        .await;
}
#[tokio::test]
async fn test_validate_project_rules_no_workspace() {
    // With no workspace root configured there is nothing to scan, so the
    // project-diagnostics cache must remain empty after a validation pass.
    let (service, _socket) = LspService::new(Backend::new);
    service
        .inner()
        .initialize(InitializeParams::default())
        .await
        .unwrap();
    service.inner().validate_project_rules_and_publish().await;
    assert!(
        service
            .inner()
            .project_level_diagnostics
            .read()
            .await
            .is_empty(),
        "No project diagnostics should be stored without workspace root"
    );
}
#[tokio::test]
async fn test_project_diagnostics_cached() {
    // Duplicate AGENTS.md files (root + subdir) should yield AGM-006 project
    // diagnostics that end up cached after initialize() starts background
    // validation.
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    std::fs::write(temp_dir.path().join("AGENTS.md"), "# Root").unwrap();
    let sub = temp_dir.path().join("sub");
    std::fs::create_dir(&sub).unwrap();
    std::fs::write(sub.join("AGENTS.md"), "# Sub").unwrap();
    let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    // Poll up to ~4s (80 * 50ms) for the background task to populate the
    // cache; the read guard must be dropped before sleeping so the writer
    // is not blocked.
    for _ in 0..80 {
        let proj_diags = service.inner().project_level_diagnostics.read().await;
        if !proj_diags.is_empty() {
            break;
        }
        drop(proj_diags);
        tokio::time::sleep(std::time::Duration::from_millis(50)).await;
    }
    let proj_diags = service.inner().project_level_diagnostics.read().await;
    assert!(
        !proj_diags.is_empty(),
        "Project diagnostics should be cached for AGM-006"
    );
    let proj_uris = service.inner().project_diagnostics_uris.read().await;
    assert!(
        !proj_uris.is_empty(),
        "Project diagnostic URIs should be tracked"
    );
}
#[tokio::test]
async fn test_project_diagnostics_cleared_on_rerun() {
    // Once the duplicate AGENTS.md is deleted, a re-run of project
    // validation must clear the now-stale AGM-006 diagnostics.
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    std::fs::write(temp_dir.path().join("AGENTS.md"), "# Root").unwrap();
    let sub = temp_dir.path().join("sub");
    std::fs::create_dir(&sub).unwrap();
    std::fs::write(sub.join("AGENTS.md"), "# Sub").unwrap();
    let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    // Wait (up to ~4s) for the initial background validation to land.
    for _ in 0..80 {
        let proj_diags = service.inner().project_level_diagnostics.read().await;
        if !proj_diags.is_empty() {
            break;
        }
        drop(proj_diags);
        tokio::time::sleep(std::time::Duration::from_millis(50)).await;
    }
    let count_before = service.inner().project_diagnostics_uris.read().await.len();
    assert!(
        count_before > 0,
        "Should have project diagnostics before cleanup"
    );
    // Remove the offending file, then validate synchronously.
    std::fs::remove_file(sub.join("AGENTS.md")).unwrap();
    service.inner().validate_project_rules_and_publish().await;
    // No AGM-006 code may survive anywhere in the cached diagnostics.
    let proj_diags = service.inner().project_level_diagnostics.read().await;
    let agm006_count: usize = proj_diags
        .values()
        .flat_map(|diags| diags.iter())
        .filter(|d| {
            d.code
                .as_ref()
                .map(|c| matches!(c, NumberOrString::String(s) if s == "AGM-006"))
                .unwrap_or(false)
        })
        .count();
    assert_eq!(agm006_count, 0, "AGM-006 should be cleared after fix");
}
#[tokio::test]
async fn test_project_validation_stale_generation_returns_early() {
    // A validation run whose generation has been superseded must bail out
    // before overwriting the cached diagnostics. Strategy: plant a sentinel
    // diagnostic in the cache, keep bumping the generation counter from a
    // background task so the run observes itself stale, then check the
    // sentinel survived.
    let (service, _socket) = LspService::new(Backend::new);
    let backend = service.inner().clone();
    let temp_dir = tempfile::tempdir().unwrap();
    std::fs::write(temp_dir.path().join("AGENTS.md"), "# Root").unwrap();
    let sub = temp_dir.path().join("sub");
    std::fs::create_dir(&sub).unwrap();
    std::fs::write(sub.join("AGENTS.md"), "# Sub").unwrap();
    let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    // Sentinel entry that a correct (early-returning) run must not remove.
    let sentinel_path = temp_dir.path().join("sentinel.md");
    let sentinel_uri = Url::from_file_path(&sentinel_path).unwrap();
    let sentinel_diag = Diagnostic {
        range: Range {
            start: Position {
                line: 0,
                character: 0,
            },
            end: Position {
                line: 0,
                character: 0,
            },
        },
        severity: Some(DiagnosticSeverity::WARNING),
        code: Some(NumberOrString::String("SENTINEL".to_string())),
        code_description: None,
        source: Some("agnix".to_string()),
        message: "sentinel".to_string(),
        related_information: None,
        tags: None,
        data: None,
    };
    // Seed both caches inside scoped blocks so the write guards are
    // released before validation runs.
    {
        let mut proj_diags = service.inner().project_level_diagnostics.write().await;
        proj_diags.insert(sentinel_uri.clone(), vec![sentinel_diag]);
    }
    {
        let mut proj_uris = service.inner().project_diagnostics_uris.write().await;
        proj_uris.insert(sentinel_uri.clone());
    }
    // Continuously force the generation to a high value so the concurrent
    // validation run always sees its own generation as stale.
    let bump_backend = service.inner().clone();
    let bump = tokio::spawn(async move {
        for _ in 0..200 {
            bump_backend
                .project_validation_generation
                .store(9_999, Ordering::SeqCst);
            tokio::task::yield_now().await;
        }
    });
    backend.validate_project_rules_and_publish().await;
    bump.abort();
    let proj_diags = service.inner().project_level_diagnostics.read().await;
    assert!(
        proj_diags.contains_key(&sentinel_uri),
        "stale generation run should return before overwriting cached diagnostics"
    );
    let proj_uris = service.inner().project_diagnostics_uris.read().await;
    assert!(
        proj_uris.contains(&sentinel_uri),
        "stale generation run should return before mutating cached URI set"
    );
}
#[test]
fn test_is_project_level_trigger() {
    // Files whose change must re-trigger project-level validation.
    let triggers = [
        "/project/CLAUDE.md",
        "/project/AGENTS.md",
        "/project/.clinerules",
        "/project/.cursorrules",
        "/project/.github/copilot-instructions.md",
        "/project/.github/instructions/test.instructions.md",
        "/project/.cursor/rules/test.mdc",
        "/project/GEMINI.md",
        "/project/.agnix.toml",
    ];
    for candidate in triggers {
        assert!(
            Backend::is_project_level_trigger(Path::new(candidate)),
            "{candidate} should be a project-level trigger"
        );
    }
    // Ordinary files must not force a full project re-validation.
    let non_triggers = [
        "/project/SKILL.md",
        "/project/README.md",
        "/project/settings.json",
        "/project/plugin.json",
    ];
    for candidate in non_triggers {
        assert!(
            !Backend::is_project_level_trigger(Path::new(candidate)),
            "{candidate} should not be a project-level trigger"
        );
    }
}
#[tokio::test]
async fn test_initialize_advertises_execute_command() {
let (service, _socket) = LspService::new(Backend::new);
let result = service
.inner()
.initialize(InitializeParams::default())
.await
.unwrap();
match result.capabilities.execute_command_provider {
Some(ref opts) => {
assert!(
opts.commands
.contains(&"agnix.validateProjectRules".to_string()),
"Expected agnix.validateProjectRules in execute commands, got: {:?}",
opts.commands
);
}
None => panic!("Expected execute command capability"),
}
}
#[tokio::test]
async fn test_execute_command_validate_project_rules() {
    // The validate command is fire-and-forget: it must return Ok(None).
    let (service, _socket) = LspService::new(Backend::new);
    let workspace = tempfile::tempdir().unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(Url::from_file_path(workspace.path()).unwrap()),
            ..Default::default()
        })
        .await
        .unwrap();
    let outcome = service
        .inner()
        .execute_command(ExecuteCommandParams {
            command: "agnix.validateProjectRules".to_string(),
            arguments: vec![],
            work_done_progress_params: WorkDoneProgressParams::default(),
        })
        .await;
    assert!(outcome.is_ok());
    assert!(outcome.unwrap().is_none());
}
#[tokio::test]
async fn test_execute_command_unknown() {
    // Unknown commands are ignored gracefully (Ok(None)) rather than erroring.
    let (service, _socket) = LspService::new(Backend::new);
    service
        .inner()
        .initialize(InitializeParams::default())
        .await
        .unwrap();
    let outcome = service
        .inner()
        .execute_command(ExecuteCommandParams {
            command: "unknown.command".to_string(),
            arguments: vec![],
            work_done_progress_params: WorkDoneProgressParams::default(),
        })
        .await;
    assert!(outcome.is_ok());
    assert!(outcome.unwrap().is_none());
}
#[tokio::test]
async fn test_project_and_file_diagnostics_merged() {
    // A did_open that publishes per-file diagnostics must not wipe out the
    // cached project-level diagnostics for the same URI.
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    let claude_path = temp_dir.path().join("CLAUDE.md");
    std::fs::write(&claude_path, "<unclosed>\n# Project\n").unwrap();
    let uri = Url::from_file_path(&claude_path).unwrap();
    // Wait for the initialize()-spawned project validation to finish so it
    // cannot race with the fake diagnostic injected below; the extra 100ms
    // sleep gives the publish step time to settle.
    for _ in 0..80 {
        let generation = service
            .inner()
            .project_validation_generation
            .load(std::sync::atomic::Ordering::SeqCst);
        if generation >= 1 {
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
            break;
        }
        tokio::time::sleep(std::time::Duration::from_millis(50)).await;
    }
    // Inject a synthetic AGM-006 project diagnostic for the CLAUDE.md URI.
    {
        let fake_project_diag = Diagnostic {
            range: Range {
                start: Position {
                    line: 0,
                    character: 0,
                },
                end: Position {
                    line: 0,
                    character: 0,
                },
            },
            severity: Some(DiagnosticSeverity::WARNING),
            code: Some(NumberOrString::String("AGM-006".to_string())),
            code_description: None,
            source: Some("agnix".to_string()),
            message: "Nested AGENTS.md detected".to_string(),
            related_information: None,
            tags: None,
            data: None,
        };
        let mut proj_diags = service.inner().project_level_diagnostics.write().await;
        proj_diags.insert(uri.clone(), vec![fake_project_diag]);
    }
    // Sanity check: the injection landed before did_open runs.
    {
        let proj_diags = service.inner().project_level_diagnostics.read().await;
        assert!(
            proj_diags.contains_key(&uri),
            "Project diagnostics should be pre-populated for the URI"
        );
    }
    service
        .inner()
        .did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "markdown".to_string(),
                version: 1,
                text: std::fs::read_to_string(&claude_path).unwrap(),
            },
        })
        .await;
    // The cached AGM-006 entry must have survived the per-file publish.
    {
        let proj_diags = service.inner().project_level_diagnostics.read().await;
        let diags = proj_diags
            .get(&uri)
            .expect("Project diagnostics should still be cached");
        assert!(
            diags
                .iter()
                .any(|d| d.code == Some(NumberOrString::String("AGM-006".to_string()))),
            "Cached project diagnostic should be preserved after merge"
        );
    }
}
#[tokio::test]
async fn test_for_each_bounded_concurrency_limit_one() {
    use std::sync::atomic::{AtomicUsize, Ordering};
    // Highest number of closures ever observed running simultaneously.
    let max_concurrent = Arc::new(AtomicUsize::new(0));
    // Number of closures currently inside the instrumented section.
    let current = Arc::new(AtomicUsize::new(0));
    let items: Vec<usize> = (0..5).collect();
    let max_c = Arc::clone(&max_concurrent);
    // Fix: `&current` had been corrupted to the mojibake `¤t`
    // (HTML-entity mangling of `&curren` + `t`), which does not compile.
    let cur = Arc::clone(&current);
    let errors = for_each_bounded(items, 1, move |_item| {
        let max_c = Arc::clone(&max_c);
        let cur = Arc::clone(&cur);
        async move {
            // Record entry, sample the high-water mark, yield so another
            // task could interleave if the limiter allowed it, record exit.
            let c = cur.fetch_add(1, Ordering::SeqCst) + 1;
            max_c.fetch_max(c, Ordering::SeqCst);
            tokio::task::yield_now().await;
            cur.fetch_sub(1, Ordering::SeqCst);
        }
    })
    .await;
    assert!(errors.is_empty());
    assert_eq!(
        max_concurrent.load(Ordering::SeqCst),
        1,
        "With concurrency limit 1, at most 1 task should run concurrently"
    );
}
#[tokio::test]
async fn test_for_each_bounded_zero_concurrency_defaults_to_one() {
    // Per the contract under test, a limit of 0 should behave like 1:
    // every item is still visited exactly once.
    let processed = Arc::new(std::sync::atomic::AtomicUsize::new(0));
    let counter = Arc::clone(&processed);
    let errors = for_each_bounded(vec![1, 2, 3], 0, move |_| {
        let counter = Arc::clone(&counter);
        async move {
            counter.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
        }
    })
    .await;
    assert!(errors.is_empty());
    assert_eq!(
        processed.load(std::sync::atomic::Ordering::SeqCst),
        3,
        "All items should be processed even with concurrency 0"
    );
}
#[tokio::test]
async fn test_generic_markdown_not_validated() {
    // A plain .md file that is not an agent config is still cached by
    // did_open, even though it is not validated.
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    let notes_path = temp_dir.path().join("notes.md");
    let content = "<unclosed>\n# Some developer notes\n";
    // Fix: `&notes_path` had been corrupted to the mojibake `¬es_path`
    // (HTML-entity mangling of `&not`), which does not compile.
    std::fs::write(&notes_path, content).unwrap();
    let uri = Url::from_file_path(&notes_path).unwrap();
    service
        .inner()
        .did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "markdown".to_string(),
                version: 1,
                text: content.to_string(),
            },
        })
        .await;
    let docs = service.inner().documents.read().await;
    assert!(
        docs.contains_key(&uri),
        "Document should be cached (did_open always caches)"
    );
}
#[tokio::test]
async fn test_hover_returns_none_for_generic_markdown() {
    // A plain .md file is not an agent config, so hover must return None
    // even when positioned inside frontmatter.
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    let notes_path = temp_dir.path().join("notes.md");
    let content = "---\nname: test\n---\n# Notes\n";
    // Fix: `&notes_path` had been corrupted to the mojibake `¬es_path`
    // (HTML-entity mangling of `&not`), which does not compile.
    std::fs::write(&notes_path, content).unwrap();
    let uri = Url::from_file_path(&notes_path).unwrap();
    service
        .inner()
        .did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "markdown".to_string(),
                version: 1,
                text: content.to_string(),
            },
        })
        .await;
    // Hover over line 1 ("name: test"), which would yield frontmatter help
    // for a real skill file.
    let hover_result = service
        .inner()
        .hover(HoverParams {
            text_document_position_params: TextDocumentPositionParams {
                text_document: TextDocumentIdentifier { uri },
                position: Position {
                    line: 1,
                    character: 0,
                },
            },
            work_done_progress_params: Default::default(),
        })
        .await
        .unwrap();
    assert!(
        hover_result.is_none(),
        "Hover should return None for GenericMarkdown files"
    );
}
#[tokio::test]
async fn test_agent_config_files_still_validated() {
    // CLAUDE.md must keep its dedicated file-type classification (and thus
    // still be validated) even though it is a markdown file.
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    let claude_path = temp_dir.path().join("CLAUDE.md");
    let content = "# Project\n\nSome instructions.\n";
    std::fs::write(&claude_path, content).unwrap();
    let uri = Url::from_file_path(&claude_path).unwrap();
    let config = service.inner().config.load();
    let file_type = agnix_core::resolve_file_type(&claude_path, &config);
    assert!(
        !file_type.is_generic(),
        "CLAUDE.md should NOT be classified as generic (got {:?})",
        file_type
    );
    assert_eq!(file_type, agnix_core::FileType::ClaudeMd);
    // Release the loaded config handle before invoking the backend again.
    // NOTE(review): presumably config.load() returns a guard that should not
    // be held across did_open — confirm against the config type.
    drop(config);
    service
        .inner()
        .did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri,
                language_id: "markdown".to_string(),
                version: 1,
                text: content.to_string(),
            },
        })
        .await;
}
#[tokio::test]
async fn test_disabled_validators_respected_in_content_validation() {
    // A .agnix.toml that disables XmlValidator must be picked up during
    // initialize(); a CLAUDE.md with an unclosed tag is then opened without
    // incident.
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    let config_path = temp_dir.path().join(".agnix.toml");
    std::fs::write(
        &config_path,
        r#"
[rules]
disabled_validators = ["XmlValidator"]
"#,
    )
    .unwrap();
    let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    // Confirm the workspace config was actually loaded.
    let config = service.inner().config.load();
    assert!(
        config
            .rules()
            .disabled_validators
            .iter()
            .any(|v| v == "XmlValidator"),
        "XmlValidator should be in disabled_validators list"
    );
    // Release the config handle before invoking the backend again.
    drop(config);
    // `<unclosed>` would normally trip the XML validator; with it disabled,
    // did_open should complete without emitting that diagnostic.
    let claude_path = temp_dir.path().join("CLAUDE.md");
    let content = "<unclosed>\n# Project\n";
    std::fs::write(&claude_path, content).unwrap();
    let uri = Url::from_file_path(&claude_path).unwrap();
    service
        .inner()
        .did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri,
                language_id: "markdown".to_string(),
                version: 1,
                text: content.to_string(),
            },
        })
        .await;
}
#[tokio::test]
async fn test_project_validation_starts_in_initialize() {
    // initialize() is expected to kick off background project validation on
    // its own; poll the diagnostics cache until something appears (~4s max).
    let (service, _socket) = LspService::new(Backend::new);
    let workspace = tempfile::tempdir().unwrap();
    std::fs::write(workspace.path().join("AGENTS.md"), "# Root").unwrap();
    let nested = workspace.path().join("sub");
    std::fs::create_dir(&nested).unwrap();
    std::fs::write(nested.join("AGENTS.md"), "# Sub").unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(Url::from_file_path(workspace.path()).unwrap()),
            ..Default::default()
        })
        .await
        .unwrap();
    let mut found = false;
    for _ in 0..80 {
        if !service
            .inner()
            .project_level_diagnostics
            .read()
            .await
            .is_empty()
        {
            found = true;
            break;
        }
        tokio::time::sleep(std::time::Duration::from_millis(50)).await;
    }
    assert!(
        found,
        "Project-level validation should run during initialize(), \
         producing AGM-006 diagnostics for duplicate AGENTS.md files"
    );
}
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
async fn test_stress_concurrent_document_open_close() {
    // Open and immediately close 20 documents from concurrent tasks; the
    // document cache must end up empty (no leaked entries, no deadlock).
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    let doc_count = 20usize;
    // One valid SKILL.md per skill-N directory.
    for i in 0..doc_count {
        let dir = temp_dir.path().join(format!("skill-{i}"));
        std::fs::create_dir_all(&dir).unwrap();
        std::fs::write(
            dir.join("SKILL.md"),
            format!(
                "---\nname: stress-skill-{i}\nversion: 1.0.0\nmodel: sonnet\n---\n\n# Stress Skill {i}\n"
            ),
        )
        .unwrap();
    }
    let backend = service.inner().clone();
    // 30s timeout guards against a deadlock turning into a hung test run.
    let result = tokio::time::timeout(std::time::Duration::from_secs(30), async {
        let mut handles = Vec::new();
        for i in 0..doc_count {
            let backend = backend.clone();
            let path = temp_dir.path().join(format!("skill-{i}")).join("SKILL.md");
            let content = std::fs::read_to_string(&path).unwrap();
            let uri = Url::from_file_path(&path).unwrap();
            handles.push(tokio::spawn(async move {
                backend
                    .did_open(DidOpenTextDocumentParams {
                        text_document: TextDocumentItem {
                            uri: uri.clone(),
                            language_id: "markdown".to_string(),
                            version: 1,
                            text: content,
                        },
                    })
                    .await;
                backend
                    .did_close(DidCloseTextDocumentParams {
                        text_document: TextDocumentIdentifier { uri },
                    })
                    .await;
            }));
        }
        for handle in handles {
            handle.await.expect("task should not panic");
        }
    })
    .await;
    assert!(result.is_ok(), "concurrent open/close timed out");
    let docs = service.inner().documents.read().await;
    assert!(
        docs.is_empty(),
        "documents cache should be empty after all close operations, found {} entries",
        docs.len()
    );
}
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
async fn test_stress_rapid_config_changes_drop_stale_batches() {
    // Race generation bumps against staleness probes: any probe carrying an
    // older generation than config_generation must be reported stale.
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    let skill_path = temp_dir.path().join("SKILL.md");
    std::fs::write(
        &skill_path,
        "---\nname: stress-skill\nversion: 1.0.0\nmodel: sonnet\n---\n\n# Stress Test\n",
    )
    .unwrap();
    let uri = Url::from_file_path(&skill_path).unwrap();
    // Seed the document cache directly (bypassing did_open's validation).
    {
        let mut docs = service.inner().documents.write().await;
        docs.insert(
            uri.clone(),
            Arc::new(std::fs::read_to_string(&skill_path).unwrap()),
        );
    }
    let change_count = 50u64;
    let backend_a = service.inner().clone();
    let backend_b = service.inner().clone();
    let uri_b = uri.clone();
    // Task A: bump config_generation change_count times.
    let task_a = tokio::spawn(async move {
        for _ in 0..change_count {
            backend_a.config_generation.fetch_add(1, Ordering::SeqCst);
            tokio::task::yield_now().await;
        }
    });
    // Task B: concurrently probe with generations 0..change_count and count
    // how many are judged stale (informational only — see `let _ =` below).
    let task_b = tokio::spawn(async move {
        let mut stale_count = 0u32;
        for probe_gen in 0..change_count {
            if !backend_b
                .should_publish_diagnostics(&uri_b, Some(probe_gen), None)
                .await
            {
                stale_count += 1;
            }
            tokio::task::yield_now().await;
        }
        stale_count
    });
    let result = tokio::time::timeout(std::time::Duration::from_secs(10), async move {
        task_a.await.unwrap();
        task_b.await.unwrap()
    })
    .await;
    assert!(
        result.is_ok(),
        "concurrent config_generation stress test timed out"
    );
    // The racy stale count is not asserted; only the deterministic
    // post-conditions below are.
    let _ = result.unwrap();
    let final_gen = service.inner().config_generation.load(Ordering::SeqCst);
    assert_eq!(
        final_gen, change_count,
        "config_generation should be {} after {} increments, got {}",
        change_count, change_count, final_gen
    );
    // After all bumps, every older generation must be stale.
    let backend = service.inner().clone();
    for probe in 0..change_count {
        assert!(
            !backend
                .should_publish_diagnostics(&uri, Some(probe), None)
                .await,
            "probe_gen {} should be stale when config_generation is {}",
            probe,
            final_gen
        );
    }
    assert_eq!(
        service.inner().documents.read().await.len(),
        1,
        "document should still be in cache after concurrent stress"
    );
}
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
async fn test_stress_concurrent_changes_same_document() {
    // Fire 30 concurrent did_change notifications at one document; the cache
    // must end with exactly one entry for that URI (no duplicates, no loss).
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    let skill_path = temp_dir.path().join("SKILL.md");
    std::fs::write(
        &skill_path,
        "---\nname: concurrent-skill\nversion: 1.0.0\nmodel: sonnet\n---\n\n# Concurrent\n",
    )
    .unwrap();
    let uri = Url::from_file_path(&skill_path).unwrap();
    service
        .inner()
        .did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "markdown".to_string(),
                version: 1,
                text: std::fs::read_to_string(&skill_path).unwrap(),
            },
        })
        .await;
    let backend = service.inner().clone();
    let change_count = 30usize;
    let result = tokio::time::timeout(std::time::Duration::from_secs(30), async {
        let mut handles = Vec::new();
        for i in 0..change_count {
            let backend = backend.clone();
            let uri = uri.clone();
            handles.push(tokio::spawn(async move {
                backend
                    .did_change(DidChangeTextDocumentParams {
                        // Versions start at 2 (version 1 was the did_open).
                        text_document: VersionedTextDocumentIdentifier {
                            uri,
                            version: (i + 2) as i32,
                        },
                        // Full-document replacement (range: None).
                        content_changes: vec![TextDocumentContentChangeEvent {
                            range: None,
                            range_length: None,
                            text: format!(
                                "---\nname: v{i}\nversion: 1.0.0\nmodel: sonnet\n---\n\n# Version {i}\n"
                            ),
                        }],
                    })
                    .await;
            }));
        }
        for handle in handles {
            handle.await.expect("task should not panic");
        }
    })
    .await;
    assert!(result.is_ok(), "concurrent changes timed out");
    let docs = service.inner().documents.read().await;
    assert_eq!(
        docs.len(),
        1,
        "exactly 1 entry should be in cache for the document, found {}",
        docs.len()
    );
    assert!(
        docs.contains_key(&uri),
        "the URI should still be present in the cache"
    );
}
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
async fn test_stress_config_change_during_active_validation() {
    // Race a did_change_configuration (which bumps config_generation by 1)
    // against a direct +9999 bump; the counter must total exactly 10_000 and
    // no open documents may be lost.
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    // Open 5 valid skill documents so a revalidation pass has work to do.
    for i in 0..5 {
        let dir = temp_dir.path().join(format!("skill-{i}"));
        std::fs::create_dir_all(&dir).unwrap();
        let path = dir.join("SKILL.md");
        std::fs::write(
            &path,
            format!(
                "---\nname: active-skill-{i}\nversion: 1.0.0\nmodel: sonnet\n---\n\n# Active Skill {i}\n"
            ),
        )
        .unwrap();
        let uri = Url::from_file_path(&path).unwrap();
        service
            .inner()
            .did_open(DidOpenTextDocumentParams {
                text_document: TextDocumentItem {
                    uri,
                    language_id: "markdown".to_string(),
                    version: 1,
                    text: std::fs::read_to_string(&path).unwrap(),
                },
            })
            .await;
    }
    let backend_a = service.inner().clone();
    let backend_b = service.inner().clone();
    let result = tokio::time::timeout(std::time::Duration::from_secs(30), async {
        let task_a = tokio::spawn(async move {
            backend_a
                .did_change_configuration(DidChangeConfigurationParams {
                    settings: serde_json::json!({ "severity": "Warning" }),
                })
                .await;
        });
        let task_b = tokio::spawn(async move {
            backend_b
                .config_generation
                .fetch_add(9_999, Ordering::SeqCst);
        });
        task_a.await.expect("config change task should not panic");
        task_b.await.expect("generation bump task should not panic");
    })
    .await;
    assert!(result.is_ok(), "concurrent config change timed out");
    let generation = service.inner().config_generation.load(Ordering::SeqCst);
    assert_eq!(
        generation, 10_000,
        "config_generation should be 10000 (1 from config change + 9999 from bump), got {}",
        generation
    );
    let open_docs = service.inner().documents.read().await.len();
    assert_eq!(
        open_docs, 5,
        "all 5 documents should still be in cache, found {}",
        open_docs
    );
}
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
async fn test_stress_concurrent_project_and_file_validation() {
    // Run a full project validation pass concurrently with per-file
    // did_change notifications; neither side may corrupt the other's state.
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
    // Duplicate AGENTS.md files guarantee an AGM-006 project diagnostic.
    std::fs::write(temp_dir.path().join("AGENTS.md"), "# Root AGENTS").unwrap();
    let sub = temp_dir.path().join("sub");
    std::fs::create_dir(&sub).unwrap();
    std::fs::write(sub.join("AGENTS.md"), "# Sub AGENTS").unwrap();
    for i in 0..5 {
        let dir = temp_dir.path().join(format!("skill-{i}"));
        std::fs::create_dir_all(&dir).unwrap();
        std::fs::write(
            dir.join("SKILL.md"),
            format!(
                "---\nname: project-skill-{i}\nversion: 1.0.0\nmodel: sonnet\n---\n\n# Project Skill {i}\n"
            ),
        )
        .unwrap();
    }
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    // Open both AGENTS.md files and the 5 skill files (7 documents total).
    for path in [temp_dir.path().join("AGENTS.md"), sub.join("AGENTS.md")] {
        let uri = Url::from_file_path(&path).unwrap();
        service
            .inner()
            .did_open(DidOpenTextDocumentParams {
                text_document: TextDocumentItem {
                    uri,
                    language_id: "markdown".to_string(),
                    version: 1,
                    text: std::fs::read_to_string(&path).unwrap(),
                },
            })
            .await;
    }
    for i in 0..5 {
        let path = temp_dir.path().join(format!("skill-{i}")).join("SKILL.md");
        let uri = Url::from_file_path(&path).unwrap();
        service
            .inner()
            .did_open(DidOpenTextDocumentParams {
                text_document: TextDocumentItem {
                    uri,
                    language_id: "markdown".to_string(),
                    version: 1,
                    text: std::fs::read_to_string(&path).unwrap(),
                },
            })
            .await;
    }
    // Wait for the initialize()-spawned project validation to finish before
    // launching the stress phase.
    let sync_result = tokio::time::timeout(std::time::Duration::from_secs(10), async {
        loop {
            {
                let proj_diags = service.inner().project_level_diagnostics.read().await;
                if !proj_diags.is_empty() {
                    break;
                }
            }
            tokio::time::sleep(std::time::Duration::from_millis(50)).await;
        }
    })
    .await;
    assert!(
        sync_result.is_ok(),
        "initialize() background project validation did not complete within 10s"
    );
    let backend_project = service.inner().clone();
    // Stress phase: one project validation racing five did_change tasks.
    let result = tokio::time::timeout(std::time::Duration::from_secs(30), async {
        let mut handles = Vec::new();
        handles.push(tokio::spawn(async move {
            backend_project.validate_project_rules_and_publish().await;
        }));
        for i in 0..5 {
            let backend = service.inner().clone();
            let path = temp_dir
                .path()
                .join(format!("skill-{i}"))
                .join("SKILL.md");
            let uri = Url::from_file_path(&path).unwrap();
            handles.push(tokio::spawn(async move {
                backend
                    .did_change(DidChangeTextDocumentParams {
                        text_document: VersionedTextDocumentIdentifier {
                            uri,
                            version: 2,
                        },
                        content_changes: vec![TextDocumentContentChangeEvent {
                            range: None,
                            range_length: None,
                            text: format!(
                                "---\nname: updated-skill-{i}\nversion: 1.0.0\nmodel: sonnet\n---\n\n# Updated {i}\n"
                            ),
                        }],
                    })
                    .await;
            }));
        }
        for handle in handles {
            handle.await.expect("task should not panic");
        }
    })
    .await;
    assert!(
        result.is_ok(),
        "concurrent project and file validation timed out"
    );
    let proj_diags = service.inner().project_level_diagnostics.read().await;
    assert!(
        !proj_diags.is_empty(),
        "project_level_diagnostics should be non-empty (AGM-006 from duplicate AGENTS.md)"
    );
    drop(proj_diags);
    let open_docs = service.inner().documents.read().await.len();
    assert_eq!(
        open_docs, 7,
        "all 7 documents should still be in cache, found {}",
        open_docs
    );
}
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
async fn test_stress_high_document_count_revalidation() {
    // A single config change with 20 open documents must revalidate all of
    // them within the timeout and leave the cache intact.
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    let doc_count = 20usize;
    for i in 0..doc_count {
        let dir = temp_dir.path().join(format!("skill-{i}"));
        std::fs::create_dir_all(&dir).unwrap();
        let path = dir.join("SKILL.md");
        std::fs::write(
            &path,
            format!(
                "---\nname: high-count-skill-{i}\nversion: 1.0.0\nmodel: sonnet\n---\n\n# High Count {i}\n"
            ),
        )
        .unwrap();
        let uri = Url::from_file_path(&path).unwrap();
        service
            .inner()
            .did_open(DidOpenTextDocumentParams {
                text_document: TextDocumentItem {
                    uri,
                    language_id: "markdown".to_string(),
                    version: 1,
                    text: std::fs::read_to_string(&path).unwrap(),
                },
            })
            .await;
    }
    // The config change triggers revalidation of every open document.
    let result = tokio::time::timeout(std::time::Duration::from_secs(30), async {
        service
            .inner()
            .did_change_configuration(DidChangeConfigurationParams {
                settings: serde_json::json!({ "severity": "Error" }),
            })
            .await;
    })
    .await;
    assert!(
        result.is_ok(),
        "high document count revalidation timed out after 30s"
    );
    let generation = service.inner().config_generation.load(Ordering::SeqCst);
    assert_eq!(
        generation, 1,
        "config_generation should be 1 after single config change, got {}",
        generation
    );
    let open_docs = service.inner().documents.read().await.len();
    assert_eq!(
        open_docs, doc_count,
        "all {} documents should still be in cache, found {}",
        doc_count, open_docs
    );
}
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
async fn test_stress_concurrent_hover_during_validation() {
    // Interleave 10 hover requests with 10 full-document did_change updates
    // on the same URI; nothing may panic, hang, or evict the document.
    let (service, _socket) = LspService::new(Backend::new);
    let temp_dir = tempfile::tempdir().unwrap();
    let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
    service
        .inner()
        .initialize(InitializeParams {
            root_uri: Some(root_uri),
            ..Default::default()
        })
        .await
        .unwrap();
    let skill_path = temp_dir.path().join("SKILL.md");
    let content = "---\nname: hover-skill\nversion: 1.0.0\nmodel: sonnet\n---\n\n# Hover Skill\n";
    std::fs::write(&skill_path, content).unwrap();
    let uri = Url::from_file_path(&skill_path).unwrap();
    service
        .inner()
        .did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "markdown".to_string(),
                version: 1,
                text: content.to_string(),
            },
        })
        .await;
    let result = tokio::time::timeout(std::time::Duration::from_secs(30), async {
        let mut handles = Vec::new();
        // Writers: 10 concurrent full-document replacements.
        for i in 0..10 {
            let backend = service.inner().clone();
            let uri = uri.clone();
            handles.push(tokio::spawn(async move {
                backend
                    .did_change(DidChangeTextDocumentParams {
                        text_document: VersionedTextDocumentIdentifier {
                            uri,
                            version: (i + 2) as i32,
                        },
                        content_changes: vec![TextDocumentContentChangeEvent {
                            range: None,
                            range_length: None,
                            text: format!(
                                "---\nname: hover-v{i}\nversion: 1.0.0\nmodel: sonnet\n---\n\n# Hover V{i}\n"
                            ),
                        }],
                    })
                    .await;
            }));
        }
        // Readers: 10 concurrent hovers; results are intentionally ignored
        // since the document content is racing underneath them.
        for _ in 0..10 {
            let backend = service.inner().clone();
            let uri = uri.clone();
            handles.push(tokio::spawn(async move {
                let _ = backend
                    .hover(HoverParams {
                        text_document_position_params: TextDocumentPositionParams {
                            text_document: TextDocumentIdentifier { uri },
                            position: Position {
                                line: 1,
                                character: 0,
                            },
                        },
                        work_done_progress_params: WorkDoneProgressParams::default(),
                    })
                    .await;
            }));
        }
        for handle in handles {
            handle.await.expect("task should not panic");
        }
    })
    .await;
    assert!(
        result.is_ok(),
        "concurrent hover during validation timed out"
    );
    let docs = service.inner().documents.read().await;
    assert!(
        docs.contains_key(&uri),
        "document should still be in cache after concurrent hover and changes"
    );
}
// Stress test: fire many overlapping project-wide validations concurrently and
// verify the generation counter advances — the guard that lets a stale
// validation run detect it has been superseded.
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
async fn test_stress_rapid_project_validation_generation_guard() {
let (service, _socket) = LspService::new(Backend::new);
let temp_dir = tempfile::tempdir().unwrap();
// Two AGENTS.md files (root + subdirectory) give project validation
// something to report.
std::fs::write(temp_dir.path().join("AGENTS.md"), "# Root AGENTS").unwrap();
let sub = temp_dir.path().join("sub");
std::fs::create_dir(&sub).unwrap();
std::fs::write(sub.join("AGENTS.md"), "# Sub AGENTS").unwrap();
let root_uri = Url::from_file_path(temp_dir.path()).unwrap();
service
.inner()
.initialize(InitializeParams {
root_uri: Some(root_uri),
..Default::default()
})
.await
.unwrap();
// Poll until initialize()'s background project validation has published at
// least one project-level diagnostic, so the stress loop starts from a
// settled state. The inner block scopes the read guard so it is dropped
// before the sleep — the guard is never held across an await of the timer.
let init_sync = tokio::time::timeout(std::time::Duration::from_secs(10), async {
loop {
{
let proj_diags = service.inner().project_level_diagnostics.read().await;
if !proj_diags.is_empty() {
break;
}
}
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
}
})
.await;
assert!(
init_sync.is_ok(),
"initialize() background project validation did not complete within 10s"
);
// Open both files so they participate in revalidation.
for path in [temp_dir.path().join("AGENTS.md"), sub.join("AGENTS.md")] {
let uri = Url::from_file_path(&path).unwrap();
service
.inner()
.did_open(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri,
language_id: "markdown".to_string(),
version: 1,
text: std::fs::read_to_string(&path).unwrap(),
},
})
.await;
}
let validation_count = 10usize;
// Launch all validations concurrently; each must complete without panicking
// within the 30s budget.
let result = tokio::time::timeout(std::time::Duration::from_secs(30), async {
let mut handles = Vec::new();
for _ in 0..validation_count {
let backend = service.inner().clone();
handles.push(tokio::spawn(async move {
backend.validate_project_rules_and_publish().await;
}));
}
for handle in handles {
handle.await.expect("task should not panic");
}
})
.await;
assert!(result.is_ok(), "rapid project validation timed out");
// Lower bound rather than equality: presumably initialize()'s background
// validation also bumps the counter — TODO confirm against Backend impl.
let generation = service
.inner()
.project_validation_generation
.load(Ordering::SeqCst);
assert!(
generation >= validation_count as u64,
"project_validation_generation should be >= {}, got {}",
validation_count,
generation
);
}
/// Opening a document must record its client-supplied initial version.
#[tokio::test]
async fn test_document_version_tracked_on_open() {
    let backend = Backend::new_test();
    let dir = tempfile::tempdir().unwrap();
    let path = dir.path().join("SKILL.md");
    std::fs::write(&path, "# Test").unwrap();
    let uri = Url::from_file_path(&path).unwrap();
    // Build the open notification up front, then deliver it.
    let open = DidOpenTextDocumentParams {
        text_document: TextDocumentItem {
            uri: uri.clone(),
            language_id: "markdown".to_string(),
            version: 1,
            text: "# Test".to_string(),
        },
    };
    backend.handle_did_open(open).await;
    assert_eq!(backend.get_document_version(&uri).await, Some(1));
}
/// A did_change notification must advance the tracked version to the
/// client-supplied value.
#[tokio::test]
async fn test_document_version_updated_on_change() {
    let backend = Backend::new_test();
    let dir = tempfile::tempdir().unwrap();
    let path = dir.path().join("SKILL.md");
    std::fs::write(&path, "# Test").unwrap();
    let uri = Url::from_file_path(&path).unwrap();
    backend
        .handle_did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "markdown".to_string(),
                version: 1,
                text: "# Test".to_string(),
            },
        })
        .await;
    assert_eq!(backend.get_document_version(&uri).await, Some(1));
    // Deliver a full-document replacement carrying version 2.
    let change = DidChangeTextDocumentParams {
        text_document: VersionedTextDocumentIdentifier {
            uri: uri.clone(),
            version: 2,
        },
        content_changes: vec![TextDocumentContentChangeEvent {
            range: None,
            range_length: None,
            text: "# Updated".to_string(),
        }],
    };
    backend.handle_did_change(change).await;
    assert_eq!(backend.get_document_version(&uri).await, Some(2));
}
/// Closing a document must drop its version from tracking entirely.
#[tokio::test]
async fn test_document_version_cleared_on_close() {
    let backend = Backend::new_test();
    let dir = tempfile::tempdir().unwrap();
    let path = dir.path().join("SKILL.md");
    std::fs::write(&path, "# Test").unwrap();
    let uri = Url::from_file_path(&path).unwrap();
    let doc = TextDocumentItem {
        uri: uri.clone(),
        language_id: "markdown".to_string(),
        version: 1,
        text: "# Test".to_string(),
    };
    backend
        .handle_did_open(DidOpenTextDocumentParams { text_document: doc })
        .await;
    assert_eq!(backend.get_document_version(&uri).await, Some(1));
    // Close it and confirm the version lookup now misses.
    backend
        .handle_did_close(DidCloseTextDocumentParams {
            text_document: TextDocumentIdentifier { uri: uri.clone() },
        })
        .await;
    assert_eq!(backend.get_document_version(&uri).await, None);
}
/// Looking up the version of a URI that was never opened yields None.
#[tokio::test]
async fn test_document_version_returns_none_for_unknown_uri() {
    let backend = Backend::new_test();
    let dir = tempfile::tempdir().unwrap();
    let uri = Url::from_file_path(dir.path().join("never-opened.md")).unwrap();
    assert_eq!(backend.get_document_version(&uri).await, None);
}
/// A did_change with an empty content_changes vec must still bump the
/// tracked version — version bookkeeping is independent of edit delivery.
#[tokio::test]
async fn test_document_version_updated_even_on_empty_content_changes() {
    let backend = Backend::new_test();
    let dir = tempfile::tempdir().unwrap();
    let path = dir.path().join("SKILL.md");
    std::fs::write(&path, "# Test").unwrap();
    let uri = Url::from_file_path(&path).unwrap();
    backend
        .handle_did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "markdown".to_string(),
                version: 1,
                text: "# Test".to_string(),
            },
        })
        .await;
    assert_eq!(backend.get_document_version(&uri).await, Some(1));
    // Note the deliberately empty content_changes.
    let empty_change = DidChangeTextDocumentParams {
        text_document: VersionedTextDocumentIdentifier {
            uri: uri.clone(),
            version: 2,
        },
        content_changes: vec![],
    };
    backend.handle_did_change(empty_change).await;
    assert_eq!(backend.get_document_version(&uri).await, Some(2));
}
/// Version tracking is per-URI: changing one document must leave another
/// document's version untouched.
#[tokio::test]
async fn test_multiple_documents_track_independent_versions() {
    let backend = Backend::new_test();
    let dir = tempfile::tempdir().unwrap();
    let path_a = dir.path().join("a").join("SKILL.md");
    let path_b = dir.path().join("b").join("SKILL.md");
    std::fs::create_dir_all(path_a.parent().unwrap()).unwrap();
    std::fs::create_dir_all(path_b.parent().unwrap()).unwrap();
    std::fs::write(&path_a, "# A").unwrap();
    std::fs::write(&path_b, "# B").unwrap();
    let uri_a = Url::from_file_path(&path_a).unwrap();
    let uri_b = Url::from_file_path(&path_b).unwrap();
    // Open both documents with distinct initial versions.
    for (uri, version, text) in [(&uri_a, 5, "# A"), (&uri_b, 10, "# B")] {
        backend
            .handle_did_open(DidOpenTextDocumentParams {
                text_document: TextDocumentItem {
                    uri: uri.clone(),
                    language_id: "markdown".to_string(),
                    version,
                    text: text.to_string(),
                },
            })
            .await;
    }
    assert_eq!(backend.get_document_version(&uri_a).await, Some(5));
    assert_eq!(backend.get_document_version(&uri_b).await, Some(10));
    // Change only document A; B's version must be unaffected.
    backend
        .handle_did_change(DidChangeTextDocumentParams {
            text_document: VersionedTextDocumentIdentifier {
                uri: uri_a.clone(),
                version: 6,
            },
            content_changes: vec![TextDocumentContentChangeEvent {
                range: None,
                range_length: None,
                text: "# A updated".to_string(),
            }],
        })
        .await;
    assert_eq!(backend.get_document_version(&uri_a).await, Some(6));
    assert_eq!(backend.get_document_version(&uri_b).await, Some(10));
}
/// Full lifecycle: open → change → change (version jump) → close → reopen,
/// verifying the tracked version at every step.
#[tokio::test]
async fn test_document_version_lifecycle_through_events() {
    let backend = Backend::new_test();
    let dir = tempfile::tempdir().unwrap();
    let path = dir.path().join("SKILL.md");
    let initial = "---\nname: lifecycle\nversion: 1.0.0\nmodel: sonnet\n---\n# Lifecycle Test\n";
    std::fs::write(&path, initial).unwrap();
    let uri = Url::from_file_path(&path).unwrap();
    // Untracked before the first open.
    assert_eq!(backend.get_document_version(&uri).await, None);
    backend
        .handle_did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "markdown".to_string(),
                version: 1,
                text: initial.to_string(),
            },
        })
        .await;
    assert_eq!(backend.get_document_version(&uri).await, Some(1));
    // Two changes; the tracked version must follow the client-supplied value
    // even when it jumps (2 -> 5).
    let edits = [
        (
            2,
            "---\nname: lifecycle\nversion: 2.0.0\nmodel: sonnet\n---\n# Lifecycle Test v2\n",
        ),
        (
            5,
            "---\nname: lifecycle\nversion: 3.0.0\nmodel: sonnet\n---\n# Lifecycle Test v3\n",
        ),
    ];
    for (version, text) in edits {
        backend
            .handle_did_change(DidChangeTextDocumentParams {
                text_document: VersionedTextDocumentIdentifier {
                    uri: uri.clone(),
                    version,
                },
                content_changes: vec![TextDocumentContentChangeEvent {
                    range: None,
                    range_length: None,
                    text: text.to_string(),
                }],
            })
            .await;
        assert_eq!(backend.get_document_version(&uri).await, Some(version));
    }
    // Closing clears tracking entirely.
    backend
        .handle_did_close(DidCloseTextDocumentParams {
            text_document: TextDocumentIdentifier { uri: uri.clone() },
        })
        .await;
    assert_eq!(backend.get_document_version(&uri).await, None);
    // Reopening starts a fresh version sequence at 1.
    backend
        .handle_did_open(DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: uri.clone(),
                language_id: "markdown".to_string(),
                version: 1,
                text: "---\nname: lifecycle\nversion: 1.0.0\nmodel: sonnet\n---\n# Lifecycle Reopened\n".to_string(),
            },
        })
        .await;
    assert_eq!(backend.get_document_version(&uri).await, Some(1));
}