#[cfg(feature = "lsp")]
use std::sync::Arc;
#[cfg(feature = "lsp")]
use tokio::sync::RwLock;
#[cfg(feature = "lsp")]
use tower_lsp_server::jsonrpc::Result;
#[cfg(feature = "lsp")]
use tower_lsp_server::ls_types::*;
#[cfg(feature = "lsp")]
use tower_lsp_server::{Client, LanguageServer};
#[cfg(feature = "lsp")]
use crate::ast::analyzer::AnalyzeError;
#[cfg(feature = "lsp")]
use crate::ast::raw;
#[cfg(feature = "lsp")]
use super::ast_index::AstIndex;
#[cfg(feature = "lsp")]
use super::capabilities::server_capabilities;
#[cfg(feature = "lsp")]
use super::conversion::span_to_range;
#[cfg(feature = "lsp")]
use super::document_store::DocumentStore;
#[cfg(feature = "lsp")]
use super::handlers;
#[cfg(feature = "lsp")]
/// The Nika language server: state shared by every LSP request handler.
pub struct NikaLanguageServer {
    /// Handle for pushing notifications (diagnostics, log messages) back to the editor.
    client: Client,
    /// Text of all currently open documents, keyed by URI; `RwLock` because
    /// read-only handlers (hover, completion, ...) run concurrently with edits.
    documents: Arc<RwLock<DocumentStore>>,
    /// Per-document parse/analysis cache. Mutated through `&self`
    /// (`parse_document` below), so it presumably uses interior
    /// mutability/internal synchronization — TODO confirm in `AstIndex`.
    ast_index: AstIndex,
}
#[cfg(feature = "lsp")]
impl NikaLanguageServer {
    /// Creates a server bound to the given client handle, with an empty
    /// document store and AST cache.
    pub fn new(client: Client) -> Self {
        Self {
            client,
            documents: Arc::new(RwLock::new(DocumentStore::new())),
            ast_index: AstIndex::new(),
        }
    }

    /// Parses `text` for `uri`, gathers analyzer errors, any parse error, and
    /// model-compatibility issues, then publishes the combined diagnostics.
    async fn analyze_document(&self, uri: &Uri, text: &str) {
        let errors = self.ast_index.parse_document(uri, text, 0);
        let mut diagnostics = self.errors_to_diagnostics(&errors, text);
        if let Some(parse_error) = self.ast_index.get_parse_error(uri) {
            diagnostics.push(self.parse_error_to_diagnostic(&parse_error, text));
        }
        diagnostics.extend(self.model_compatibility_diagnostics(uri, text));
        self.client
            .publish_diagnostics(uri.clone(), diagnostics, None)
            .await;
    }

    /// Builds a "nika"-sourced ERROR diagnostic. Single construction point
    /// shared by the analyzer-error and parse-error paths so the two stay
    /// consistent (previously the struct literal was duplicated in both).
    fn error_diagnostic(range: Range, code: String, message: String) -> Diagnostic {
        Diagnostic {
            range,
            severity: Some(DiagnosticSeverity::ERROR),
            code: Some(NumberOrString::String(code)),
            code_description: None,
            source: Some("nika".to_string()),
            message,
            related_information: None,
            tags: None,
            data: None,
        }
    }

    /// Converts analyzer errors into LSP diagnostics, mapping each error's
    /// span into an editor range against `source`.
    fn errors_to_diagnostics(&self, errors: &[AnalyzeError], source: &str) -> Vec<Diagnostic> {
        errors
            .iter()
            .map(|e| {
                Self::error_diagnostic(
                    span_to_range(&e.span, source),
                    e.kind.code().to_string(),
                    e.message.clone(),
                )
            })
            .collect()
    }

    /// Converts a raw parse error into an LSP diagnostic.
    fn parse_error_to_diagnostic(&self, error: &raw::ParseError, source: &str) -> Diagnostic {
        Self::error_diagnostic(
            span_to_range(&error.span, source),
            error.kind.code().to_string(),
            error.message.clone(),
        )
    }

    /// Model-compatibility diagnostics for the (already parsed) document at `uri`.
    fn model_compatibility_diagnostics(&self, uri: &Uri, source: &str) -> Vec<Diagnostic> {
        check_model_compatibility(&self.ast_index, uri, source)
    }
}
#[cfg(feature = "lsp")]
/// Checks every analyzed task in the document at `uri` against the model
/// intelligence database and returns one diagnostic per compatibility issue.
/// Returns an empty list when the document is not cached or not analyzed.
fn check_model_compatibility(ast_index: &AstIndex, uri: &Uri, source: &str) -> Vec<Diagnostic> {
    use crate::ast::analyzed::{AnalyzedTaskAction, OutputFormat};
    use crate::lsp::model_intel::{self, IssueSeverity, TaskModelConfig};

    // Nothing to check unless the document has been parsed *and* analyzed.
    let Some(cached) = ast_index.get(uri) else {
        return Vec::new();
    };
    let Some(analyzed) = cached.analyzed.as_ref() else {
        return Vec::new();
    };

    let mut diagnostics = Vec::new();
    for task in &analyzed.tasks {
        // Tasks without an explicit model cannot be validated.
        let model_id = task.model.clone();
        if model_id.is_none() {
            continue;
        }

        // Extract the action-specific knobs that affect model compatibility.
        let (extended_thinking, tool_choice_required) = match &task.action {
            AnalyzedTaskAction::Infer(infer) => (infer.thinking.unwrap_or(false), false),
            AnalyzedTaskAction::Agent(agent) => (
                agent.extended_thinking.unwrap_or(false),
                agent.tool_choice.as_deref() == Some("required"),
            ),
            _ => (false, false),
        };
        let json_output = task
            .output
            .as_ref()
            .map(|o| o.format == OutputFormat::Json)
            .unwrap_or(false);

        let config = TaskModelConfig {
            model_id,
            provider: task.provider.clone(),
            extended_thinking,
            json_output,
            tool_choice_required,
        };

        // One diagnostic per issue, all anchored to the task's span.
        diagnostics.extend(model_intel::check_compatibility(&config).into_iter().map(
            |issue| Diagnostic {
                range: span_to_range(&task.span, source),
                severity: Some(match issue.severity {
                    IssueSeverity::Error => DiagnosticSeverity::ERROR,
                    IssueSeverity::Warning => DiagnosticSeverity::WARNING,
                }),
                code: Some(NumberOrString::String(issue.code.to_string())),
                code_description: None,
                source: Some("nika".to_string()),
                message: issue.message,
                related_information: None,
                // NIKA-033 is the "deprecated model" code; tag it so editors
                // can render the strikethrough style.
                tags: (issue.code == "NIKA-033").then(|| vec![DiagnosticTag::DEPRECATED]),
                data: None,
            },
        ));
    }
    diagnostics
}
#[cfg(feature = "lsp")]
impl LanguageServer for NikaLanguageServer {
    // NOTE: several handlers below had `&params` corrupted to the mojibake
    // `¶ms` (an HTML-entity mangling of `&para` + `ms`), which does not
    // compile; restored to `&params` throughout.

    /// LSP handshake: advertises capabilities and server identity.
    async fn initialize(&self, _params: InitializeParams) -> Result<InitializeResult> {
        Ok(InitializeResult {
            capabilities: server_capabilities(),
            server_info: Some(ServerInfo {
                name: "nika-lsp".to_string(),
                version: Some(env!("CARGO_PKG_VERSION").to_string()),
            }),
            offset_encoding: None,
        })
    }

    async fn initialized(&self, _params: InitializedParams) {
        self.client
            .log_message(MessageType::INFO, "Nika language server initialized!")
            .await;
    }

    async fn shutdown(&self) -> Result<()> {
        Ok(())
    }

    /// Stores the newly opened document's text, then runs a full analysis pass.
    async fn did_open(&self, params: DidOpenTextDocumentParams) {
        let uri = params.text_document.uri;
        let text = params.text_document.text;
        {
            // Scope the write guard so it is dropped before the analysis await.
            let mut docs = self.documents.write().await;
            docs.insert(uri.clone(), text.clone());
        }
        self.analyze_document(&uri, &text).await;
    }

    /// Applies incremental content changes to the stored text, then re-analyzes.
    /// Changes for documents we never saw opened are logged and ignored.
    async fn did_change(&self, params: DidChangeTextDocumentParams) {
        let uri = params.text_document.uri;
        let text = {
            let mut docs = self.documents.write().await;
            if !docs.contains(&uri) {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("Received did_change for unopened document: {:?}", uri),
                    )
                    .await;
                return;
            }
            for change in params.content_changes {
                docs.apply_change(&uri, change);
            }
            docs.get(&uri).cloned().unwrap_or_default()
        };
        self.analyze_document(&uri, &text).await;
    }

    /// Re-analyzes on save when the client includes the saved text
    /// (only sent if the save capability requested `includeText`).
    async fn did_save(&self, params: DidSaveTextDocumentParams) {
        if let Some(text) = params.text {
            self.analyze_document(&params.text_document.uri, &text)
                .await;
        }
    }

    /// Drops the stored document and clears its published diagnostics.
    async fn did_close(&self, params: DidCloseTextDocumentParams) {
        let uri = params.text_document.uri;
        {
            let mut docs = self.documents.write().await;
            docs.remove(&uri);
        }
        // An empty diagnostics publish removes any stale squiggles.
        self.client.publish_diagnostics(uri, vec![], None).await;
    }

    /// Completions: tries the context-driven core provider first and falls
    /// back to the AST-based provider when it returns nothing.
    async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
        let uri = &params.text_document_position.text_document.uri;
        let position = params.text_document_position.position;
        let docs = self.documents.read().await;
        let text = docs.get(uri).cloned().unwrap_or_default();
        // NOTE(review): truncating cast — assumes document byte offsets fit in
        // u32 (documents under 4 GiB); confirm against `position_to_offset`.
        let offset = super::conversion::position_to_offset(position, &text) as u32;
        let context = nika_lsp_core::analysis::context::detect_context(&text, offset, None);
        let items = nika_lsp_core::handlers::completion::completions(&text, offset, &context);
        if !items.is_empty() {
            return Ok(Some(CompletionResponse::Array(items)));
        }
        let completions = handlers::completion::compute_completions_with_ast(
            &self.ast_index,
            uri,
            &text,
            position,
        );
        Ok(Some(CompletionResponse::Array(completions)))
    }

    /// Hover information, computed from the cached AST.
    async fn hover(&self, params: HoverParams) -> Result<Option<Hover>> {
        let uri = &params.text_document_position_params.text_document.uri;
        let position = params.text_document_position_params.position;
        let docs = self.documents.read().await;
        let text = docs.get(uri).cloned().unwrap_or_default();
        Ok(handlers::hover::compute_hover_with_ast(
            &self.ast_index,
            uri,
            &text,
            position,
        ))
    }

    /// Go-to-definition, resolved against the cached AST.
    async fn goto_definition(
        &self,
        params: GotoDefinitionParams,
    ) -> Result<Option<GotoDefinitionResponse>> {
        let uri = &params.text_document_position_params.text_document.uri;
        let position = params.text_document_position_params.position;
        let docs = self.documents.read().await;
        let text = docs.get(uri).cloned().unwrap_or_default();
        Ok(handlers::definition::find_definition_with_ast(
            &self.ast_index,
            uri,
            &text,
            position,
        ))
    }

    /// Code actions (quick fixes) for the diagnostics in the requested range.
    async fn code_action(&self, params: CodeActionParams) -> Result<Option<CodeActionResponse>> {
        let uri = &params.text_document.uri;
        let range = params.range;
        let diagnostics = &params.context.diagnostics;
        let docs = self.documents.read().await;
        let text = docs.get(uri).cloned().unwrap_or_default();
        let actions = handlers::code_action::compute_code_actions_with_ast(
            &self.ast_index,
            uri,
            &text,
            range,
            diagnostics,
        );
        Ok(Some(actions))
    }

    /// Document outline (nested symbols) from the cached AST.
    async fn document_symbol(
        &self,
        params: DocumentSymbolParams,
    ) -> Result<Option<DocumentSymbolResponse>> {
        let uri = &params.text_document.uri;
        let docs = self.documents.read().await;
        let text = docs.get(uri).cloned().unwrap_or_default();
        let symbols =
            handlers::symbols::compute_document_symbols_with_ast(&self.ast_index, uri, &text);
        Ok(Some(DocumentSymbolResponse::Nested(symbols)))
    }

    /// On configuration change, reparses every open document to refresh the
    /// AST cache. NOTE(review): parse results/diagnostics are not republished
    /// here — confirm whether that is intentional.
    async fn did_change_configuration(&self, params: DidChangeConfigurationParams) {
        tracing::info!("Configuration changed: {:?}", params.settings);
        let docs = self.documents.read().await;
        let uris: Vec<_> = docs.uris().cloned().collect();
        for uri in &uris {
            if let Some(text) = docs.get(uri) {
                self.ast_index.parse_document(uri, text, 0);
            }
        }
        drop(docs);
        self.client
            .log_message(MessageType::INFO, "Configuration updated")
            .await;
    }

    /// Full-document semantic tokens, delta-encoded per the LSP spec.
    async fn semantic_tokens_full(
        &self,
        params: SemanticTokensParams,
    ) -> Result<Option<SemanticTokensResult>> {
        let uri = &params.text_document.uri;
        let docs = self.documents.read().await;
        let text = docs.get(uri).cloned().unwrap_or_default();
        let raw_tokens = handlers::semantic_tokens::compute_semantic_tokens_with_ast(
            &self.ast_index,
            uri,
            &text,
        );
        let encoded = handlers::semantic_tokens::encode_tokens(raw_tokens);
        Ok(Some(SemanticTokensResult::Tokens(SemanticTokens {
            result_id: None,
            data: encoded,
        })))
    }
}
#[cfg(not(feature = "lsp"))]
/// Zero-sized stub used when the `lsp` feature is disabled, so downstream
/// code can still name the type without pulling in the LSP dependencies.
pub struct NikaLanguageServer;
#[cfg(not(feature = "lsp"))]
impl NikaLanguageServer {
    /// No-op constructor; unlike the feature-enabled version it takes no
    /// `Client`, since there is no LSP transport in this configuration.
    pub fn new() -> Self {
        Self
    }
}
#[cfg(test)]
/// Tests for `check_model_compatibility`: deprecated-model warnings (NIKA-033),
/// extended-thinking/model mismatches (NIKA-032), and the no-diagnostic paths.
mod tests {
    /// Smoke test: the module (including the feature-off stub) compiles.
    #[test]
    fn test_server_stub_compiles() {
    }

    /// A deprecated model id should yield a NIKA-033 WARNING carrying the
    /// DEPRECATED diagnostic tag.
    #[test]
    #[cfg(feature = "lsp")]
    fn test_deprecated_model_emits_warning_diagnostic() {
        use super::*;
        use tower_lsp_server::ls_types::Uri;
        let ast_index = AstIndex::new();
        let uri = "file:///test.nika.yaml".parse::<Uri>().unwrap();
        let text = r#"schema: nika/workflow@0.12
workflow: test
tasks:
  - id: step1
    model: gpt-4-turbo
    infer: "Hello"
"#;
        ast_index.parse_document(&uri, text, 1);
        let diags = check_model_compatibility(&ast_index, &uri, text);
        assert!(
            !diags.is_empty(),
            "Should emit diagnostics for deprecated model"
        );
        assert!(
            diags
                .iter()
                .any(|d| d.severity == Some(DiagnosticSeverity::WARNING)
                    && d.code == Some(NumberOrString::String("NIKA-033".to_string()))),
            "Should have NIKA-033 warning for deprecated model: {:?}",
            diags
        );
        let deprecated_diag = diags
            .iter()
            .find(|d| d.code == Some(NumberOrString::String("NIKA-033".to_string())))
            .unwrap();
        assert!(
            deprecated_diag
                .tags
                .as_ref()
                .map_or(false, |t| t.contains(&DiagnosticTag::DEPRECATED)),
            "NIKA-033 should have DEPRECATED tag"
        );
    }

    /// `extended_thinking: true` on a non-Claude model is a NIKA-032 ERROR.
    #[test]
    #[cfg(feature = "lsp")]
    fn test_extended_thinking_with_non_claude_emits_error() {
        use super::*;
        use tower_lsp_server::ls_types::Uri;
        let ast_index = AstIndex::new();
        let uri = "file:///test.nika.yaml".parse::<Uri>().unwrap();
        let text = r#"schema: nika/workflow@0.12
workflow: test
tasks:
  - id: step1
    model: gpt-4o
    infer:
      prompt: "Hello"
      extended_thinking: true
"#;
        ast_index.parse_document(&uri, text, 1);
        let diags = check_model_compatibility(&ast_index, &uri, text);
        assert!(
            diags
                .iter()
                .any(|d| d.severity == Some(DiagnosticSeverity::ERROR)
                    && d.code == Some(NumberOrString::String("NIKA-032".to_string()))),
            "Should have NIKA-032 error for extended_thinking with non-Claude model: {:?}",
            diags
        );
    }

    /// `extended_thinking` on a Claude model is allowed — no NIKA-032 error.
    #[test]
    #[cfg(feature = "lsp")]
    fn test_extended_thinking_with_claude_no_error() {
        use super::*;
        use tower_lsp_server::ls_types::Uri;
        let ast_index = AstIndex::new();
        let uri = "file:///test.nika.yaml".parse::<Uri>().unwrap();
        let text = r#"schema: nika/workflow@0.12
workflow: test
tasks:
  - id: step1
    model: claude-sonnet-4-6
    infer:
      prompt: "Hello"
      extended_thinking: true
"#;
        ast_index.parse_document(&uri, text, 1);
        let diags = check_model_compatibility(&ast_index, &uri, text);
        assert!(
            !diags.iter().any(
                |d| d.code == Some(NumberOrString::String("NIKA-032".to_string()))
                    && d.severity == Some(DiagnosticSeverity::ERROR)
            ),
            "Should NOT have NIKA-032 error for Claude with extended_thinking"
        );
    }

    /// An active (non-deprecated) model must not trigger NIKA-033.
    #[test]
    #[cfg(feature = "lsp")]
    fn test_active_model_no_deprecation_diagnostic() {
        use super::*;
        use tower_lsp_server::ls_types::Uri;
        let ast_index = AstIndex::new();
        let uri = "file:///test.nika.yaml".parse::<Uri>().unwrap();
        let text = r#"schema: nika/workflow@0.12
workflow: test
tasks:
  - id: step1
    model: gpt-4o
    infer: "Hello"
"#;
        ast_index.parse_document(&uri, text, 1);
        let diags = check_model_compatibility(&ast_index, &uri, text);
        assert!(
            !diags
                .iter()
                .any(|d| d.code == Some(NumberOrString::String("NIKA-033".to_string()))),
            "Should NOT emit NIKA-033 for active model"
        );
    }

    /// Tasks without a `model:` field are skipped entirely by the checker.
    #[test]
    #[cfg(feature = "lsp")]
    fn test_no_model_no_diagnostics() {
        use super::*;
        use tower_lsp_server::ls_types::Uri;
        let ast_index = AstIndex::new();
        let uri = "file:///test.nika.yaml".parse::<Uri>().unwrap();
        let text = r#"schema: nika/workflow@0.12
workflow: test
tasks:
  - id: step1
    infer: "Hello"
"#;
        ast_index.parse_document(&uri, text, 1);
        let diags = check_model_compatibility(&ast_index, &uri, text);
        assert!(
            diags.is_empty(),
            "Should emit no model diagnostics when no model specified"
        );
    }

    /// Unknown/custom model ids are not in the intel database, so the
    /// checker stays silent rather than guessing.
    #[test]
    #[cfg(feature = "lsp")]
    fn test_unknown_model_no_diagnostics() {
        use super::*;
        use tower_lsp_server::ls_types::Uri;
        let ast_index = AstIndex::new();
        let uri = "file:///test.nika.yaml".parse::<Uri>().unwrap();
        let text = r#"schema: nika/workflow@0.12
workflow: test
tasks:
  - id: step1
    model: some-custom-model
    infer: "Hello"
"#;
        ast_index.parse_document(&uri, text, 1);
        let diags = check_model_compatibility(&ast_index, &uri, text);
        assert!(
            diags.is_empty(),
            "Should emit no diagnostics for unknown/custom model"
        );
    }

    /// Invalid YAML must not panic the checker; result contents don't matter.
    #[test]
    #[cfg(feature = "lsp")]
    fn test_unparseable_document_no_crash() {
        use super::*;
        use tower_lsp_server::ls_types::Uri;
        let ast_index = AstIndex::new();
        let uri = "file:///test.nika.yaml".parse::<Uri>().unwrap();
        let text = "this is not valid yaml: [[[";
        ast_index.parse_document(&uri, text, 1);
        let diags = check_model_compatibility(&ast_index, &uri, text);
        let _ = diags;
    }
}