#![allow(clippy::unwrap_used)]
use crate::semantic::Workspace;
use crate::semantic::adapters::syntax_factory::populate_syntax_file;
use crate::semantic::graphs::ReferenceIndex;
use crate::semantic::processors::SemanticTokenCollector;
use crate::semantic::symbol_table::SymbolTable;
use crate::syntax::SyntaxFile;
use crate::syntax::parser::parse_content;
use std::path::PathBuf;
/// Parses `source` as SysML content under a synthetic `test.sysml` path,
/// panicking if parsing fails (acceptable in test code).
fn parse_sysml(source: &str) -> SyntaxFile {
    parse_content(source, &PathBuf::from("test.sysml")).expect("Parse should succeed")
}
#[test]
fn test_stdlib_package_with_types() {
    // Standard-library package containing doc comments, a private import,
    // and a metadata definition with two attributes.
    let source = r#"standard library package AnalysisTooling {
doc
/*
* This package contains definitions for metadata annotations related
* to analysis tool integration.
*/
private import ScalarValues::*;
metadata def ToolExecution {
doc
/*
* ToolExecution metadata identifies an external analysis tool to be
* used to implement the annotated action.
*/
attribute toolName : String;
attribute uri : String;
}
}"#;
    let path = PathBuf::from("test.sysml");
    let mut workspace = Workspace::<SyntaxFile>::new();
    workspace.add_file(path.clone(), parse_sysml(source));
    workspace.populate_file(&path).expect("Failed to populate");

    let tokens = SemanticTokenCollector::collect_from_workspace(&workspace, "test.sysml");
    let lines: Vec<&str> = source.lines().collect();
    // Extracts the character span a token claims from the given source line.
    let text_of = |line_no: usize, col: usize, len: usize| -> String {
        lines[line_no].chars().skip(col).take(len).collect()
    };

    assert!(
        !tokens.is_empty(),
        "Expected at least 1 token, got {}",
        tokens.len()
    );

    let pkg_token = tokens
        .iter()
        .find(|t| t.line == 0)
        .expect("Should have token on line 0");
    let pkg_text = text_of(0, pkg_token.column as usize, pkg_token.length as usize);
    assert_eq!(
        pkg_text, "AnalysisTooling",
        "Package token should highlight 'AnalysisTooling'"
    );

    // A token on the `metadata def` line is optional; validate it only when present.
    if let Some(tok) = tokens.iter().find(|t| t.line == 10) {
        let text = text_of(10, tok.column as usize, tok.length as usize);
        assert_eq!(text, "ToolExecution", "Should capture metadata def name");
    }
}
#[test]
fn test_kerml_classifiers() {
    // KerML source exercising classifier, class, and feature declarations.
    let source = r#"package TestPkg {
classifier MyClassifier;
class MyClass;
feature myFeature;
}"#;
    let path = PathBuf::from("test.kerml");
    let syntax_file = parse_content(source, &path).expect("Parse should succeed");

    let mut symbol_table = SymbolTable::new();
    let mut reference_index = ReferenceIndex::new();
    symbol_table.set_current_file(Some("test.kerml".to_string()));
    let result = populate_syntax_file(&syntax_file, &mut symbol_table, &mut reference_index);
    assert!(result.is_ok(), "Symbol population failed: {result:?}");

    let tokens = SemanticTokenCollector::collect_from_symbols(&symbol_table, "test.kerml");
    let lines: Vec<&str> = source.lines().collect();
    // Extracts the character span a token claims from the given source line.
    let text_of = |line_no: usize, col: usize, len: usize| -> String {
        lines[line_no].chars().skip(col).take(len).collect()
    };

    assert!(
        tokens.len() >= 3,
        "Expected at least 3 tokens, got {}",
        tokens.len()
    );

    let classifier_token = tokens
        .iter()
        .find(|t| t.line == 1)
        .expect("Should have token on line 1 (classifier)");
    let classifier_text = text_of(
        1,
        classifier_token.column as usize,
        classifier_token.length as usize,
    );
    assert_eq!(
        classifier_text, "MyClassifier",
        "Classifier token should be 'MyClassifier'"
    );

    let class_token = tokens
        .iter()
        .find(|t| t.line == 2)
        .expect("Should have token on line 2 (class)");
    let class_text = text_of(2, class_token.column as usize, class_token.length as usize);
    assert_eq!(class_text, "MyClass", "Class token should be 'MyClass'");
}
#[test]
fn test_attribute_definitions_and_usages() {
    // A part definition with a typed attribute, plus a usage of that part.
    let source = r#"package TestPackage {
part def Vehicle {
attribute mass: Real;
}
part myVehicle : Vehicle;
}"#;
    let syntax_file = parse_sysml(source);
    let mut symbol_table = SymbolTable::new();
    let mut reference_index = ReferenceIndex::new();
    symbol_table.set_current_file(Some("test.sysml".to_string()));
    let result = populate_syntax_file(&syntax_file, &mut symbol_table, &mut reference_index);
    assert!(result.is_ok(), "Symbol population failed: {result:?}");

    let tokens = SemanticTokenCollector::collect_from_symbols(&symbol_table, "test.sysml");
    assert!(
        tokens.len() >= 4,
        "Expected at least 4 tokens, got {}",
        tokens.len()
    );

    // The attribute's name token should sit exactly over "mass" on line 2.
    let mass_token = tokens
        .iter()
        .find(|t| t.line == 2)
        .expect("Should have a token on line 2 (attribute mass)");
    let mass_text: String = source
        .lines()
        .nth(2)
        .unwrap_or("")
        .chars()
        .skip(mass_token.column as usize)
        .take(mass_token.length as usize)
        .collect();
    assert_eq!(mass_text, "mass", "Attribute token should highlight 'mass'");
}
#[test]
fn test_semantic_token_text_extraction() {
    // Mixed declarations (abstract def, plain def, typed usage) to verify
    // that every emitted token points at real, non-empty text in the source.
    let source = r#"standard library package QuantityTest {
abstract attribute def TensorQuantityValue;
attribute def ScalarQuantityValue;
part vehicle: Vehicle;
}"#;
    let syntax_file = parse_sysml(source);
    let mut symbol_table = SymbolTable::new();
    let mut reference_index = ReferenceIndex::new();
    symbol_table.set_current_file(Some("test.sysml".to_string()));
    let populate_result =
        populate_syntax_file(&syntax_file, &mut symbol_table, &mut reference_index);
    assert!(
        populate_result.is_ok(),
        "Symbol population failed: {populate_result:?}"
    );

    let tokens = SemanticTokenCollector::collect_from_symbols(&symbol_table, "test.sysml");
    let lines: Vec<&str> = source.lines().collect();

    // Every token must extract a non-empty character slice from its line.
    // (Previously this loop also computed unused `_end_char`/`_start_byte`
    // byte-offset values; that dead work has been removed.)
    for (i, token) in tokens.iter().enumerate() {
        let line_text = lines.get(token.line as usize).unwrap_or(&"");
        let char_slice: String = line_text
            .chars()
            .skip(token.column as usize)
            .take(token.length as usize)
            .collect();
        assert!(
            !char_slice.is_empty(),
            "Token {} has empty text extraction at line {} col {}",
            i,
            token.line,
            token.column
        );
    }

    assert!(
        tokens.len() >= 3,
        "Should have at least 3 tokens (package, definitions, usage)"
    );

    // Package token: only require that it lands on some text; exact column
    // placement is pinned by the other tests in this file.
    let package_token = tokens
        .iter()
        .find(|t| t.line == 0)
        .expect("Should have token on line 0");
    let package_text: String = lines[0]
        .chars()
        .skip(package_token.column as usize)
        .take(package_token.length as usize)
        .collect();
    assert!(
        !package_text.is_empty(),
        "Package token should extract some text (even if wrong position)"
    );

    // A token on the first definition line is optional; validate extraction
    // only when one is present.
    if let Some(def_token) = tokens.iter().find(|t| t.line == 1) {
        let def_text: String = lines[1]
            .chars()
            .skip(def_token.column as usize)
            .take(def_token.length as usize)
            .collect();
        assert!(
            !def_text.is_empty(),
            "Definition token should extract some text"
        );
    }
}
#[test]
fn test_kerml_nested_packages_semantic_tokens() {
    // Two levels of package nesting: tokens must also be produced for the
    // declarations inside the inner package.
    let source = r#"package Outer {
package Inner {
classifier Nested;
feature myFeature : SomeType;
}
}"#;
    let path = PathBuf::from("test.kerml");
    let mut workspace = Workspace::<SyntaxFile>::new();
    workspace.add_file(
        path.clone(),
        parse_content(source, &path).expect("Parse should succeed"),
    );
    workspace.populate_file(&path).expect("Failed to populate");

    let tokens = SemanticTokenCollector::collect_from_workspace(&workspace, "test.kerml");
    assert!(
        tokens.len() >= 3,
        "Expected at least 3 tokens for KerML nested packages, got {}",
        tokens.len()
    );
    // Line 2 onward is inside `Inner`, so any token there proves nested
    // elements were collected.
    assert!(
        tokens.iter().any(|t| t.line >= 2),
        "Should have tokens from nested elements (line 2+)"
    );
}
#[test]
fn test_metadata_def_with_short_name_and_specialization() {
    // A metadata def carrying both a short name (`<original>`) and a
    // specialization (`:> SemanticMetadata`); neither must prevent the
    // collector from emitting a token for the definition name.
    // (Leftover debug `println!` dumps of symbols and tokens were removed.)
    let source = r#"metadata def <original> OriginalRequirementMetadata :> SemanticMetadata {
doc /* Metadata for original requirements. */
}"#;
    let syntax_file = parse_sysml(source);
    let mut workspace = Workspace::<SyntaxFile>::new();
    let path = PathBuf::from("test.sysml");
    workspace.add_file(path.clone(), syntax_file);
    workspace.populate_file(&path).expect("Failed to populate");

    let tokens = SemanticTokenCollector::collect_from_workspace(&workspace, "test.sysml");
    assert!(
        !tokens.is_empty(),
        "Expected semantic tokens for metadata def, got none"
    );

    // `OriginalRequirementMetadata` starts at column 24 on line 0 (right
    // after `metadata def <original> `); accept a token anywhere within the
    // name's span rather than requiring an exact start column.
    let has_def_token = tokens
        .iter()
        .any(|t| t.line == 0 && t.column >= 24 && t.column <= 52);
    assert!(
        has_def_token,
        "Expected token for 'OriginalRequirementMetadata' on line 0"
    );
}