use super::syntax_kind::SyntaxKind;
use super::{SyntaxNode, SyntaxToken};
/// Returns `true` for token kinds that may serve as a name segment:
/// plain identifiers plus the contextual keywords that double as names.
#[inline]
fn is_name_token(kind: SyntaxKind) -> bool {
    kind == SyntaxKind::IDENT
        || kind == SyntaxKind::START_KW
        || kind == SyntaxKind::END_KW
        || kind == SyntaxKind::DONE_KW
        || kind == SyntaxKind::THIS_KW
}
/// Strip the surrounding single quotes from an unrestricted (quoted) name,
/// e.g. `'some name'` becomes `some name`. Text without a matching pair of
/// quotes is returned unchanged; in particular a lone `'` is not a pair.
#[inline]
fn strip_unrestricted_name(text: &str) -> String {
    // `strip_prefix`/`strip_suffix` replace the previous manual byte slicing
    // and make the "both quotes present" requirement explicit: for a lone
    // `'` the suffix strip fails (remainder is empty) and we fall through.
    text.strip_prefix('\'')
        .and_then(|inner| inner.strip_suffix('\''))
        .map(str::to_string)
        .unwrap_or_else(|| text.to_string())
}
/// Whether `node` has a direct child token of exactly `kind`.
/// Only immediate children are inspected; this is not a recursive search.
#[inline]
fn has_token(node: &SyntaxNode, kind: SyntaxKind) -> bool {
    node.children_with_tokens()
        .any(|elem| elem.into_token().map_or(false, |t| t.kind() == kind))
}
/// First direct child token whose kind qualifies as a name
/// (see [`is_name_token`]), if any.
#[inline]
fn find_name_token(node: &SyntaxNode) -> Option<SyntaxToken> {
    node.children_with_tokens()
        .find_map(|elem| elem.into_token().filter(|t| is_name_token(t.kind())))
}
// Generates `pub fn $name(&self) -> bool` reporting whether the node has a
// direct child token of `SyntaxKind::$kind`. The three-argument arm also
// interpolates a usage example into the generated doc comment.
macro_rules! has_token_method {
($name:ident, $kind:ident) => {
#[doc = concat!("Check if this node has the `", stringify!($kind), "` token.")]
pub fn $name(&self) -> bool {
has_token(&self.0, SyntaxKind::$kind)
}
};
($name:ident, $kind:ident, $example:literal) => {
#[doc = concat!("Check if this node has the `", stringify!($kind), "` token (e.g., `", $example, "`).")]
pub fn $name(&self) -> bool {
has_token(&self.0, SyntaxKind::$kind)
}
};
}
// Generates `pub fn $name(&self) -> Option<$type>` returning the first
// direct child node castable to `$type`.
macro_rules! first_child_method {
($name:ident, $type:ident) => {
#[doc = concat!("Get the first `", stringify!($type), "` child of this node.")]
pub fn $name(&self) -> Option<$type> {
self.0.children().find_map($type::cast)
}
};
}
// Generates a lazy iterator method over all direct children castable to
// `$type`; the `'_` lifetime ties the iterator to `&self`.
macro_rules! children_method {
($name:ident, $type:ident) => {
#[doc = concat!("Get all `", stringify!($type), "` children of this node.")]
pub fn $name(&self) -> impl Iterator<Item = $type> + '_ {
self.0.children().filter_map($type::cast)
}
};
}
// Eager variant of `children_method!`: collects the castable children into
// a `Vec` so callers don't borrow `self`.
macro_rules! children_vec_method {
($name:ident, $type:ident) => {
#[doc = concat!("Get all `", stringify!($type), "` children of this node as a Vec.")]
pub fn $name(&self) -> Vec<$type> {
self.0.children().filter_map($type::cast).collect()
}
};
}
// Generates an iterator over all descendants (recursive, depth-first)
// castable to `$type`. The three-argument arm accepts a custom doc string.
macro_rules! descendants_method {
($name:ident, $type:ident) => {
#[doc = concat!("Get all `", stringify!($type), "` descendants of this node.")]
pub fn $name(&self) -> impl Iterator<Item = $type> + '_ {
self.0.descendants().filter_map($type::cast)
}
};
($name:ident, $type:ident, $doc:literal) => {
#[doc = $doc]
pub fn $name(&self) -> impl Iterator<Item = $type> + '_ {
self.0.descendants().filter_map($type::cast)
}
};
}
// Generates a method returning the first child node castable to `$type`
// that appears *after* a `SyntaxKind::$keyword` token among the node's
// direct children (e.g. the name following `via` or `of`).
macro_rules! child_after_keyword_method {
($name:ident, $type:ident, $keyword:ident, $doc:literal) => {
#[doc = $doc]
pub fn $name(&self) -> Option<$type> {
let mut seen_keyword = false;
for child in self.0.children_with_tokens() {
match child {
rowan::NodeOrToken::Token(t) if t.kind() == SyntaxKind::$keyword => {
seen_keyword = true;
}
// Only nodes after the keyword are candidates; non-castable
// nodes after the keyword are skipped, not an error.
rowan::NodeOrToken::Node(n) if seen_keyword => {
if let Some(result) = $type::cast(n) {
return Some(result);
}
}
_ => {}
}
}
None
}
};
}
// Generates `members()` delegating to the optional `body()` child.
// The inner `collect` is required: `body.members()` borrows `body`, which is
// moved into the closure, so the iterator cannot escape without collecting.
macro_rules! body_members_method {
() => {
pub fn members(&self) -> impl Iterator<Item = NamespaceMember> + '_ {
self.body()
.into_iter()
.flat_map(|body| body.members().collect::<Vec<_>>())
}
};
}
// Generates a method returning the kind of the first direct child token
// matching one of the listed kinds (used for keyword-discriminated nodes
// such as state subactions and control nodes).
macro_rules! find_token_kind_method {
($name:ident, [$($kind:ident),+ $(,)?], $doc:literal) => {
#[doc = $doc]
pub fn $name(&self) -> Option<SyntaxKind> {
self.0
.children_with_tokens()
.filter_map(|e| e.into_token())
.find(|t| matches!(t.kind(), $(SyntaxKind::$kind)|+))
.map(|t| t.kind())
}
};
}
// Generates a source/target accessor pair from an existing iterator method:
// source is the first element, target the second.
macro_rules! source_target_pair {
($source:ident, $target:ident, $iter_method:ident, $type:ident) => {
#[doc = concat!("Get the first `", stringify!($type), "` (source).")]
pub fn $source(&self) -> Option<$type> {
self.$iter_method().next()
}
#[doc = concat!("Get the second `", stringify!($type), "` (target).")]
pub fn $target(&self) -> Option<$type> {
self.$iter_method().nth(1)
}
};
}
// Generates a method mapping the first direct child token matching one of
// the listed kinds to the corresponding `$enum_type` variant. Earlier tokens
// in the node win; unmatched nodes yield `None`.
macro_rules! token_to_enum_method {
($name:ident, $enum_type:ident, [$($token:ident => $variant:ident),+ $(,)?]) => {
pub fn $name(&self) -> Option<$enum_type> {
for token in self.0.children_with_tokens().filter_map(|e| e.into_token()) {
match token.kind() {
$(SyntaxKind::$token => return Some($enum_type::$variant),)+
_ => {}
}
}
None
}
};
}
// Generates `prefix_metadata()` collecting the PREFIX_METADATA siblings
// that immediately precede this node (see `collect_prefix_metadata`).
macro_rules! prefix_metadata_method {
() => {
pub fn prefix_metadata(&self) -> Vec<PrefixMetadata> {
collect_prefix_metadata(&self.0)
}
};
}
fn collect_prefix_metadata(node: &SyntaxNode) -> Vec<PrefixMetadata> {
let mut result = Vec::new();
let mut current = node.prev_sibling();
while let Some(sibling) = current {
if sibling.kind() == SyntaxKind::PREFIX_METADATA {
if let Some(pm) = PrefixMetadata::cast(sibling.clone()) {
result.push(pm);
}
current = sibling.prev_sibling();
} else {
break;
}
}
result.reverse();
result
}
fn split_at_keyword<T: AstNode>(node: &SyntaxNode, keyword: SyntaxKind) -> (Vec<T>, Vec<T>) {
let mut before = Vec::new();
let mut after = Vec::new();
let mut found_keyword = false;
for elem in node.children_with_tokens() {
if let Some(token) = elem.as_token() {
if token.kind() == keyword {
found_keyword = true;
}
} else if let Some(child) = elem.as_node() {
if let Some(item) = T::cast(child.clone()) {
if found_keyword {
after.push(item);
} else {
before.push(item);
}
}
}
}
(before, after)
}
/// A typed wrapper over an untyped [`SyntaxNode`] of one specific kind
/// (or a small set of kinds).
pub trait AstNode: Sized {
/// Whether a node of `kind` can be wrapped by this type.
fn can_cast(kind: SyntaxKind) -> bool;
/// Wrap `node`, returning `None` when its kind does not match.
fn cast(node: SyntaxNode) -> Option<Self>;
/// The underlying untyped syntax node.
fn syntax(&self) -> &SyntaxNode;
/// All descendants castable to `T`, depth-first.
fn descendants<T: AstNode>(&self) -> impl Iterator<Item = T> {
self.syntax().descendants().filter_map(T::cast)
}
/// The documentation comment immediately preceding this node, if any.
fn doc_comment(&self) -> Option<String> {
extract_doc_comment(self.syntax())
}
}
/// Extract the documentation comment(s) immediately preceding `node`.
///
/// Walks backwards over preceding sibling elements, skipping whitespace,
/// accumulating consecutive `//` line comments, and stopping at the first
/// `/* ... */` block comment (bare token or wrapped in a `COMMENT_ELEMENT`
/// node). Returns `None` when nothing was found; otherwise the comment
/// lines joined with `\n` in source order.
pub fn extract_doc_comment(node: &SyntaxNode) -> Option<String> {
    // Shared by the bare-token and COMMENT_ELEMENT paths (previously the
    // same logic was duplicated inline): contents of a `/* ... */` token
    // with the delimiters removed and `*` decoration cleaned per line.
    // Malformed delimiters yield an empty string, which is then dropped.
    fn block_comment_content(text: &str) -> String {
        text.strip_prefix("/*")
            .and_then(|s| s.strip_suffix("*/"))
            .map(clean_doc_comment)
            .unwrap_or_default()
    }
    let mut comments = Vec::new();
    let mut current = node.prev_sibling_or_token();
    while let Some(node_or_token) = current {
        match node_or_token {
            rowan::NodeOrToken::Token(ref t) => match t.kind() {
                // Whitespace between the node and its comments is skipped.
                SyntaxKind::WHITESPACE => {
                    current = t.prev_sibling_or_token();
                }
                // A block comment is taken whole and terminates the walk.
                SyntaxKind::BLOCK_COMMENT => {
                    let content = block_comment_content(t.text());
                    if !content.is_empty() {
                        comments.push(content);
                    }
                    break;
                }
                // Line comments stack; keep walking upwards.
                SyntaxKind::LINE_COMMENT => {
                    let text = t.text();
                    let content = text.strip_prefix("//").unwrap_or(text).trim();
                    if !content.is_empty() {
                        comments.push(content.to_string());
                    }
                    current = t.prev_sibling_or_token();
                }
                _ => break,
            },
            rowan::NodeOrToken::Node(ref n) => {
                // The parser may wrap a block comment in a COMMENT_ELEMENT
                // node; use the first block-comment token inside it. Any
                // node — comment element or not — ends the walk.
                if n.kind() == SyntaxKind::COMMENT_ELEMENT {
                    if let Some(t) = n
                        .children_with_tokens()
                        .filter_map(|e| e.into_token())
                        .find(|t| t.kind() == SyntaxKind::BLOCK_COMMENT)
                    {
                        let content = block_comment_content(t.text());
                        if !content.is_empty() {
                            comments.push(content);
                        }
                    }
                }
                break;
            }
        }
    }
    if comments.is_empty() {
        return None;
    }
    // The walk collected nearest-first; restore source order before joining.
    comments.reverse();
    Some(comments.join("\n"))
}
/// Normalize the interior of a block comment: trim each line, drop a
/// leading `*` decoration (plus the space after it), remove empty lines,
/// and rejoin with `\n`.
fn clean_doc_comment(s: &str) -> String {
    let mut cleaned = Vec::new();
    for raw in s.lines() {
        let line = raw.trim();
        // `* text` decoration -> `text`; anything else stays trimmed.
        let line = line.strip_prefix('*').map(str::trim_start).unwrap_or(line);
        if !line.is_empty() {
            cleaned.push(line.to_string());
        }
    }
    cleaned.join("\n")
}
/// A typed wrapper over an untyped [`SyntaxToken`], analogous to [`AstNode`].
pub trait AstToken: Sized {
/// Whether a token of `kind` can be wrapped by this type.
fn can_cast(kind: SyntaxKind) -> bool;
/// Wrap `token`, returning `None` when its kind does not match.
fn cast(token: SyntaxToken) -> Option<Self>;
/// The underlying untyped syntax token.
fn syntax(&self) -> &SyntaxToken;
/// The token's source text.
fn text(&self) -> &str {
self.syntax().text()
}
}
// Declares a newtype wrapper struct around `SyntaxNode` and implements
// `AstNode` for it with a one-kind `can_cast`. Used for every AST type in
// this file that maps to exactly one syntax kind.
macro_rules! ast_node {
($name:ident, $kind:ident) => {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct $name(SyntaxNode);
impl AstNode for $name {
fn can_cast(kind: SyntaxKind) -> bool {
kind == SyntaxKind::$kind
}
fn cast(node: SyntaxNode) -> Option<Self> {
if Self::can_cast(node.kind()) {
Some(Self(node))
} else {
None
}
}
fn syntax(&self) -> &SyntaxNode {
&self.0
}
}
};
}
// The root node of a parsed file.
ast_node!(SourceFile, SOURCE_FILE);
impl SourceFile {
// Top-level namespace members of the file.
children_method!(members, NamespaceMember);
}
/// Any element that can appear as a member of a namespace body (package,
/// definition, usage, connector, action construct, ...). One variant per
/// concrete AST wrapper type.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum NamespaceMember {
Package(Package),
LibraryPackage(LibraryPackage),
Import(Import),
Alias(Alias),
Dependency(Dependency),
Definition(Definition),
Usage(Usage),
Filter(ElementFilter),
Metadata(MetadataUsage),
Comment(Comment),
Bind(BindingConnector),
Succession(Succession),
Transition(TransitionUsage),
Connector(Connector),
ConnectUsage(ConnectUsage),
SendAction(SendActionUsage),
AcceptAction(AcceptActionUsage),
StateSubaction(StateSubaction),
ControlNode(ControlNode),
ForLoop(ForLoopActionUsage),
IfAction(IfActionUsage),
WhileLoop(WhileLoopActionUsage),
}
impl AstNode for NamespaceMember {
// Keep this list in sync with `cast` below: every kind listed here must
// have a corresponding arm, and vice versa.
fn can_cast(kind: SyntaxKind) -> bool {
matches!(
kind,
SyntaxKind::PACKAGE
| SyntaxKind::LIBRARY_PACKAGE
| SyntaxKind::IMPORT
| SyntaxKind::ALIAS_MEMBER
| SyntaxKind::DEPENDENCY
| SyntaxKind::DEFINITION
| SyntaxKind::USAGE
| SyntaxKind::SUBJECT_USAGE
| SyntaxKind::ACTOR_USAGE
| SyntaxKind::STAKEHOLDER_USAGE
| SyntaxKind::OBJECTIVE_USAGE
| SyntaxKind::ELEMENT_FILTER_MEMBER
| SyntaxKind::METADATA_USAGE
| SyntaxKind::COMMENT_ELEMENT
| SyntaxKind::BINDING_CONNECTOR
| SyntaxKind::SUCCESSION
| SyntaxKind::TRANSITION_USAGE
| SyntaxKind::CONNECTOR
| SyntaxKind::CONNECT_USAGE
| SyntaxKind::SEND_ACTION_USAGE
| SyntaxKind::ACCEPT_ACTION_USAGE
| SyntaxKind::STATE_SUBACTION
| SyntaxKind::CONTROL_NODE
| SyntaxKind::FOR_LOOP_ACTION_USAGE
| SyntaxKind::IF_ACTION_USAGE
| SyntaxKind::WHILE_LOOP_ACTION_USAGE
)
}
fn cast(node: SyntaxNode) -> Option<Self> {
match node.kind() {
SyntaxKind::PACKAGE => Some(Self::Package(Package(node))),
SyntaxKind::LIBRARY_PACKAGE => Some(Self::LibraryPackage(LibraryPackage(node))),
SyntaxKind::IMPORT => Some(Self::Import(Import(node))),
SyntaxKind::ALIAS_MEMBER => Some(Self::Alias(Alias(node))),
SyntaxKind::DEPENDENCY => Some(Self::Dependency(Dependency(node))),
SyntaxKind::DEFINITION => Some(Self::Definition(Definition(node))),
// All specialized usage kinds collapse into the one `Usage` wrapper,
// matching `Usage::can_cast` below.
SyntaxKind::USAGE
| SyntaxKind::SUBJECT_USAGE
| SyntaxKind::ACTOR_USAGE
| SyntaxKind::STAKEHOLDER_USAGE
| SyntaxKind::OBJECTIVE_USAGE => Some(Self::Usage(Usage(node))),
SyntaxKind::ELEMENT_FILTER_MEMBER => Some(Self::Filter(ElementFilter(node))),
SyntaxKind::METADATA_USAGE => Some(Self::Metadata(MetadataUsage(node))),
SyntaxKind::COMMENT_ELEMENT => Some(Self::Comment(Comment(node))),
SyntaxKind::BINDING_CONNECTOR => Some(Self::Bind(BindingConnector(node))),
SyntaxKind::SUCCESSION => Some(Self::Succession(Succession(node))),
SyntaxKind::TRANSITION_USAGE => Some(Self::Transition(TransitionUsage(node))),
SyntaxKind::CONNECTOR => Some(Self::Connector(Connector(node))),
SyntaxKind::CONNECT_USAGE => Some(Self::ConnectUsage(ConnectUsage(node))),
SyntaxKind::SEND_ACTION_USAGE => Some(Self::SendAction(SendActionUsage(node))),
SyntaxKind::ACCEPT_ACTION_USAGE => Some(Self::AcceptAction(AcceptActionUsage(node))),
SyntaxKind::STATE_SUBACTION => Some(Self::StateSubaction(StateSubaction(node))),
SyntaxKind::CONTROL_NODE => Some(Self::ControlNode(ControlNode(node))),
SyntaxKind::FOR_LOOP_ACTION_USAGE => Some(Self::ForLoop(ForLoopActionUsage(node))),
SyntaxKind::IF_ACTION_USAGE => Some(Self::IfAction(IfActionUsage(node))),
SyntaxKind::WHILE_LOOP_ACTION_USAGE => {
Some(Self::WhileLoop(WhileLoopActionUsage(node)))
}
_ => None,
}
}
// Delegate to the wrapped value's own `syntax()`.
fn syntax(&self) -> &SyntaxNode {
match self {
Self::Package(n) => n.syntax(),
Self::LibraryPackage(n) => n.syntax(),
Self::Import(n) => n.syntax(),
Self::Alias(n) => n.syntax(),
Self::Dependency(n) => n.syntax(),
Self::Definition(n) => n.syntax(),
Self::Usage(n) => n.syntax(),
Self::Filter(n) => n.syntax(),
Self::Metadata(n) => n.syntax(),
Self::Comment(n) => n.syntax(),
Self::Bind(n) => n.syntax(),
Self::Succession(n) => n.syntax(),
Self::Transition(n) => n.syntax(),
Self::Connector(n) => n.syntax(),
Self::ConnectUsage(n) => n.syntax(),
Self::SendAction(n) => n.syntax(),
Self::AcceptAction(n) => n.syntax(),
Self::StateSubaction(n) => n.syntax(),
Self::ControlNode(n) => n.syntax(),
Self::ForLoop(n) => n.syntax(),
Self::IfAction(n) => n.syntax(),
Self::WhileLoop(n) => n.syntax(),
}
}
}
// A `package N { ... }` element.
ast_node!(Package, PACKAGE);
impl Package {
first_child_method!(name, Name);
first_child_method!(body, NamespaceBody);
// members() flattens through the optional body.
body_members_method!();
}
// A `library package N { ... }` element.
ast_node!(LibraryPackage, LIBRARY_PACKAGE);
impl LibraryPackage {
has_token_method!(is_standard, STANDARD_KW, "standard library package P {}");
first_child_method!(name, Name);
first_child_method!(body, NamespaceBody);
}
// The braced `{ ... }` body of a namespace-like element.
ast_node!(NamespaceBody, NAMESPACE_BODY);
impl NamespaceBody {
/// All members of this body. `STATE_SUBACTION` children are flattened:
/// their nested members are yielded instead of the subaction itself,
/// unless the subaction has no castable children, in which case the
/// subaction is yielded as a member in its own right.
pub fn members(&self) -> impl Iterator<Item = NamespaceMember> + '_ {
self.0.children().flat_map(|child| {
if child.kind() == SyntaxKind::STATE_SUBACTION {
let nested: Vec<NamespaceMember> =
child.children().filter_map(NamespaceMember::cast).collect();
if nested.is_empty() {
StateSubaction::cast(child)
.map(NamespaceMember::StateSubaction)
.into_iter()
.collect()
} else {
nested
}
} else {
NamespaceMember::cast(child).into_iter().collect()
}
})
}
}
// An `import ...;` element.
ast_node!(Import, IMPORT);
impl Import {
has_token_method!(is_all, ALL_KW, "import all P::*");
first_child_method!(target, QualifiedName);
has_token_method!(is_wildcard, STAR, "import P::*");
/// Whether this is a recursive import: either a `**` token anywhere under
/// the node, or two or more separate `*` tokens among direct children
/// (covers a `::*::**`-style spelling split into single stars by the lexer).
pub fn is_recursive(&self) -> bool {
let has_star_star = self
.0
.descendants_with_tokens()
.filter_map(|e| match e {
rowan::NodeOrToken::Token(t) => Some(t),
_ => None,
})
.any(|t| t.kind() == SyntaxKind::STAR_STAR);
if has_star_star {
return true;
}
let stars: Vec<_> = self
.0
.children_with_tokens()
.filter_map(|e| e.into_token())
.filter(|t| t.kind() == SyntaxKind::STAR)
.collect();
stars.len() >= 2
}
/// Whether the import is re-exported (`public import ...`). Checks for a
/// PUBLIC_KW child first, then walks backwards over preceding sibling
/// tokens (skipping whitespace) in case the parser attached the visibility
/// keyword outside the IMPORT node.
pub fn is_public(&self) -> bool {
if has_token(&self.0, SyntaxKind::PUBLIC_KW) {
return true;
}
if let Some(prev) = self.0.prev_sibling_or_token() {
let mut current = Some(prev);
while let Some(node_or_token) = current {
match node_or_token {
rowan::NodeOrToken::Token(t) if t.kind() == SyntaxKind::PUBLIC_KW => {
return true;
}
rowan::NodeOrToken::Token(t) if t.kind() == SyntaxKind::WHITESPACE => {
current = t.prev_sibling_or_token();
}
_ => break,
}
}
}
false
}
first_child_method!(filter, FilterPackage);
}
// A filter clause attached to an import.
ast_node!(FilterPackage, FILTER_PACKAGE);
impl FilterPackage {
// `target` is the first of `targets`.
first_child_method!(target, QualifiedName);
children_vec_method!(targets, QualifiedName);
}
// An `alias N for Q;` member.
ast_node!(Alias, ALIAS_MEMBER);
impl Alias {
first_child_method!(name, Name);
first_child_method!(target, QualifiedName);
}
// A `dependency ... to ...;` member.
ast_node!(Dependency, DEPENDENCY);
impl Dependency {
children_method!(qualified_names, QualifiedName);
/// The qualified names appearing before the `to` keyword.
pub fn sources(&self) -> Vec<QualifiedName> {
split_at_keyword(&self.0, SyntaxKind::TO_KW).0
}
/// The first qualified name appearing after the `to` keyword.
pub fn target(&self) -> Option<QualifiedName> {
split_at_keyword::<QualifiedName>(&self.0, SyntaxKind::TO_KW)
.1
.into_iter()
.next()
}
prefix_metadata_method!();
}
// A `filter <expr>;` member constraining an import.
ast_node!(ElementFilter, ELEMENT_FILTER_MEMBER);
impl ElementFilter {
first_child_method!(expression, Expression);
/// Qualified names referenced via `@Name` directly inside the filter
/// expression (each `@` token pairs with the next QUALIFIED_NAME sibling).
pub fn metadata_refs(&self) -> Vec<String> {
let mut refs = Vec::new();
if let Some(expr) = self.expression() {
let mut at_seen = false;
for child in expr.syntax().children_with_tokens() {
match child {
rowan::NodeOrToken::Token(t) if t.kind() == SyntaxKind::AT => {
at_seen = true;
}
rowan::NodeOrToken::Node(n)
if at_seen && n.kind() == SyntaxKind::QUALIFIED_NAME =>
{
if let Some(qn) = QualifiedName::cast(n) {
refs.push(qn.to_string());
}
// Reset so each `@` consumes exactly one name.
at_seen = false;
}
_ => {}
}
}
}
refs
}
/// Every qualified name anywhere under the filter expression, with its
/// source range (used e.g. for reference resolution/highlighting).
pub fn all_qualified_refs(&self) -> Vec<(String, rowan::TextRange)> {
let mut refs = Vec::new();
if let Some(expr) = self.expression() {
for node in expr.syntax().descendants() {
if node.kind() == SyntaxKind::QUALIFIED_NAME {
if let Some(qn) = QualifiedName::cast(node.clone()) {
refs.push((qn.to_string(), node.text_range()));
}
}
}
}
refs
}
}
// A comment/documentation element (e.g. `doc /* ... */ about x`).
ast_node!(Comment, COMMENT_ELEMENT);
impl Comment {
first_child_method!(name, Name);
children_method!(about_targets, QualifiedName);
has_token_method!(has_about, ABOUT_KW, "doc /* text */ about x");
}
// A metadata usage (e.g. `@Rationale about x`).
ast_node!(MetadataUsage, METADATA_USAGE);
impl MetadataUsage {
first_child_method!(target, QualifiedName);
/// The `about` targets: every qualified-name child after the first
/// (the first is the metadata target itself).
pub fn about_targets(&self) -> impl Iterator<Item = QualifiedName> + '_ {
self.0.children().filter_map(QualifiedName::cast).skip(1)
}
has_token_method!(has_about, ABOUT_KW, "@Rationale about x");
first_child_method!(body, NamespaceBody);
}
ast_node!(PrefixMetadata, PREFIX_METADATA);
impl PrefixMetadata {
    /// The identifier token naming the metadata, if present.
    fn ident_token(&self) -> Option<SyntaxToken> {
        self.0
            .children_with_tokens()
            .find_map(|elem| elem.into_token().filter(|t| t.kind() == SyntaxKind::IDENT))
    }
    /// The metadata name as owned text.
    pub fn name(&self) -> Option<String> {
        let token = self.ident_token()?;
        Some(token.text().to_string())
    }
    /// The source range of the metadata name token.
    pub fn name_range(&self) -> Option<rowan::TextRange> {
        let token = self.ident_token()?;
        Some(token.text_range())
    }
}
// A definition element (`part def`, `action def`, `state def`, ...).
ast_node!(Definition, DEFINITION);
impl Definition {
has_token_method!(is_abstract, ABSTRACT_KW, "abstract part def P {}");
has_token_method!(is_variation, VARIATION_KW, "variation part def V {}");
has_token_method!(is_individual, INDIVIDUAL_KW, "individual part def Earth;");
// Maps the definition keyword token to its `DefinitionKind` variant.
token_to_enum_method!(definition_kind, DefinitionKind, [
PART_KW => Part,
ATTRIBUTE_KW => Attribute,
PORT_KW => Port,
ITEM_KW => Item,
ACTION_KW => Action,
STATE_KW => State,
CONSTRAINT_KW => Constraint,
REQUIREMENT_KW => Requirement,
CASE_KW => Case,
CALC_KW => Calc,
CONNECTION_KW => Connection,
INTERFACE_KW => Interface,
ALLOCATION_KW => Allocation,
FLOW_KW => Flow,
VIEW_KW => View,
VIEWPOINT_KW => Viewpoint,
RENDERING_KW => Rendering,
METADATA_KW => Metadata,
OCCURRENCE_KW => Occurrence,
ENUM_KW => Enum,
ANALYSIS_KW => Analysis,
VERIFICATION_KW => Verification,
USE_KW => UseCase,
CONCERN_KW => Concern,
CLASS_KW => Class,
STRUCT_KW => Struct,
ASSOC_KW => Assoc,
BEHAVIOR_KW => Behavior,
FUNCTION_KW => Function,
PREDICATE_KW => Predicate,
INTERACTION_KW => Interaction,
DATATYPE_KW => Datatype,
CLASSIFIER_KW => Classifier,
TYPE_KW => Type,
METACLASS_KW => Metaclass,
]);
first_child_method!(name, Name);
children_method!(specializations, Specialization);
first_child_method!(body, NamespaceBody);
first_child_method!(constraint_body, ConstraintBody);
body_members_method!();
prefix_metadata_method!();
}
/// The kind of a [`Definition`], derived from its introducing keyword.
/// Keep in sync with the `definition_kind` token mapping on `Definition`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DefinitionKind {
Part,
Attribute,
Port,
Item,
Action,
State,
Constraint,
Requirement,
Case,
Calc,
Connection,
Interface,
Allocation,
Flow,
View,
Viewpoint,
Rendering,
Metadata,
Occurrence,
Enum,
Analysis,
Verification,
UseCase,
Concern,
Class,
Struct,
Assoc,
Behavior,
Function,
Predicate,
Interaction,
Datatype,
Classifier,
Type,
Metaclass,
}
/// A usage element. Unlike the `ast_node!`-generated wrappers this one
/// accepts several syntax kinds: plain usages plus the specialized
/// subject/actor/stakeholder/objective usage nodes.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Usage(SyntaxNode);
impl AstNode for Usage {
fn can_cast(kind: SyntaxKind) -> bool {
matches!(
kind,
SyntaxKind::USAGE
| SyntaxKind::SUBJECT_USAGE
| SyntaxKind::ACTOR_USAGE
| SyntaxKind::STAKEHOLDER_USAGE
| SyntaxKind::OBJECTIVE_USAGE
)
}
fn cast(node: SyntaxNode) -> Option<Self> {
if Self::can_cast(node.kind()) {
Some(Self(node))
} else {
None
}
}
fn syntax(&self) -> &SyntaxNode {
&self.0
}
}
impl Usage {
has_token_method!(is_ref, REF_KW, "ref part p;");
has_token_method!(is_readonly, READONLY_KW, "readonly attribute x;");
has_token_method!(is_derived, DERIVED_KW, "derived attribute x;");
has_token_method!(is_abstract, ABSTRACT_KW, "abstract part p;");
has_token_method!(is_variation, VARIATION_KW, "variation part p;");
has_token_method!(is_var, VAR_KW, "var attribute x;");
has_token_method!(is_all, ALL_KW, "feature all instances : C[*]");
has_token_method!(is_parallel, PARALLEL_KW, "parallel action a;");
has_token_method!(
is_individual,
INDIVIDUAL_KW,
"individual part earth : Earth;"
);
has_token_method!(is_end, END_KW, "end part wheel : Wheel[4];");
has_token_method!(is_default, DEFAULT_KW, "default attribute rgb : RGB;");
has_token_method!(is_ordered, ORDERED_KW, "ordered part wheels : Wheel[4];");
has_token_method!(
is_nonunique,
NONUNIQUE_KW,
"nonunique attribute scores : Integer[*];"
);
has_token_method!(is_portion, PORTION_KW, "portion part fuelLoad : Fuel;");
token_to_enum_method!(direction, Direction, [
IN_KW => In,
OUT_KW => Out,
INOUT_KW => InOut,
]);
pub fn multiplicity(&self) -> Option<(Option<u64>, Option<u64>)> {
if let Some(mult_node) = self
.0
.children()
.find(|n| n.kind() == SyntaxKind::MULTIPLICITY)
{
return Self::parse_multiplicity_node(&mult_node);
}
for child in self.0.children() {
match child.kind() {
SyntaxKind::TYPING | SyntaxKind::SPECIALIZATION => {
if let Some(mult_node) = child
.children()
.find(|n| n.kind() == SyntaxKind::MULTIPLICITY)
{
return Self::parse_multiplicity_node(&mult_node);
}
}
_ => {}
}
}
None
}
fn parse_multiplicity_node(mult_node: &SyntaxNode) -> Option<(Option<u64>, Option<u64>)> {
let mut lower: Option<u64> = None;
let mut upper: Option<u64> = None;
let mut found_dot_dot = false;
fn find_bounds(
node: &SyntaxNode,
lower: &mut Option<u64>,
upper: &mut Option<u64>,
found_dot_dot: &mut bool,
) {
for child in node.children_with_tokens() {
match child.kind() {
SyntaxKind::INTEGER => {
if let Some(token) = child.into_token() {
let text = token.text();
if let Ok(val) = text.parse::<u64>() {
if *found_dot_dot {
*upper = Some(val);
} else {
*lower = Some(val);
}
}
}
}
SyntaxKind::STAR => {
if *found_dot_dot {
*upper = None;
} else {
*lower = None;
}
}
SyntaxKind::DOT_DOT => {
*found_dot_dot = true;
}
_ => {
if let Some(node) = child.into_node() {
find_bounds(&node, lower, upper, found_dot_dot);
}
}
}
}
}
find_bounds(mult_node, &mut lower, &mut upper, &mut found_dot_dot);
if !found_dot_dot && lower.is_some() {
upper = lower;
}
if lower.is_some() || upper.is_some() || found_dot_dot {
Some((lower, upper))
} else {
Some((lower, upper))
}
}
pub fn prefix_metadata(&self) -> Vec<PrefixMetadata> {
let mut result = collect_prefix_metadata(&self.0);
for child in self.0.children() {
if child.kind() == SyntaxKind::PREFIX_METADATA {
if let Some(pm) = PrefixMetadata::cast(child) {
result.push(pm);
}
}
}
result
}
first_child_method!(name, Name);
children_vec_method!(names, Name);
first_child_method!(typing, Typing);
child_after_keyword_method!(
of_type,
QualifiedName,
OF_KW,
"Get the 'of Type' qualified name for messages/items (e.g., `message sendCmd of SensedSpeed`)."
);
children_method!(specializations, Specialization);
first_child_method!(body, NamespaceBody);
first_child_method!(value_expression, Expression);
first_child_method!(from_to_clause, FromToClause);
first_child_method!(transition_usage, TransitionUsage);
first_child_method!(succession, Succession);
first_child_method!(perform_action_usage, PerformActionUsage);
first_child_method!(accept_action_usage, AcceptActionUsage);
first_child_method!(send_action_usage, SendActionUsage);
first_child_method!(requirement_verification, RequirementVerification);
first_child_method!(connect_usage, ConnectUsage);
first_child_method!(constraint_body, ConstraintBody);
first_child_method!(connector_part, ConnectorPart);
first_child_method!(binding_connector, BindingConnector);
has_token_method!(is_exhibit, EXHIBIT_KW, "exhibit state s;");
has_token_method!(is_include, INCLUDE_KW, "include use case u;");
has_token_method!(is_allocate, ALLOCATE_KW, "allocate x to y;");
has_token_method!(is_flow, FLOW_KW, "flow x to y;");
pub fn direct_flow_endpoints(&self) -> (Option<QualifiedName>, Option<QualifiedName>) {
if !self.is_flow() {
return (None, None);
}
if self.from_to_clause().is_some() {
return (None, None);
}
let mut found_flow = false;
let mut found_to = false;
let mut source: Option<QualifiedName> = None;
let mut target: Option<QualifiedName> = None;
for elem in self.0.children_with_tokens() {
if let Some(token) = elem.as_token() {
if token.kind() == SyntaxKind::FLOW_KW {
found_flow = true;
} else if token.kind() == SyntaxKind::TO_KW && found_flow {
found_to = true;
}
} else if let Some(node) = elem.as_node() {
if found_flow && node.kind() == SyntaxKind::QUALIFIED_NAME {
if !found_to && source.is_none() {
source = QualifiedName::cast(node.clone());
} else if found_to && target.is_none() {
target = QualifiedName::cast(node.clone());
}
}
}
}
(source, target)
}
has_token_method!(is_assert, ASSERT_KW, "assert constraint c;");
has_token_method!(is_assume, ASSUME_KW, "assume constraint c;");
has_token_method!(is_require, REQUIRE_KW, "require constraint c;");
token_to_enum_method!(usage_kind, UsageKind, [
PART_KW => Part,
ATTRIBUTE_KW => Attribute,
PORT_KW => Port,
ITEM_KW => Item,
ACTION_KW => Action,
STATE_KW => State,
CONSTRAINT_KW => Constraint,
REQUIREMENT_KW => Requirement,
CASE_KW => Case,
CALC_KW => Calc,
CONNECTION_KW => Connection,
INTERFACE_KW => Interface,
ALLOCATION_KW => Allocation,
FLOW_KW => Flow,
MESSAGE_KW => Flow,
OCCURRENCE_KW => Occurrence,
REF_KW => Ref,
FEATURE_KW => Feature,
STEP_KW => Step,
EXPR_KW => Expr,
CONNECTOR_KW => Connector,
]);
}
/// The kind of a [`Usage`], derived from its introducing keyword.
/// Keep in sync with the `usage_kind` token mapping on `Usage`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum UsageKind {
Part,
Attribute,
Port,
Item,
Action,
State,
Constraint,
Requirement,
Case,
Calc,
Connection,
Interface,
Allocation,
Flow,
Occurrence,
Ref,
Feature,
Step,
Expr,
Connector,
}
/// Feature direction (`in`, `out`, `inout`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Direction {
In,
Out,
InOut,
}
// A declared name, possibly with a short name.
ast_node!(Name, NAME);
impl Name {
first_child_method!(short_name, ShortName);
/// The raw text of the name token.
/// NOTE(review): unlike `ShortName::text` and `QualifiedName::segments`,
/// this does not pass through `strip_unrestricted_name`, so a quoted name
/// keeps its surrounding `'` quotes here — confirm this asymmetry is intended.
pub fn text(&self) -> Option<String> {
find_name_token(&self.0).map(|t| t.text().to_string())
}
}
// A short name (the bracketed alternative identifier of a declared name).
ast_node!(ShortName, SHORT_NAME);
impl ShortName {
/// The short-name text with surrounding quotes stripped if present.
pub fn text(&self) -> Option<String> {
find_name_token(&self.0).map(|t| strip_unrestricted_name(t.text()))
}
}
// A (possibly multi-segment) qualified name, e.g. `A::B::C` or `a.b`.
ast_node!(QualifiedName, QUALIFIED_NAME);
impl QualifiedName {
/// The name segments in order, quotes stripped from quoted segments.
pub fn segments(&self) -> Vec<String> {
self.0
.children_with_tokens()
.filter_map(|e| e.into_token())
.filter(|t| is_name_token(t.kind()))
.map(|t| strip_unrestricted_name(t.text()))
.collect()
}
/// The segments paired with the source range of each segment token.
pub fn segments_with_ranges(&self) -> Vec<(String, rowan::TextRange)> {
self.0
.children_with_tokens()
.filter_map(|e| e.into_token())
.filter(|t| is_name_token(t.kind()))
.map(|t| (strip_unrestricted_name(t.text()), t.text_range()))
.collect()
}
/// Join the segments with the separator actually used in the source:
/// `.` when any DOT token is present, `::` otherwise.
fn to_string_inner(&self) -> String {
let has_dot = has_token(&self.0, SyntaxKind::DOT);
let separator = if has_dot { "." } else { "::" };
self.segments().join(separator)
}
}
impl std::fmt::Display for QualifiedName {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Delegate to the separator-aware join.
        f.write_str(&self.to_string_inner())
    }
}
// A `: Type` clause.
ast_node!(Typing, TYPING);
impl Typing {
first_child_method!(target, QualifiedName);
}
// A specialization/subsetting/redefinition/... relationship clause.
ast_node!(Specialization, SPECIALIZATION);
impl Specialization {
// Maps the relationship token to its kind. Note that `:>` maps to
// Specializes and `:>>` to Redefines, while FROM/TO/CHAINS all fold
// into FeatureChain.
token_to_enum_method!(kind, SpecializationKind, [
COLON_GT => Specializes,
COLON_GT_GT => Redefines,
COLON_COLON_GT => FeatureChain,
SPECIALIZES_KW => Specializes,
SUBSETS_KW => Subsets,
REDEFINES_KW => Redefines,
REFERENCES_KW => References,
TILDE => Conjugates,
FROM_KW => FeatureChain,
TO_KW => FeatureChain,
CHAINS_KW => FeatureChain,
]);
/// Whether the shorthand `:>>` redefinition operator was used.
pub fn is_shorthand_redefines(&self) -> bool {
has_token(&self.0, SyntaxKind::COLON_GT_GT)
}
first_child_method!(target, QualifiedName);
}
/// The kind of a [`Specialization`] relationship, derived from its
/// operator/keyword token (see the `kind` mapping on `Specialization`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SpecializationKind {
Specializes,
Subsets,
Redefines,
References,
Conjugates,
FeatureChain,
}
// A `from ... to ...` clause with its source and target sub-nodes.
ast_node!(FromToClause, FROM_TO_CLAUSE);
impl FromToClause {
first_child_method!(source, FromToSource);
first_child_method!(target, FromToTarget);
}
// The `from` side of a from/to clause.
ast_node!(FromToSource, FROM_TO_SOURCE);
impl FromToSource {
first_child_method!(target, QualifiedName);
}
// The `to` side of a from/to clause.
ast_node!(FromToTarget, FROM_TO_TARGET);
impl FromToTarget {
first_child_method!(target, QualifiedName);
}
// A state-machine transition usage.
ast_node!(TransitionUsage, TRANSITION_USAGE);
impl TransitionUsage {
/// The transition's declared name, if any. A name must precede the
/// structural keywords (`first`, `accept`, `then`, `do`, `if`, `via`);
/// once one of those tokens is seen, the transition is unnamed.
pub fn name(&self) -> Option<Name> {
// NOTE(review): presumably the same SyntaxKind as the file-level import
// via `super::syntax_kind` — confirm the re-export before removing.
use crate::parser::SyntaxKind;
for child in self.0.children_with_tokens() {
match &child {
rowan::NodeOrToken::Token(t) => {
match t.kind() {
SyntaxKind::FIRST_KW
| SyntaxKind::ACCEPT_KW
| SyntaxKind::THEN_KW
| SyntaxKind::DO_KW
| SyntaxKind::IF_KW
| SyntaxKind::VIA_KW => return None,
_ => {}
}
}
rowan::NodeOrToken::Node(n) => {
if let Some(name) = Name::cast(n.clone()) {
return Some(name);
}
}
}
}
None
}
children_method!(specializations, Specialization);
// Source and target states are the first and second specialization.
source_target_pair!(source, target, specializations, Specialization);
child_after_keyword_method!(
accept_payload_name,
Name,
ACCEPT_KW,
"Get the accept payload name (e.g., `ignitionCmd` in `accept ignitionCmd:IgnitionCmd`)."
);
first_child_method!(accept_typing, Typing);
child_after_keyword_method!(
accept_via,
QualifiedName,
VIA_KW,
"Get the 'via' target for the accept trigger (e.g., `ignitionCmdPort` in `accept ignitionCmd via ignitionCmdPort`)."
);
}
// A `perform action ...` usage.
ast_node!(PerformActionUsage, PERFORM_ACTION_USAGE);
impl PerformActionUsage {
first_child_method!(name, Name);
first_child_method!(typing, Typing);
children_method!(specializations, Specialization);
/// The performed action reference: the first specialization clause.
pub fn performed(&self) -> Option<Specialization> {
self.specializations().next()
}
first_child_method!(body, NamespaceBody);
}
// An `accept ...` action usage.
ast_node!(AcceptActionUsage, ACCEPT_ACTION_USAGE);
impl AcceptActionUsage {
first_child_method!(name, Name);
first_child_method!(trigger, Expression);
first_child_method!(accepted, QualifiedName);
child_after_keyword_method!(
via,
QualifiedName,
VIA_KW,
"Get the 'via' target port (e.g., `ignitionCmdPort` in `accept ignitionCmd via ignitionCmdPort`)."
);
}
// A `send <payload> via/to ...` action usage.
ast_node!(SendActionUsage, SEND_ACTION_USAGE);
impl SendActionUsage {
first_child_method!(payload, Expression);
children_method!(qualified_names, QualifiedName);
}
// A `for <var> in ... { ... }` loop action.
ast_node!(ForLoopActionUsage, FOR_LOOP_ACTION_USAGE);
impl ForLoopActionUsage {
first_child_method!(variable_name, Name);
first_child_method!(typing, Typing);
first_child_method!(body, NamespaceBody);
body_members_method!();
}
// An `if ... then ... [else ...]` action.
ast_node!(IfActionUsage, IF_ACTION_USAGE);
impl IfActionUsage {
descendants_method!(
expressions,
Expression,
"Get descendant expressions (condition and then/else targets)."
);
children_method!(qualified_names, QualifiedName);
first_child_method!(body, NamespaceBody);
}
// A `while ... { ... }` loop action.
ast_node!(WhileLoopActionUsage, WHILE_LOOP_ACTION_USAGE);
impl WhileLoopActionUsage {
descendants_method!(
expressions,
Expression,
"Get descendant expressions (condition)."
);
first_child_method!(body, NamespaceBody);
body_members_method!();
}
// An `entry`/`do`/`exit` subaction within a state.
ast_node!(StateSubaction, STATE_SUBACTION);
impl StateSubaction {
find_token_kind_method!(
kind,
[ENTRY_KW, DO_KW, EXIT_KW],
"Get the state subaction kind (entry, do, or exit)."
);
first_child_method!(name, Name);
first_child_method!(body, NamespaceBody);
has_token_method!(is_entry, ENTRY_KW, "entry action initial;");
has_token_method!(is_do, DO_KW, "do action running;");
has_token_method!(is_exit, EXIT_KW, "exit action cleanup;");
}
// A `fork`/`join`/`merge`/`decide` control node in an action flow.
ast_node!(ControlNode, CONTROL_NODE);
impl ControlNode {
find_token_kind_method!(
kind,
[FORK_KW, JOIN_KW, MERGE_KW, DECIDE_KW],
"Get the control node kind (fork, join, merge, or decide)."
);
first_child_method!(name, Name);
first_child_method!(body, NamespaceBody);
has_token_method!(is_fork, FORK_KW, "fork forkNode;");
has_token_method!(is_join, JOIN_KW, "join joinNode;");
has_token_method!(is_merge, MERGE_KW, "merge mergeNode;");
has_token_method!(is_decide, DECIDE_KW, "decide decideNode;");
}
// A `satisfy`/`verify` requirement clause, optionally negated or asserted.
ast_node!(RequirementVerification, REQUIREMENT_VERIFICATION);
impl RequirementVerification {
has_token_method!(is_satisfy, SATISFY_KW, "satisfy requirement R;");
has_token_method!(is_verify, VERIFY_KW, "verify requirement R;");
has_token_method!(is_negated, NOT_KW, "not satisfy requirement R;");
has_token_method!(is_asserted, ASSERT_KW, "assert satisfy requirement R;");
first_child_method!(requirement, QualifiedName);
first_child_method!(typing, Typing);
child_after_keyword_method!(
by_target,
QualifiedName,
BY_KW,
"Get the 'by' target (e.g., `vehicle_b` in `satisfy R by vehicle_b`)."
);
}
// A `connection`/`connect` element with two or more ends.
ast_node!(Connector, CONNECTOR);
impl Connector {
first_child_method!(name, Name);
first_child_method!(typing, Typing);
first_child_method!(connector_part, ConnectorPart);
/// All connector ends: those inside the CONNECTOR_PART child when present,
/// otherwise any ConnectorEnd children sitting directly on this node.
pub fn ends(&self) -> impl Iterator<Item = ConnectorEnd> + '_ {
let from_part: Vec<_> = self
.connector_part()
.into_iter()
.flat_map(|cp| cp.ends().collect::<Vec<_>>())
.collect();
// Fall back to direct children only when the part yielded nothing.
let direct: Vec<_> = if from_part.is_empty() {
self.0.children().filter_map(ConnectorEnd::cast).collect()
} else {
Vec::new()
};
from_part.into_iter().chain(direct)
}
first_child_method!(body, NamespaceBody);
}
// A standalone `connect a to b` usage.
ast_node!(ConnectUsage, CONNECT_USAGE);
impl ConnectUsage {
first_child_method!(connector_part, ConnectorPart);
}
// The `a to b` part of a connector, holding the ends.
ast_node!(ConnectorPart, CONNECTOR_PART);
impl ConnectorPart {
children_method!(ends, ConnectorEnd);
// Source and target are the first and second end.
source_target_pair!(source, target, ends, ConnectorEnd);
}
/// One end of a connector. The parser may produce either a
/// `CONNECTION_END` or a `CONNECTOR_END` node for it, so this wrapper
/// accepts both kinds.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ConnectorEnd(SyntaxNode);

impl AstNode for ConnectorEnd {
    fn can_cast(kind: SyntaxKind) -> bool {
        matches!(
            kind,
            SyntaxKind::CONNECTION_END | SyntaxKind::CONNECTOR_END
        )
    }
    fn cast(node: SyntaxNode) -> Option<Self> {
        // Wrap the node only when its kind is one of the accepted two.
        Self::can_cast(node.kind()).then(|| Self(node))
    }
    fn syntax(&self) -> &SyntaxNode {
        &self.0
    }
}
impl ConnectorEnd {
    /// Locate the `CONNECTOR_END_REFERENCE` child, if any, together with
    /// a flag saying whether it carries a `::>` or `references` marker.
    fn end_reference_info(&self) -> Option<(SyntaxNode, bool)> {
        let ref_node = self
            .0
            .children()
            .find(|n| n.kind() == SyntaxKind::CONNECTOR_END_REFERENCE)?;
        let referenced = ref_node.children_with_tokens().any(|e| {
            matches!(
                e.kind(),
                SyntaxKind::COLON_COLON_GT | SyntaxKind::REFERENCES_KW
            )
        });
        Some((ref_node, referenced))
    }
    /// The qualified name this end ultimately points at.
    pub fn target(&self) -> Option<QualifiedName> {
        match self.end_reference_info() {
            Some((ref_node, referenced)) => {
                let mut names = ref_node.children().filter_map(QualifiedName::cast);
                let first = names.next();
                if referenced {
                    // With a reference marker, the second qualified name
                    // (when present) is the target; otherwise fall back
                    // to the first.
                    names.next().or(first)
                } else {
                    first
                }
            }
            // No reference node: take the first qualified name directly
            // under this end.
            None => self.0.children().find_map(QualifiedName::cast),
        }
    }
    /// The declared endpoint name — only meaningful when a `::>` or
    /// `references` marker is present (the first qualified name in the
    /// reference node); `None` otherwise.
    pub fn endpoint_name(&self) -> Option<QualifiedName> {
        let (ref_node, referenced) = self.end_reference_info()?;
        if referenced {
            ref_node.children().find_map(QualifiedName::cast)
        } else {
            None
        }
    }
}
// A binding connector (`BINDING_CONNECTOR`) relating two qualified names.
ast_node!(BindingConnector, BINDING_CONNECTOR);
impl BindingConnector {
    children_method!(qualified_names, QualifiedName);
    // source/target accessors over the qualified names — presumably the
    // first and second name; macro defined elsewhere in this file.
    source_target_pair!(source, target, qualified_names, QualifiedName);
}
// A succession (`SUCCESSION`) ordering two items.
ast_node!(Succession, SUCCESSION);
impl Succession {
    children_method!(items, SuccessionItem);
    source_target_pair!(source, target, items, SuccessionItem);
    // Usage nodes that appear directly under the succession node.
    children_method!(inline_usages, Usage);
}
// One endpoint of a succession (`SUCCESSION_ITEM`): either a reference
// to an existing element (qualified name) or an inline usage.
ast_node!(SuccessionItem, SUCCESSION_ITEM);
impl SuccessionItem {
    first_child_method!(target, QualifiedName);
    first_child_method!(usage, Usage);
}
// The body of a constraint (`CONSTRAINT_BODY`): an optional expression
// plus nested namespace members.
ast_node!(ConstraintBody, CONSTRAINT_BODY);
impl ConstraintBody {
    first_child_method!(expression, Expression);
    children_method!(members, NamespaceMember);
}
// An expression node (`EXPRESSION`); reference-extraction helpers live
// in the inherent impl below.
ast_node!(Expression, EXPRESSION);
/// A feature-chain reference found inside an expression
/// (e.g. `engine.cylinders`).
#[derive(Debug, Clone)]
pub struct FeatureChainRef {
    /// `(text, range)` for each identifier segment, in source order.
    pub parts: Vec<(String, rowan::TextRange)>,
    /// Range covering the whole chain in the source text.
    pub full_range: rowan::TextRange,
}
impl Expression {
    /// Collect every `IDENT` token anywhere in this expression together
    /// with its source range, depth-first in source order.
    pub fn references(&self) -> Vec<(String, rowan::TextRange)> {
        let mut refs = Vec::new();
        self.collect_references(&self.0, &mut refs);
        refs
    }
    /// Collect feature-chain references: each `QUALIFIED_NAME` node
    /// becomes one multi-part chain of its identifier tokens, while
    /// `IDENT` tokens outside any qualified name become one-part chains.
    pub fn feature_chains(&self) -> Vec<FeatureChainRef> {
        let mut chains = Vec::new();
        self.collect_feature_chains(&self.0, &mut chains);
        chains
    }
    /// Collect `(type_name, arg_name, arg_range)` triples for named
    /// arguments of constructor calls (`new Type(arg = value)`).
    pub fn named_constructor_args(&self) -> Vec<(String, String, rowan::TextRange)> {
        let mut results = Vec::new();
        self.collect_named_constructor_args(&self.0, &mut results);
        results
    }
    // Walk the tree looking for `new` tokens. For each, the first
    // QUALIFIED_NAME among the *following siblings* is taken as the
    // constructed type, and every following ARGUMENT_LIST sibling is
    // scanned for named arguments.
    // NOTE(review): the sibling window runs to the end of the parent, so
    // two `new` expressions that are direct siblings would share
    // argument lists — presumably each constructor is nested in its own
    // node by the parser; verify against the grammar.
    fn collect_named_constructor_args(
        &self,
        node: &SyntaxNode,
        results: &mut Vec<(String, String, rowan::TextRange)>,
    ) {
        let children: Vec<_> = node.children_with_tokens().collect();
        for (i, child) in children.iter().enumerate() {
            if child.as_token().map(|t| t.kind()) == Some(SyntaxKind::NEW_KW) {
                let rest = &children[i + 1..];
                let type_name = rest
                    .iter()
                    .filter_map(|c| c.as_node())
                    .find(|n| n.kind() == SyntaxKind::QUALIFIED_NAME)
                    .map(|n| n.text().to_string());
                if let Some(type_name) = type_name {
                    for arg_list in rest
                        .iter()
                        .filter_map(|c| c.as_node())
                        .filter(|n| n.kind() == SyntaxKind::ARGUMENT_LIST)
                    {
                        self.extract_named_args_from_list(arg_list, &type_name, results);
                    }
                }
            }
        }
        // Recurse into child nodes to find nested constructor calls.
        for child in node.children() {
            self.collect_named_constructor_args(&child, results);
        }
    }
    // Scan nested ARGUMENT_LIST children for `ident = ...` pairs: an
    // IDENT counts as a named argument when the first non-whitespace
    // *token* after it is `=` (intervening child nodes are skipped).
    fn extract_named_args_from_list(
        &self,
        arg_list: &SyntaxNode,
        type_name: &str,
        results: &mut Vec<(String, String, rowan::TextRange)>,
    ) {
        for child in arg_list
            .children()
            .filter(|c| c.kind() == SyntaxKind::ARGUMENT_LIST)
        {
            let tokens: Vec<_> = child.children_with_tokens().collect();
            for (idx, elem) in tokens.iter().enumerate() {
                if let Some(token) = elem.as_token().filter(|t| t.kind() == SyntaxKind::IDENT) {
                    let has_eq = tokens[idx + 1..]
                        .iter()
                        .filter_map(|e| e.as_token())
                        .find(|t| t.kind() != SyntaxKind::WHITESPACE)
                        .map(|t| t.kind() == SyntaxKind::EQ)
                        .unwrap_or(false);
                    if has_eq {
                        results.push((
                            type_name.to_string(),
                            token.text().to_string(),
                            token.text_range(),
                        ));
                    }
                }
            }
            // Argument lists can nest; recurse with the same type name.
            self.extract_named_args_from_list(&child, type_name, results);
        }
    }
    // Depth-first walk. A QUALIFIED_NAME is emitted as one chain of its
    // IDENT tokens and is not descended into further; loose IDENT tokens
    // elsewhere become single-part chains.
    fn collect_feature_chains(&self, node: &SyntaxNode, chains: &mut Vec<FeatureChainRef>) {
        if node.kind() == SyntaxKind::QUALIFIED_NAME {
            let parts: Vec<_> = node
                .children_with_tokens()
                .filter_map(|c| c.into_token())
                .filter(|t| t.kind() == SyntaxKind::IDENT)
                .map(|t| (t.text().to_string(), t.text_range()))
                .collect();
            if !parts.is_empty() {
                chains.push(FeatureChainRef {
                    parts,
                    full_range: node.text_range(),
                });
            }
            return;
        }
        for child in node.children_with_tokens() {
            match child {
                rowan::NodeOrToken::Token(t) if t.kind() == SyntaxKind::IDENT => {
                    chains.push(FeatureChainRef {
                        parts: vec![(t.text().to_string(), t.text_range())],
                        full_range: t.text_range(),
                    });
                }
                rowan::NodeOrToken::Node(n) => self.collect_feature_chains(&n, chains),
                _ => {}
            }
        }
    }
    // Depth-first collection of every IDENT token with its range.
    // NOTE(review): keyword-like names (START_KW etc., cf. is_name_token
    // at the top of the file) are not collected here — confirm intended.
    fn collect_references(&self, node: &SyntaxNode, refs: &mut Vec<(String, rowan::TextRange)>) {
        for child in node.children_with_tokens() {
            match child {
                rowan::NodeOrToken::Token(t) if t.kind() == SyntaxKind::IDENT => {
                    refs.push((t.text().to_string(), t.text_range()));
                }
                rowan::NodeOrToken::Node(n) => self.collect_references(&n, refs),
                _ => {}
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::parser::parse_sysml;

    // A bare package declaration parses into a single Package member.
    #[test]
    fn test_ast_package() {
        let parsed = parse_sysml("package Test;");
        let root = SourceFile::cast(parsed.syntax()).unwrap();
        let members: Vec<_> = root.members().collect();
        assert_eq!(members.len(), 1);
        match &members[0] {
            NamespaceMember::Package(pkg) => {
                assert_eq!(pkg.name().unwrap().text(), Some("Test".to_string()));
            }
            _ => panic!("expected Package"),
        }
    }

    // A wildcard import exposes its flags and target segments.
    #[test]
    fn test_ast_import() {
        let parsed = parse_sysml("import ISQ::*;");
        let root = SourceFile::cast(parsed.syntax()).unwrap();
        let members: Vec<_> = root.members().collect();
        assert_eq!(members.len(), 1);
        match &members[0] {
            NamespaceMember::Import(imp) => {
                assert!(!imp.is_all());
                assert!(imp.is_wildcard());
                assert!(!imp.is_recursive());
                assert_eq!(imp.target().unwrap().segments(), vec!["ISQ"]);
            }
            _ => panic!("expected Import"),
        }
    }

    // `import all ...::**` sets both the `all` and recursive flags.
    #[test]
    fn test_ast_import_recursive() {
        let parsed = parse_sysml("import all Library::**;");
        assert!(parsed.ok(), "errors: {:?}", parsed.errors);
        let root = SourceFile::cast(parsed.syntax()).unwrap();
        let members: Vec<_> = root.members().collect();
        match &members[0] {
            NamespaceMember::Import(imp) => {
                assert!(imp.is_all());
                assert!(imp.is_recursive());
            }
            _ => panic!("expected Import"),
        }
    }

    // An abstract part definition with a specialization.
    #[test]
    fn test_ast_definition() {
        let parsed = parse_sysml("abstract part def Vehicle :> Base;");
        let root = SourceFile::cast(parsed.syntax()).unwrap();
        let members: Vec<_> = root.members().collect();
        match &members[0] {
            NamespaceMember::Definition(def) => {
                assert!(def.is_abstract());
                assert_eq!(def.definition_kind(), Some(DefinitionKind::Part));
                assert_eq!(def.name().unwrap().text(), Some("Vehicle".to_string()));
                let specializations: Vec<_> = def.specializations().collect();
                assert_eq!(specializations.len(), 1);
                assert_eq!(
                    specializations[0].kind(),
                    Some(SpecializationKind::Specializes)
                );
            }
            _ => panic!("expected Definition"),
        }
    }

    // A typed `ref part` usage exposes its name and typing target.
    #[test]
    fn test_ast_usage() {
        let parsed = parse_sysml("ref part engine : Engine;");
        let root = SourceFile::cast(parsed.syntax()).unwrap();
        let members: Vec<_> = root.members().collect();
        match &members[0] {
            NamespaceMember::Usage(usage) => {
                assert!(usage.is_ref());
                assert_eq!(usage.name().unwrap().text(), Some("engine".to_string()));
                let target = usage.typing().unwrap().target().unwrap();
                assert_eq!(target.segments(), vec!["Engine"]);
            }
            _ => panic!("expected Usage"),
        }
    }

    // A message usage nested in a part body still gets a name.
    #[test]
    fn test_message_usage_name() {
        let parsed = parse_sysml("part p { message of ignitionCmd : IgnitionCmd; }");
        let root = SourceFile::cast(parsed.syntax()).unwrap();
        let members: Vec<_> = root.members().collect();
        match &members[0] {
            NamespaceMember::Usage(part_usage) => match part_usage.body() {
                Some(body) => {
                    let inner_members: Vec<_> = body.members().collect();
                    match &inner_members[0] {
                        NamespaceMember::Usage(usage) => {
                            let name = usage.name();
                            assert!(name.is_some(), "message usage should have a name");
                            assert_eq!(name.unwrap().text(), Some("ignitionCmd".to_string()));
                        }
                        _ => panic!("expected Usage for part p with message inside"),
                    }
                }
                None => panic!("expected Usage for part p with message inside"),
            },
            _ => panic!("expected Usage for part p with message inside"),
        }
    }
}