use std::sync::Arc;
use cairo_lang_defs::db::DefsGroup;
use cairo_lang_defs::ids::{
LanguageElementId, LookupItemId, MacroDeclarationId, ModuleId, ModuleItemId,
};
use cairo_lang_diagnostics::{Diagnostics, Maybe, skip_diagnostic};
use cairo_lang_filesystem::db::FilesGroup;
use cairo_lang_filesystem::ids::{CodeMapping, CodeOrigin, SmolStrId};
use cairo_lang_filesystem::span::{TextSpan, TextWidth};
use cairo_lang_parser::macro_helpers::as_expr_macro_token_tree;
use cairo_lang_syntax::attribute::structured::{Attribute, AttributeListStructurize};
use cairo_lang_syntax::node::ast::{MacroElement, MacroParam};
use cairo_lang_syntax::node::ids::SyntaxStablePtrId;
use cairo_lang_syntax::node::kind::SyntaxKind;
use cairo_lang_syntax::node::{SyntaxNode, Terminal, TypedStablePtr, TypedSyntaxNode, ast};
use cairo_lang_utils::ordered_hash_map::OrderedHashMap;
use salsa::Database;
use crate::SemanticDiagnostic;
use crate::diagnostic::{SemanticDiagnosticKind, SemanticDiagnostics, SemanticDiagnosticsBuilder};
use crate::expr::inference::InferenceId;
use crate::keyword::{MACRO_CALL_SITE, MACRO_DEF_SITE};
use crate::resolve::{Resolver, ResolverData};
/// Identifier of a single `$(...)` repetition site, unique within one matching
/// session (allocated from `MatcherContext::next_repetition_id`).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct RepetitionId(usize);
type Captures<'db> = OrderedHashMap<SmolStrId<'db>, Vec<CapturedValue<'db>>>;
/// Mutable state threaded through macro-rule matching and expansion.
#[derive(Default, Clone, Debug)]
pub struct MatcherContext<'db> {
    // Values captured for each placeholder during matching.
    pub captures: Captures<'db>,
    // The repetition each placeholder was (last) captured under.
    pub placeholder_to_rep_id: OrderedHashMap<SmolStrId<'db>, RepetitionId>,
    // Stack of repetitions currently being matched (innermost last).
    pub current_repetition_stack: Vec<RepetitionId>,
    // Next value to allocate as a `RepetitionId`.
    pub next_repetition_id: usize,
    // During expansion: the current iteration index per repetition.
    pub repetition_indices: OrderedHashMap<RepetitionId, usize>,
    // How many iterations each repetition matched.
    pub repetition_match_counts: OrderedHashMap<RepetitionId, usize>,
    // The `*`/`+`/`?` operator of each repetition, for constraint validation.
    pub repetition_operators: OrderedHashMap<RepetitionId, ast::MacroRepetitionOperator<'db>>,
}
/// Semantic data computed for a `macro` declaration item.
#[derive(Debug, Clone, PartialEq, Eq, salsa::Update)]
pub struct MacroDeclarationData<'db> {
    // The declaration's rules, in source order.
    rules: Vec<MacroRuleData<'db>>,
    // Structured attributes of the declaration.
    attributes: Vec<Attribute<'db>>,
    // Diagnostics emitted while computing this data.
    diagnostics: Diagnostics<'db, SemanticDiagnostic<'db>>,
    resolver_data: Arc<ResolverData<'db>>,
}
/// A single `pattern => expansion` rule of a macro declaration.
#[derive(Debug, Clone, PartialEq, Eq, salsa::Update)]
pub struct MacroRuleData<'db> {
    // The rule's matcher pattern (lhs), including its wrapping delimiters.
    pub pattern: ast::WrappedMacro<'db>,
    // The rule's expansion (rhs) elements.
    pub expansion: ast::MacroElements<'db>,
    // `Err` if validating the expansion against the pattern reported a diagnostic.
    pub err: Maybe<()>,
}
/// The kind of input a macro placeholder captures.
#[derive(Debug, Clone, PartialEq, Eq)]
enum PlaceholderKind {
    // Captures a single identifier terminal.
    Identifier,
    // Captures a full expression parsed from the token stream.
    Expr,
}
impl<'db> From<ast::MacroParamKind<'db>> for PlaceholderKind {
    /// Lowers the parsed param-kind syntax into its semantic counterpart.
    fn from(param_kind: ast::MacroParamKind<'db>) -> Self {
        match param_kind {
            ast::MacroParamKind::Expr(_) => Self::Expr,
            ast::MacroParamKind::Identifier(_) => Self::Identifier,
            // The parser is responsible for rejecting a missing kind.
            ast::MacroParamKind::Missing(_) => unreachable!(
                "Missing macro rule param kind, should have been handled by the parser."
            ),
        }
    }
}
/// A single value captured by a placeholder: its text and the location of the
/// input it came from (used for code mappings during expansion).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CapturedValue<'db> {
    pub text: String,
    pub stable_ptr: SyntaxStablePtrId<'db>,
}
/// Computes the semantic data of a macro declaration: structured attributes,
/// resolver data, its rules, and diagnostics from validating each rule's
/// expansion against its pattern.
fn priv_macro_declaration_data<'db>(
    db: &'db dyn Database,
    macro_declaration_id: MacroDeclarationId<'db>,
) -> Maybe<MacroDeclarationData<'db>> {
    let module_id = macro_declaration_id.parent_module(db);
    let mut diagnostics = SemanticDiagnostics::new(module_id);
    let macro_declaration_syntax = db.module_macro_declaration_by_id(macro_declaration_id)?;
    // User-defined inline macros are gated behind an experimental crate feature.
    if !are_user_defined_inline_macros_enabled(db, module_id) {
        diagnostics.report(
            macro_declaration_syntax.stable_ptr(db).untyped(),
            SemanticDiagnosticKind::UserDefinedInlineMacrosDisabled,
        );
    }
    let attributes = macro_declaration_syntax.attributes(db).structurize(db);
    let inference_id = InferenceId::LookupItemDeclaration(LookupItemId::ModuleItem(
        ModuleItemId::MacroDeclaration(macro_declaration_id),
    ));
    let resolver = Resolver::new(db, module_id, inference_id);
    let mut rules = vec![];
    for rule_syntax in macro_declaration_syntax.rules(db).elements(db) {
        let pattern = rule_syntax.lhs(db);
        let expansion = rule_syntax.rhs(db).elements(db);
        let pattern_elements = get_macro_elements(db, pattern.clone());
        // For each placeholder in the pattern, record the stack of repetition
        // ids enclosing its capture site.
        let mut placeholder_paths: OrderedHashMap<SmolStrId<'db>, Vec<usize>> = Default::default();
        let mut next_rep_id = 0;
        collect_placeholder_paths(
            db,
            pattern_elements.elements(db),
            &mut vec![],
            &mut next_rep_id,
            &mut placeholder_paths,
        );
        // Validate the expansion: placeholders must exist in the pattern and be
        // used at compatible repetition depths.
        let mut ctx = ExpansionCheckCtx {
            db,
            known_path: &[],
            curr_rep_depth: 0,
            placeholder_paths: &placeholder_paths,
            diagnostics: &mut diagnostics,
            rule_err: Ok(()),
        };
        ctx.check_node(expansion.as_syntax_node());
        rules.push(MacroRuleData { pattern, expansion, err: ctx.rule_err });
    }
    let resolver_data = Arc::new(resolver.data);
    Ok(MacroDeclarationData { diagnostics: diagnostics.build(), attributes, resolver_data, rules })
}
/// Salsa-tracked wrapper around [`priv_macro_declaration_data`].
#[salsa::tracked]
fn priv_macro_declaration_data_tracked<'db>(
    db: &'db dyn Database,
    macro_declaration_id: MacroDeclarationId<'db>,
) -> Maybe<MacroDeclarationData<'db>> {
    priv_macro_declaration_data(db, macro_declaration_id)
}
/// Strips the wrapping delimiters (`()`, `{}` or `[]`) off a macro pattern,
/// returning the element list inside.
fn get_macro_elements<'db>(
    db: &'db dyn Database,
    pattern: ast::WrappedMacro<'db>,
) -> ast::MacroElements<'db> {
    match pattern {
        ast::WrappedMacro::Parenthesized(wrapped) => wrapped.elements(db),
        ast::WrappedMacro::Braced(wrapped) => wrapped.elements(db),
        ast::WrappedMacro::Bracketed(wrapped) => wrapped.elements(db),
    }
}
/// Returns the placeholder name of a macro param, or `None` if the param is one
/// of the reserved site markers (`$defsite`/`$callsite`) rather than a capture.
fn extract_placeholder<'db>(
    db: &'db dyn Database,
    path_node: &MacroParam<'db>,
) -> Option<SmolStrId<'db>> {
    let name = path_node.name(db).as_syntax_node().get_text_without_trivia(db);
    let is_reserved_site = [MACRO_DEF_SITE, MACRO_CALL_SITE].contains(&name.long(db).as_str());
    if is_reserved_site { None } else { Some(name) }
}
/// Recursively records, for every placeholder in a pattern, the path of
/// repetition ids enclosing it (outermost first). Subtrees do not add a path
/// segment; only repetitions do. Repetition ids are allocated in pre-order via
/// `next_rep_id`.
fn collect_placeholder_paths<'db>(
    db: &'db dyn Database,
    elements: impl IntoIterator<Item = ast::MacroElement<'db>>,
    current_path: &mut Vec<usize>,
    next_rep_id: &mut usize,
    result: &mut OrderedHashMap<SmolStrId<'db>, Vec<usize>>,
) {
    for element in elements {
        match element {
            ast::MacroElement::Param(param) => {
                // Note: a later occurrence of the same name overwrites the path.
                result.insert(
                    param.name(db).as_syntax_node().get_text_without_trivia(db),
                    current_path.clone(),
                );
            }
            ast::MacroElement::Repetition(rep) => {
                // Allocate an id for this repetition and descend with it pushed
                // onto the path.
                let rep_id = *next_rep_id;
                *next_rep_id += 1;
                current_path.push(rep_id);
                let inner = rep.elements(db).elements(db);
                collect_placeholder_paths(db, inner, current_path, next_rep_id, result);
                assert_eq!(current_path.pop(), Some(rep_id));
            }
            ast::MacroElement::Subtree(subtree) => {
                // Delimited subtrees are transparent for path purposes.
                let inner = get_macro_elements(db, subtree.subtree(db)).elements(db);
                collect_placeholder_paths(db, inner, current_path, next_rep_id, result);
            }
            ast::MacroElement::Token(_) => {}
        }
    }
}
/// State for validating a rule's expansion against the placeholder paths
/// collected from its pattern.
struct ExpansionCheckCtx<'db, 'a> {
    db: &'db dyn Database,
    // Placeholder name -> repetition-id path at its capture site in the pattern.
    placeholder_paths: &'a OrderedHashMap<SmolStrId<'db>, Vec<usize>>,
    // Current nesting depth of repetitions in the expansion being checked.
    curr_rep_depth: usize,
    // Prefix of the repetition path pinned down by placeholders seen so far.
    known_path: &'a [usize],
    diagnostics: &'a mut SemanticDiagnostics<'db>,
    // Set to `Err` once any diagnostic is reported for this rule.
    rule_err: Maybe<()>,
}
impl<'db> ExpansionCheckCtx<'db, '_> {
    /// Recursively validates an expansion subtree: each placeholder must be
    /// defined in the pattern, must appear under at least as many nested
    /// repetitions as it was captured under, and placeholders mixed in one
    /// scope must be driven by the same repetitions. Failures are reported to
    /// `self.diagnostics` and recorded in `self.rule_err`.
    fn check_node(&mut self, node: SyntaxNode<'db>) {
        let db = self.db;
        if let Some(param) = MacroParam::cast(db, node) {
            // Reserved `$defsite`/`$callsite` params are not placeholders.
            if let Some(name) = extract_placeholder(db, &param) {
                let ptr = param.stable_ptr(db).untyped();
                match self.placeholder_paths.get(&name) {
                    None => {
                        // Used in the expansion but never captured in the pattern.
                        self.rule_err = Err(self
                            .diagnostics
                            .report(ptr, SemanticDiagnosticKind::UndefinedMacroPlaceholder(name)));
                    }
                    Some(path) => {
                        if path.len() > self.curr_rep_depth {
                            // Used under fewer repetitions than it was captured under.
                            self.rule_err = Err(self.diagnostics.report(
                                ptr,
                                SemanticDiagnosticKind::MacroPlaceholderRepDepthMismatch {
                                    name,
                                    required: path.len(),
                                    actual: self.curr_rep_depth,
                                },
                            ));
                        } else {
                            // The placeholder's repetition path must agree with the
                            // path already established by siblings in this scope.
                            let cmp_size = path.len().min(self.known_path.len());
                            if path[..cmp_size] != self.known_path[..cmp_size] {
                                self.rule_err = Err(self.diagnostics.report(
                                    ptr,
                                    SemanticDiagnosticKind::MacroPlaceholderRepDriverMismatch(name),
                                ));
                            } else if path.len() > self.known_path.len() {
                                // This placeholder pins down a deeper path.
                                self.known_path = path;
                            }
                        }
                    }
                }
            }
            return;
        }
        if let Some(repetition) = ast::MacroRepetition::cast(db, node) {
            self.curr_rep_depth += 1;
            for element in repetition.elements(db).elements(db) {
                self.check_node(element.as_syntax_node());
            }
            self.curr_rep_depth -= 1;
            // Leaving the repetition: forget path segments deeper than the
            // current depth.
            if self.curr_rep_depth < self.known_path.len() {
                self.known_path = &self.known_path[..self.curr_rep_depth];
            }
        } else if !node.kind(db).is_terminal() {
            for child in node.get_children(db).iter() {
                self.check_node(*child);
            }
        }
    }
}
/// Checks whether `input` matches `rule`'s pattern. On success returns the
/// captured placeholder values and the placeholder-to-repetition mapping;
/// returns `None` on any mismatch or repetition-operator constraint violation.
pub fn is_macro_rule_match<'db>(
    db: &'db dyn Database,
    rule: &MacroRuleData<'db>,
    input: &ast::TokenTreeNode<'db>,
) -> Option<(Captures<'db>, OrderedHashMap<SmolStrId<'db>, RepetitionId>)> {
    let mut ctx = MatcherContext::default();
    let matcher_elements = get_macro_elements(db, rule.pattern.clone());
    // Strip the input's wrapping delimiters; the delimiter kind itself is not
    // compared against the pattern's here.
    let mut input_iter = match input.subtree(db) {
        ast::WrappedTokenTree::Parenthesized(tt) => tt.tokens(db),
        ast::WrappedTokenTree::Braced(tt) => tt.tokens(db),
        ast::WrappedTokenTree::Bracketed(tt) => tt.tokens(db),
        ast::WrappedTokenTree::Missing(_) => unreachable!(),
    }
    .elements(db)
    .peekable();
    // The top-level match must consume the entire input.
    is_macro_rule_match_ex(db, matcher_elements, &mut input_iter, &mut ctx, true)?;
    // Enforce `?` (at most one) and `+` (at least one) repetition constraints.
    if !validate_repetition_operator_constraints(&ctx) {
        return None;
    }
    Some((ctx.captures, ctx.placeholder_to_rep_id))
}
/// Matches `matcher_elements` (a rule pattern) against the token stream in
/// `input_iter`, recording captures and repetition bookkeeping in `ctx`.
///
/// Returns `None` on mismatch. On success returns `Some(advanced)`, where
/// `advanced` is true iff at least one pattern element was processed — the
/// repetition driver uses this to stop on empty (non-advancing) iterations.
/// When `consume_all_input` is true, leftover input tokens fail the match.
fn is_macro_rule_match_ex<'db>(
    db: &'db dyn Database,
    matcher_elements: ast::MacroElements<'db>,
    input_iter: &mut std::iter::Peekable<
        impl DoubleEndedIterator<Item = ast::TokenTree<'db>> + Clone,
    >,
    ctx: &mut MatcherContext<'db>,
    consume_all_input: bool,
) -> Option<bool> {
    let mut advanced = false;
    for matcher_element in matcher_elements.elements(db) {
        match matcher_element {
            ast::MacroElement::Token(matcher_token) => {
                advanced = true;
                // A literal pattern token must equal the next input token textually.
                let input_token = input_iter.next()?;
                match input_token {
                    ast::TokenTree::Token(token_tree_leaf) => {
                        if matcher_token.as_syntax_node().get_text_without_trivia(db)
                            != token_tree_leaf.as_syntax_node().get_text_without_trivia(db)
                        {
                            return None;
                        }
                        continue;
                    }
                    ast::TokenTree::Subtree(_) => return None,
                    ast::TokenTree::Repetition(_) => return None,
                    ast::TokenTree::Param(_) => return None,
                    ast::TokenTree::Missing(_) => unreachable!(),
                }
            }
            ast::MacroElement::Param(param) => {
                advanced = true;
                let placeholder_kind: PlaceholderKind =
                    if let ast::OptionParamKind::ParamKind(param_kind) = param.kind(db) {
                        param_kind.kind(db).into()
                    } else {
                        return None;
                    };
                let placeholder_name = param.name(db).as_syntax_node().get_text_without_trivia(db);
                match placeholder_kind {
                    PlaceholderKind::Identifier => {
                        // An identifier placeholder captures exactly one
                        // identifier terminal.
                        let input_token = input_iter.next()?;
                        let captured_text = match &input_token {
                            ast::TokenTree::Token(token_tree_leaf) => {
                                match token_tree_leaf.leaf(db) {
                                    ast::TokenNode::TerminalIdentifier(terminal_identifier) => {
                                        terminal_identifier.text(db).to_string(db)
                                    }
                                    _ => return None,
                                }
                            }
                            _ => return None,
                        };
                        ctx.captures.entry(placeholder_name).or_default().push(CapturedValue {
                            text: captured_text,
                            stable_ptr: input_token.stable_ptr(db).untyped(),
                        });
                        if let Some(rep_id) = ctx.current_repetition_stack.last() {
                            ctx.placeholder_to_rep_id.insert(placeholder_name, *rep_id);
                        }
                        continue;
                    }
                    PlaceholderKind::Expr => {
                        // An expr placeholder greedily parses an expression
                        // starting at the current input position.
                        let peek_token = input_iter.peek().cloned()?;
                        let file_id = peek_token.as_syntax_node().stable_ptr(db).file_id(db);
                        let expr_node = as_expr_macro_token_tree(input_iter.clone(), file_id, db)?;
                        let expr_text = expr_node.as_syntax_node().get_text(db);
                        let expr_length = expr_text.len();
                        if expr_length == 0 {
                            return None;
                        }
                        ctx.captures.entry(placeholder_name).or_default().push(CapturedValue {
                            text: expr_text.to_string(),
                            stable_ptr: peek_token.stable_ptr(db).untyped(),
                        });
                        if let Some(rep_id) = ctx.current_repetition_stack.last() {
                            ctx.placeholder_to_rep_id.insert(placeholder_name, *rep_id);
                        }
                        // Advance the input past the tokens covered by the
                        // parsed expression, by accumulated text length.
                        let mut current_length = 0;
                        for token_tree_leaf in input_iter.by_ref() {
                            let token_text = match token_tree_leaf {
                                ast::TokenTree::Token(leaf) => leaf.as_syntax_node(),
                                ast::TokenTree::Subtree(subtree) => subtree.as_syntax_node(),
                                ast::TokenTree::Repetition(rep) => rep.as_syntax_node(),
                                ast::TokenTree::Param(param) => param.as_syntax_node(),
                                ast::TokenTree::Missing(_) => unreachable!(),
                            }
                            .get_text(db);
                            current_length += token_text.len();
                            if current_length >= expr_length {
                                break;
                            }
                        }
                        continue;
                    }
                }
            }
            ast::MacroElement::Subtree(matcher_subtree) => {
                advanced = true;
                // A pattern subtree must match an input subtree; recurse into
                // both, requiring the inner match to consume all inner input.
                let input_token = input_iter.next()?;
                if let ast::TokenTree::Subtree(input_subtree) = input_token {
                    let inner_elements = get_macro_elements(db, matcher_subtree.subtree(db));
                    let mut inner_input_iter = match input_subtree.subtree(db) {
                        ast::WrappedTokenTree::Parenthesized(tt) => tt.tokens(db),
                        ast::WrappedTokenTree::Braced(tt) => tt.tokens(db),
                        ast::WrappedTokenTree::Bracketed(tt) => tt.tokens(db),
                        ast::WrappedTokenTree::Missing(_) => unreachable!(),
                    }
                    .elements(db)
                    .peekable();
                    is_macro_rule_match_ex(db, inner_elements, &mut inner_input_iter, ctx, true)?;
                    continue;
                } else {
                    return None;
                }
            }
            ast::MacroElement::Repetition(repetition) => {
                let rep_id = RepetitionId(ctx.next_repetition_id);
                ctx.next_repetition_id += 1;
                ctx.current_repetition_stack.push(rep_id);
                let elements = repetition.elements(db);
                let operator = repetition.operator(db);
                let separator_token = repetition.separator(db);
                let expected_separator = match separator_token {
                    ast::OptionTerminalComma::TerminalComma(sep) => {
                        Some(sep.as_syntax_node().get_text_without_trivia(db))
                    }
                    ast::OptionTerminalComma::Empty(_) => None,
                };
                let mut match_count = 0;
                // Speculatively match the repetition body against clones of the
                // context and input; commit only iterations that both succeed
                // and make progress.
                loop {
                    let mut inner_ctx = ctx.clone();
                    let mut temp_iter = input_iter.clone();
                    let Some(true) = is_macro_rule_match_ex(
                        db,
                        elements.clone(),
                        &mut temp_iter,
                        &mut inner_ctx,
                        false,
                    ) else {
                        break;
                    };
                    advanced = true;
                    *ctx = inner_ctx;
                    *input_iter = temp_iter;
                    match_count += 1;
                    // With a declared separator, continue only if the separator
                    // follows; otherwise this iteration ends the repetition.
                    if let Some(expected_sep) = &expected_separator {
                        if let Some(ast::TokenTree::Token(token_leaf)) = input_iter.peek() {
                            let actual = token_leaf.as_syntax_node().get_text_without_trivia(db);
                            if actual == *expected_sep {
                                input_iter.next();
                            } else {
                                break;
                            }
                        } else {
                            break;
                        }
                    }
                }
                ctx.repetition_match_counts.insert(rep_id, match_count);
                ctx.repetition_operators.insert(rep_id, operator.clone());
                // NOTE(review): this associates *every* capture seen so far with
                // this repetition, not only the ones captured inside it — kept
                // as-is to preserve behavior; verify intent.
                for placeholder_name in ctx.captures.keys() {
                    ctx.placeholder_to_rep_id.insert(*placeholder_name, rep_id);
                }
                // Record the last iteration index. (Previously a loop inserted
                // 0..match_count under the same key, of which only the final
                // insert was observable.)
                if match_count > 0 {
                    ctx.repetition_indices.insert(rep_id, match_count - 1);
                }
                ctx.current_repetition_stack.pop();
                continue;
            }
        }
    }
    // At the top level (and inside subtrees), the pattern must consume all input.
    if consume_all_input && input_iter.next().is_some() {
        return None;
    }
    Some(advanced)
}
/// Verifies every repetition's match count against its operator: `?` allows at
/// most one iteration and `+` requires at least one; `*` (or an unrecorded
/// operator) accepts any count.
fn validate_repetition_operator_constraints(ctx: &MatcherContext<'_>) -> bool {
    ctx.repetition_match_counts.iter().all(|(rep_id, &count)| {
        match ctx.repetition_operators.get(rep_id) {
            Some(ast::MacroRepetitionOperator::ZeroOrOne(_)) => count <= 1,
            Some(ast::MacroRepetitionOperator::OneOrMore(_)) => count >= 1,
            _ => true,
        }
    })
}
/// The result of expanding a macro rule: the generated text plus code mappings
/// tying spans of that text back to the original input tokens.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MacroExpansionResult {
    pub text: Arc<str>,
    pub code_mappings: Arc<[CodeMapping]>,
}
pub fn expand_macro_rule(
db: &dyn Database,
rule: &MacroRuleData<'_>,
matcher_ctx: &mut MatcherContext<'_>,
) -> Maybe<MacroExpansionResult> {
let node = rule.expansion.as_syntax_node();
let mut res_buffer = String::new();
let mut code_mappings = Vec::new();
expand_macro_rule_ex(db, node, matcher_ctx, &mut res_buffer, &mut code_mappings)?;
Ok(MacroExpansionResult { text: res_buffer.into(), code_mappings: code_mappings.into() })
}
/// Recursively expands one expansion-syntax node into `res_buffer`, recording
/// code mappings for substituted placeholder values.
///
/// Placeholders are replaced with their captured text (indexed by the current
/// repetition iteration, falling back to the first capture); repetitions emit
/// their body once per captured value, re-emitting the declared separator
/// between iterations; everything else is emitted verbatim.
fn expand_macro_rule_ex(
    db: &dyn Database,
    node: SyntaxNode<'_>,
    matcher_ctx: &mut MatcherContext<'_>,
    res_buffer: &mut String,
    code_mappings: &mut Vec<CodeMapping>,
) -> Maybe<()> {
    match node.kind(db) {
        SyntaxKind::MacroParam => {
            let path_node = MacroParam::from_syntax_node(db, node);
            if let Some(name) = extract_placeholder(db, &path_node) {
                // Pick the capture for the current repetition iteration, or the
                // first capture for placeholders outside any repetition.
                let rep_index = matcher_ctx
                    .placeholder_to_rep_id
                    .get(&name)
                    .and_then(|rep_id| matcher_ctx.repetition_indices.get(rep_id))
                    .copied();
                let value = matcher_ctx
                    .captures
                    .get(&name)
                    .and_then(|v| rep_index.map_or_else(|| v.first(), |i| v.get(i)))
                    .ok_or_else(skip_diagnostic)?;
                // Map the emitted span back to the original input token's span.
                let start = TextWidth::from_str(res_buffer).as_offset();
                let span = TextSpan::new_with_width(start, TextWidth::from_str(&value.text));
                res_buffer.push_str(&value.text);
                code_mappings.push(CodeMapping {
                    span,
                    origin: CodeOrigin::Span(value.stable_ptr.lookup(db).span_without_trivia(db)),
                });
                return Ok(());
            }
            // Reserved site params (`$defsite`/`$callsite`) fall through to the
            // generic emission below.
        }
        SyntaxKind::MacroRepetition => {
            let repetition = ast::MacroRepetition::from_syntax_node(db, node);
            let elements = repetition.elements(db);
            // Drive the iteration count off the first placeholder inside the
            // repetition body.
            let first_param = find_first_repetition_param(db, elements.elements(db))
                .ok_or_else(skip_diagnostic)?;
            let placeholder_name = first_param.name(db).text(db);
            let Some(rep_id) = matcher_ctx.placeholder_to_rep_id.get(&placeholder_name).copied()
            else {
                // Placeholder not bound to a repetition; nothing to emit.
                return Ok(());
            };
            let repetition_len =
                matcher_ctx.captures.get(&placeholder_name).map(|v| v.len()).unwrap_or(0);
            for i in 0..repetition_len {
                matcher_ctx.repetition_indices.insert(rep_id, i);
                for element in elements.elements(db) {
                    expand_macro_rule_ex(
                        db,
                        element.as_syntax_node(),
                        matcher_ctx,
                        res_buffer,
                        code_mappings,
                    )?;
                }
                // Re-emit the separator between iterations (not after the last).
                if i + 1 < repetition_len
                    && let ast::OptionTerminalComma::TerminalComma(sep) = repetition.separator(db)
                {
                    res_buffer.push_str(sep.as_syntax_node().get_text(db));
                }
            }
            matcher_ctx.repetition_indices.swap_remove(&rep_id);
            return Ok(());
        }
        // All other kinds use the generic emission below. (Previously this arm
        // duplicated that code verbatim.)
        _ => {}
    }
    // Generic emission: terminals are copied verbatim; non-terminals recurse
    // into their children.
    if node.kind(db).is_terminal() {
        res_buffer.push_str(node.get_text(db));
        return Ok(());
    }
    for child in node.get_children(db).iter() {
        expand_macro_rule_ex(db, *child, matcher_ctx, res_buffer, code_mappings)?;
    }
    Ok(())
}
/// Depth-first search for the first macro param among `elements`, descending
/// into subtrees and nested repetitions. Returns `None` if none exists.
fn find_first_repetition_param<'db>(
    db: &'db dyn Database,
    elements: impl IntoIterator<Item = MacroElement<'db>>,
) -> Option<MacroParam<'db>> {
    elements.into_iter().find_map(|element| match element {
        ast::MacroElement::Param(param) => Some(param),
        ast::MacroElement::Subtree(subtree) => {
            let inner = get_macro_elements(db, subtree.subtree(db)).elements(db);
            find_first_repetition_param(db, inner)
        }
        ast::MacroElement::Repetition(repetition) => {
            let inner = repetition.elements(db).elements(db);
            find_first_repetition_param(db, inner)
        }
        ast::MacroElement::Token(_) => None,
    })
}
/// Returns the diagnostics of a macro declaration, or empty diagnostics if its
/// data could not be computed.
fn macro_declaration_diagnostics<'db>(
    db: &'db dyn Database,
    macro_declaration_id: MacroDeclarationId<'db>,
) -> Diagnostics<'db, SemanticDiagnostic<'db>> {
    match priv_macro_declaration_data(db, macro_declaration_id) {
        Ok(data) => data.diagnostics,
        Err(_) => Default::default(),
    }
}
/// Salsa-tracked wrapper around [`macro_declaration_diagnostics`].
#[salsa::tracked]
fn macro_declaration_diagnostics_tracked<'db>(
    db: &'db dyn Database,
    macro_declaration_id: MacroDeclarationId<'db>,
) -> Diagnostics<'db, SemanticDiagnostic<'db>> {
    macro_declaration_diagnostics(db, macro_declaration_id)
}
/// Returns the structured attributes of a macro declaration.
fn macro_declaration_attributes<'db>(
    db: &'db dyn Database,
    macro_declaration_id: MacroDeclarationId<'db>,
) -> Maybe<Vec<Attribute<'db>>> {
    Ok(priv_macro_declaration_data(db, macro_declaration_id)?.attributes)
}
/// Salsa-tracked wrapper around [`macro_declaration_attributes`].
#[salsa::tracked]
fn macro_declaration_attributes_tracked<'db>(
    db: &'db dyn Database,
    macro_declaration_id: MacroDeclarationId<'db>,
) -> Maybe<Vec<Attribute<'db>>> {
    macro_declaration_attributes(db, macro_declaration_id)
}
/// Returns the resolver data of a macro declaration.
fn macro_declaration_resolver_data<'db>(
    db: &'db dyn Database,
    macro_declaration_id: MacroDeclarationId<'db>,
) -> Maybe<Arc<ResolverData<'db>>> {
    Ok(priv_macro_declaration_data(db, macro_declaration_id)?.resolver_data)
}
/// Salsa-tracked wrapper around [`macro_declaration_resolver_data`].
#[salsa::tracked]
fn macro_declaration_resolver_data_tracked<'db>(
    db: &'db dyn Database,
    macro_declaration_id: MacroDeclarationId<'db>,
) -> Maybe<Arc<ResolverData<'db>>> {
    macro_declaration_resolver_data(db, macro_declaration_id)
}
/// Returns the rules of a macro declaration, in source order.
fn macro_declaration_rules<'db>(
    db: &'db dyn Database,
    macro_declaration_id: MacroDeclarationId<'db>,
) -> Maybe<Vec<MacroRuleData<'db>>> {
    Ok(priv_macro_declaration_data(db, macro_declaration_id)?.rules)
}
/// Salsa-tracked wrapper around [`macro_declaration_rules`].
#[salsa::tracked]
fn macro_declaration_rules_tracked<'db>(
    db: &'db dyn Database,
    macro_declaration_id: MacroDeclarationId<'db>,
) -> Maybe<Vec<MacroRuleData<'db>>> {
    macro_declaration_rules(db, macro_declaration_id)
}
/// Checks whether the crate owning `module_id` has the experimental
/// user-defined-inline-macros feature enabled; crates with no configuration
/// count as disabled.
fn are_user_defined_inline_macros_enabled<'db>(
    db: &dyn Database,
    module_id: ModuleId<'db>,
) -> bool {
    db.crate_config(module_id.owning_crate(db))
        .is_some_and(|config| config.settings.experimental_features.user_defined_inline_macros)
}
/// Query surface for macro-declaration semantics; each method forwards to its
/// Salsa-tracked counterpart.
pub trait MacroDeclarationSemantic<'db>: Database {
    /// Returns the full semantic data of a macro declaration.
    fn priv_macro_declaration_data(
        &'db self,
        macro_id: MacroDeclarationId<'db>,
    ) -> Maybe<MacroDeclarationData<'db>> {
        priv_macro_declaration_data_tracked(self.as_dyn_database(), macro_id)
    }
    /// Returns the diagnostics of a macro declaration.
    fn macro_declaration_diagnostics(
        &'db self,
        macro_id: MacroDeclarationId<'db>,
    ) -> Diagnostics<'db, SemanticDiagnostic<'db>> {
        macro_declaration_diagnostics_tracked(self.as_dyn_database(), macro_id)
    }
    /// Returns the resolver data of a macro declaration.
    fn macro_declaration_resolver_data(
        &'db self,
        macro_id: MacroDeclarationId<'db>,
    ) -> Maybe<Arc<ResolverData<'db>>> {
        macro_declaration_resolver_data_tracked(self.as_dyn_database(), macro_id)
    }
    /// Returns the structured attributes of a macro declaration.
    fn macro_declaration_attributes(
        &'db self,
        macro_id: MacroDeclarationId<'db>,
    ) -> Maybe<Vec<Attribute<'db>>> {
        macro_declaration_attributes_tracked(self.as_dyn_database(), macro_id)
    }
    /// Returns the rules of a macro declaration.
    fn macro_declaration_rules(
        &'db self,
        macro_id: MacroDeclarationId<'db>,
    ) -> Maybe<Vec<MacroRuleData<'db>>> {
        macro_declaration_rules_tracked(self.as_dyn_database(), macro_id)
    }
}
// Blanket impl: every database gets the query surface via the default methods.
impl<'db, T: Database + ?Sized> MacroDeclarationSemantic<'db> for T {}