use crate::mbe::{KleeneToken, TokenTree};
use rustc_ast::ast::NodeId;
use rustc_ast::token::{DelimToken, Token, TokenKind};
use rustc_data_structures::fx::FxHashMap;
use rustc_session::lint::builtin::META_VARIABLE_MISUSE;
use rustc_session::parse::ParseSess;
use rustc_span::symbol::kw;
use rustc_span::{symbol::MacroRulesNormalizedIdent, MultiSpan, Span};
use smallvec::SmallVec;
/// An immutable stack implemented as a linked list of borrowed nodes: each `Push` node borrows
/// its predecessor, so `push` can build a new top frame on the caller's stack without any heap
/// allocation (see `Stack::push`).
enum Stack<'a, T> {
    /// The empty stack.
    Empty,
    /// A non-empty stack.
    Push {
        /// The top element.
        top: T,
        /// The rest of the stack.
        prev: &'a Stack<'a, T>,
    },
}
impl<'a, T> Stack<'a, T> {
    /// Returns whether the stack is empty.
    fn is_empty(&self) -> bool {
        // `matches!` is the idiomatic form of a match that only maps patterns to booleans.
        matches!(*self, Stack::Empty)
    }

    /// Returns a new stack with `top` as its top element; the new node borrows `self`, so the
    /// result cannot outlive the stack it extends.
    fn push(&'a self, top: T) -> Stack<'a, T> {
        Stack::Push { top, prev: self }
    }
}
/// Iterating a `&Stack` yields its elements from the top (most recently pushed) down to the
/// bottom. The iterator is the reference itself, used as a cursor that walks the `prev` links.
impl<'a, T> Iterator for &'a Stack<'a, T> {
    type Item = &'a T;
    fn next(&mut self) -> Option<&'a T> {
        match *self {
            Stack::Empty => None,
            Stack::Push { ref top, ref prev } => {
                // Advance the cursor to the rest of the stack and yield the element we passed.
                *self = prev;
                Some(top)
            }
        }
    }
}
impl From<&Stack<'_, KleeneToken>> for SmallVec<[KleeneToken; 1]> {
    /// Collects the stack of Kleene operators into a vector. The stack iterator yields
    /// top-first, so the collected vector is reversed to get bottom-to-top (outermost
    /// repetition first) order.
    fn from(ops: &Stack<'_, KleeneToken>) -> SmallVec<[KleeneToken; 1]> {
        let mut ops: SmallVec<[KleeneToken; 1]> = ops.cloned().collect();
        ops.reverse();
        ops
    }
}
/// Information attached to a meta-variable binder in an LHS.
struct BinderInfo {
    /// The span of the meta-variable in the LHS, used for diagnostics.
    span: Span,
    /// The stack of Kleene operators surrounding the binder (outermost first).
    ops: SmallVec<[KleeneToken; 1]>,
}
/// Maps the normalized name of a meta-variable to the information attached to its binder.
type Binders = FxHashMap<MacroRulesNormalizedIdent, BinderInfo>;
/// The state accumulated for one level of (possibly nested) macro definition.
struct MacroState<'a> {
    /// The binders of the macro definition at this level.
    binders: &'a Binders,
    /// The stack of Kleene operators surrounding the point where the next nesting level
    /// appears (outermost first).
    ops: SmallVec<[KleeneToken; 1]>,
}
/// Checks that meta-variables are used correctly in all the arms of a macro definition.
///
/// Arguments:
/// - `sess` is used to emit diagnostics and lints
/// - `node_id` is used to buffer lints against the macro definition
/// - `span` is used only to report the (compiler-bug) case of mismatched arm counts
/// - `lhses` and `rhses` are the paired LHS/RHS token trees of the macro's arms
///
/// Returns `false` if any arm produced a hard error, `true` otherwise (buffered lints do not
/// affect the return value).
pub(super) fn check_meta_variables(
    sess: &ParseSess,
    node_id: NodeId,
    span: Span,
    lhses: &[TokenTree],
    rhses: &[TokenTree],
) -> bool {
    if lhses.len() != rhses.len() {
        sess.span_diagnostic.span_bug(span, "length mismatch between LHSes and RHSes")
    }
    let mut valid = true;
    for (lhs, rhs) in lhses.iter().zip(rhses.iter()) {
        // Each arm gets a fresh binder map: binders do not leak between arms.
        let mut binders = Binders::default();
        check_binders(sess, node_id, lhs, &Stack::Empty, &mut binders, &Stack::Empty, &mut valid);
        check_occurrences(sess, node_id, rhs, &Stack::Empty, &binders, &Stack::Empty, &mut valid);
    }
    valid
}
/// Checks `lhs` as part of the LHS of a macro definition and extends `binders` with the binders
/// it declares, reporting duplicates and misuse along the way.
///
/// Arguments:
/// - `sess` is used to emit diagnostics and lints
/// - `node_id` is used to buffer lints
/// - `lhs` is the token tree being checked
/// - `macros` is the stack of possible outer macros (empty for a top-level LHS)
/// - `binders` accumulates the binders of this LHS
/// - `ops` is the stack of Kleene operators surrounding `lhs`
/// - `valid` is set to `false` when a hard error is emitted
fn check_binders(
    sess: &ParseSess,
    node_id: NodeId,
    lhs: &TokenTree,
    macros: &Stack<'_, MacroState<'_>>,
    binders: &mut Binders,
    ops: &Stack<'_, KleeneToken>,
    valid: &mut bool,
) {
    match *lhs {
        TokenTree::Token(..) => {}
        // A bare `MetaVar` in an LHS can only occur inside a nested macro definition, where
        // the outer macro's variables appear without fragment specifiers.
        TokenTree::MetaVar(span, name) => {
            if macros.is_empty() {
                sess.span_diagnostic.span_bug(span, "unexpected MetaVar in lhs");
            }
            let name = MacroRulesNormalizedIdent::new(name);
            // Three possibilities for the meta-variable:
            if let Some(prev_info) = binders.get(&name) {
                // 1. Already bound in this nested LHS: duplicate binding (lint only).
                let mut span = MultiSpan::from_span(span);
                span.push_span_label(prev_info.span, "previous declaration".into());
                buffer_lint(sess, span, node_id, "duplicate matcher binding");
            } else if get_binder_info(macros, binders, name).is_none() {
                // 2. Free (not bound here or in any outer macro): record it as a binder.
                binders.insert(name, BinderInfo { span, ops: ops.into() });
            } else {
                // 3. Bound by an outer macro: treat it as an occurrence and check it.
                check_occurrences(sess, node_id, lhs, macros, binders, ops, valid);
            }
        }
        // A `MetaVarDecl` (`$name:kind`) is only valid in a top-level LHS, as enforced by the
        // `span_bug` below.
        TokenTree::MetaVarDecl(span, name, _kind) => {
            if !macros.is_empty() {
                sess.span_diagnostic.span_bug(span, "unexpected MetaVarDecl in nested lhs");
            }
            let name = MacroRulesNormalizedIdent::new(name);
            if let Some(prev_info) = get_binder_info(macros, binders, name) {
                // Duplicate binders at the top level are hard errors, not lints.
                sess.span_diagnostic
                    .struct_span_err(span, "duplicate matcher binding")
                    .span_label(span, "duplicate binding")
                    .span_label(prev_info.span, "previous binding")
                    .emit();
                *valid = false;
            } else {
                binders.insert(name, BinderInfo { span, ops: ops.into() });
            }
        }
        TokenTree::Delimited(_, ref del) => {
            for tt in &del.tts {
                check_binders(sess, node_id, tt, macros, binders, ops, valid);
            }
        }
        TokenTree::Sequence(_, ref seq) => {
            // Entering a repetition: push its Kleene operator for the nested binders.
            let ops = ops.push(seq.kleene);
            for tt in &seq.tts {
                check_binders(sess, node_id, tt, macros, binders, &ops, valid);
            }
        }
    }
}
/// Looks up the binder information for the meta-variable `name`, first in the binders of the
/// current LHS, then in the binders of each enclosing macro (innermost first). Returns `None`
/// when the name is bound nowhere.
fn get_binder_info<'a>(
    mut macros: &'a Stack<'a, MacroState<'a>>,
    binders: &'a Binders,
    name: MacroRulesNormalizedIdent,
) -> Option<&'a BinderInfo> {
    if let Some(info) = binders.get(&name) {
        return Some(info);
    }
    macros.find_map(|state| state.binders.get(&name))
}
/// Checks `rhs` as part of the RHS of a macro definition, verifying each meta-variable
/// occurrence against the binders of the corresponding LHS.
///
/// Arguments:
/// - `sess` is used to emit diagnostics and lints
/// - `node_id` is used to buffer lints
/// - `rhs` is the token tree being checked
/// - `macros` is the stack of possible outer macros
/// - `binders` contains the binders of the associated LHS
/// - `ops` is the stack of Kleene operators surrounding `rhs`
/// - `valid` is set to `false` when a hard error is emitted
fn check_occurrences(
    sess: &ParseSess,
    node_id: NodeId,
    rhs: &TokenTree,
    macros: &Stack<'_, MacroState<'_>>,
    binders: &Binders,
    ops: &Stack<'_, KleeneToken>,
    valid: &mut bool,
) {
    match *rhs {
        TokenTree::Token(..) => {}
        // `$name:kind` declarations never appear in a RHS; this would be a parser bug.
        TokenTree::MetaVarDecl(span, _name, _kind) => {
            sess.span_diagnostic.span_bug(span, "unexpected MetaVarDecl in rhs")
        }
        TokenTree::MetaVar(span, name) => {
            let name = MacroRulesNormalizedIdent::new(name);
            check_ops_is_prefix(sess, node_id, macros, binders, ops, span, name);
        }
        // Delimited groups and sequences may contain nested macro definitions, so their
        // contents go through the nested-occurrence state machine.
        TokenTree::Delimited(_, ref del) => {
            check_nested_occurrences(sess, node_id, &del.tts, macros, binders, ops, valid);
        }
        TokenTree::Sequence(_, ref seq) => {
            let ops = ops.push(seq.kleene);
            check_nested_occurrences(sess, node_id, &seq.tts, macros, binders, &ops, valid);
        }
    }
}
/// Progress of the state machine in `check_nested_occurrences` that recognizes nested macro
/// definitions of the forms `macro_rules! name { ... }` and `macro name(...) { ... }`.
#[derive(Clone, Copy, PartialEq, Eq)]
enum NestedMacroState {
    /// Nothing relevant seen yet, or the state was reset.
    Empty,
    /// The token `macro_rules` was seen.
    MacroRules,
    /// The tokens `macro_rules!` were seen.
    MacroRulesNot,
    /// The tokens `macro_rules! name` were seen (name may be a meta-variable).
    MacroRulesNotName,
    /// The token `macro` was seen.
    Macro,
    /// The tokens `macro name` were seen (name may be a meta-variable).
    MacroName,
    /// The tokens `macro name(...)` were seen.
    MacroNameParen,
}
/// Checks the token trees `tts` as part of a RHS, additionally recognizing nested macro
/// definitions (`macro_rules! name { ... }` and `macro name(...) { ... }`) with a small state
/// machine (`NestedMacroState`) so that a nested definition's arms are checked against its own
/// binders, with the enclosing macro's state pushed on `macros`.
///
/// Arguments:
/// - `sess` is used to emit diagnostics and lints
/// - `node_id` is used to buffer lints
/// - `tts` are the token trees being checked
/// - `macros` is the stack of possible outer macros
/// - `binders` contains the binders of the associated LHS
/// - `ops` is the stack of Kleene operators surrounding `tts`
/// - `valid` is set to `false` when a hard error is emitted
fn check_nested_occurrences(
    sess: &ParseSess,
    node_id: NodeId,
    tts: &[TokenTree],
    macros: &Stack<'_, MacroState<'_>>,
    binders: &Binders,
    ops: &Stack<'_, KleeneToken>,
    valid: &mut bool,
) {
    let mut state = NestedMacroState::Empty;
    // State frame used when descending into a nested macro definition's arms.
    let nested_macros = macros.push(MacroState { binders, ops: ops.into() });
    let mut nested_binders = Binders::default();
    for tt in tts {
        match (state, tt) {
            (
                NestedMacroState::Empty,
                &TokenTree::Token(Token { kind: TokenKind::Ident(name, false), .. }),
            ) => {
                if name == kw::MacroRules {
                    state = NestedMacroState::MacroRules;
                } else if name == kw::Macro {
                    state = NestedMacroState::Macro;
                }
            }
            (
                NestedMacroState::MacroRules,
                &TokenTree::Token(Token { kind: TokenKind::Not, .. }),
            ) => {
                state = NestedMacroState::MacroRulesNot;
            }
            (
                NestedMacroState::MacroRulesNot,
                &TokenTree::Token(Token { kind: TokenKind::Ident(..), .. }),
            ) => {
                state = NestedMacroState::MacroRulesNotName;
            }
            (NestedMacroState::MacroRulesNot, &TokenTree::MetaVar(..)) => {
                state = NestedMacroState::MacroRulesNotName;
                // The nested macro's name is itself a meta-variable of the outer macro, so
                // check it as a regular occurrence.
                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
            }
            (NestedMacroState::MacroRulesNotName, &TokenTree::Delimited(_, ref del))
            | (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
                if del.delim == DelimToken::Brace =>
            {
                // Brace-delimited body of a nested `macro_rules!` or `macro` definition:
                // check as many `lhs => rhs` arms as can be recognized with the nested macro
                // state, then fall back to checking the remaining trees as plain RHS content.
                let macro_rules = state == NestedMacroState::MacroRulesNotName;
                state = NestedMacroState::Empty;
                let rest =
                    check_nested_macro(sess, node_id, macro_rules, &del.tts, &nested_macros, valid);
                check_nested_occurrences(
                    sess,
                    node_id,
                    &del.tts[rest..],
                    macros,
                    binders,
                    ops,
                    valid,
                );
            }
            (
                NestedMacroState::Macro,
                &TokenTree::Token(Token { kind: TokenKind::Ident(..), .. }),
            ) => {
                state = NestedMacroState::MacroName;
            }
            (NestedMacroState::Macro, &TokenTree::MetaVar(..)) => {
                state = NestedMacroState::MacroName;
                // As above: the macro's name may come from the outer macro.
                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
            }
            (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
                if del.delim == DelimToken::Paren =>
            {
                // Parenthesized parameter list of a `macro name(...)` definition: the
                // meta-variables it declares are binders for the following braced body.
                state = NestedMacroState::MacroNameParen;
                nested_binders = Binders::default();
                check_binders(
                    sess,
                    node_id,
                    tt,
                    &nested_macros,
                    &mut nested_binders,
                    &Stack::Empty,
                    valid,
                );
            }
            (NestedMacroState::MacroNameParen, &TokenTree::Delimited(_, ref del))
                if del.delim == DelimToken::Brace =>
            {
                // Braced body of a `macro name(...) { ... }` definition: check it against the
                // binders collected from the parameter list.
                state = NestedMacroState::Empty;
                check_occurrences(
                    sess,
                    node_id,
                    tt,
                    &nested_macros,
                    &nested_binders,
                    &Stack::Empty,
                    valid,
                );
            }
            (_, ref tt) => {
                // Anything else resets the state machine; check the tree as plain RHS content.
                state = NestedMacroState::Empty;
                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
            }
        }
    }
}
/// Checks `tts` as the body of a nested macro definition — a sequence of `lhs => rhs` arms —
/// and returns the number of token trees consumed, so the caller can check any leftover trees
/// as ordinary RHS content.
///
/// Arguments:
/// - `sess` is used to emit diagnostics and lints
/// - `node_id` is used to buffer lints
/// - `macro_rules` is `true` for `macro_rules!` bodies (arms separated by `;`) and `false` for
///   `macro` bodies (arms separated by `,`)
/// - `tts` are the token trees of the nested macro body
/// - `macros` is the stack of outer macro states (already including the enclosing macro)
/// - `valid` is set to `false` when a hard error is emitted
fn check_nested_macro(
    sess: &ParseSess,
    node_id: NodeId,
    macro_rules: bool,
    tts: &[TokenTree],
    macros: &Stack<'_, MacroState<'_>>,
    valid: &mut bool,
) -> usize {
    let n = tts.len();
    let mut i = 0;
    let separator = if macro_rules { TokenKind::Semi } else { TokenKind::Comma };
    loop {
        // An arm is exactly three token trees: a delimited LHS, `=>`, and a delimited RHS.
        if i + 2 >= n
            || !tts[i].is_delimited()
            || !tts[i + 1].is_token(&TokenKind::FatArrow)
            || !tts[i + 2].is_delimited()
        {
            break;
        }
        let lhs = &tts[i];
        let rhs = &tts[i + 2];
        // Each nested arm gets its own binder map, checked against the outer macro stack.
        let mut binders = Binders::default();
        check_binders(sess, node_id, lhs, macros, &mut binders, &Stack::Empty, valid);
        check_occurrences(sess, node_id, rhs, macros, &binders, &Stack::Empty, valid);
        i += 3;
        // Arms continue only while followed by the expected separator.
        if i == n || !tts[i].is_token(&separator) {
            break;
        }
        i += 1;
    }
    i
}
/// Checks that the meta-variable occurrence `name` at `span` is bound — in `binders` or in one
/// of the outer macro states — and, if so, that the binder's Kleene operators are a prefix of
/// the operators in scope at the occurrence. Buffers an "unknown macro variable" lint when the
/// name is bound nowhere.
///
/// Arguments:
/// - `sess` is used to buffer lints
/// - `node_id` is used to buffer lints
/// - `macros` is the stack of possible outer macros
/// - `binders` contains the binders of the associated LHS
/// - `ops` is the stack of Kleene operators surrounding the occurrence
/// - `span` is the span of the occurrence
/// - `name` is the (normalized) name of the meta-variable
fn check_ops_is_prefix(
    sess: &ParseSess,
    node_id: NodeId,
    macros: &Stack<'_, MacroState<'_>>,
    binders: &Binders,
    ops: &Stack<'_, KleeneToken>,
    span: Span,
    name: MacroRulesNormalizedIdent,
) {
    // Treat the current level like one more macro state so the loop below handles it uniformly.
    let macros = macros.push(MacroState { binders, ops: ops.into() });
    // Accumulate the per-level operator stacks (innermost level first) until the level that
    // binds `name` is found.
    let mut acc: SmallVec<[&SmallVec<[KleeneToken; 1]>; 1]> = SmallVec::new();
    for state in &macros {
        acc.push(&state.ops);
        if let Some(binder) = state.binders.get(&name) {
            // Flatten the accumulated stacks outermost-level-first to get the full operator
            // stack in scope at the occurrence, relative to the binding level.
            let mut occurrence_ops: SmallVec<[KleeneToken; 2]> = SmallVec::new();
            for ops in acc.iter().rev() {
                occurrence_ops.extend_from_slice(ops);
            }
            ops_is_prefix(sess, node_id, span, name, &binder.ops, &occurrence_ops);
            return;
        }
    }
    buffer_lint(sess, span.into(), node_id, &format!("unknown macro variable `{}`", name));
}
/// Verifies that `binder_ops` is a prefix of `occurrence_ops`, buffering a lint describing the
/// first divergence otherwise: either the occurrence is not nested in enough repetitions (the
/// variable "is still repeating" at that depth), or a shared level uses a different Kleene
/// operator.
///
/// Arguments:
/// - `sess` is used to buffer lints
/// - `node_id` is used to buffer lints
/// - `span` is the span of the meta-variable occurrence being checked
/// - `name` is the name of the meta-variable, used in the lint message
/// - `binder_ops` is the stack of Kleene operators at the binder (outermost first)
/// - `occurrence_ops` is the stack of Kleene operators at the occurrence (outermost first)
fn ops_is_prefix(
    sess: &ParseSess,
    node_id: NodeId,
    span: Span,
    name: MacroRulesNormalizedIdent,
    binder_ops: &[KleeneToken],
    occurrence_ops: &[KleeneToken],
) {
    for (i, binder) in binder_ops.iter().enumerate() {
        if i >= occurrence_ops.len() {
            // The occurrence is not nested deeply enough to match the binder's repetitions.
            let mut span = MultiSpan::from_span(span);
            span.push_span_label(binder.span, "expected repetition".into());
            // Quote the variable name with backticks for consistency with the
            // "unknown macro variable `...`" lint emitted by `check_ops_is_prefix`.
            let message = &format!("variable `{}` is still repeating at this depth", name);
            buffer_lint(sess, span, node_id, message);
            return;
        }
        let occurrence = &occurrence_ops[i];
        if occurrence.op != binder.op {
            // Same depth, but the repetition operators disagree (e.g. `*` vs `+`).
            let mut span = MultiSpan::from_span(span);
            span.push_span_label(binder.span, "expected repetition".into());
            span.push_span_label(occurrence.span, "conflicting repetition".into());
            let message = "meta-variable repeats with different Kleene operator";
            buffer_lint(sess, span, node_id, message);
            return;
        }
    }
}
/// Buffers a `META_VARIABLE_MISUSE` lint at `span` against `node_id` with the given message.
fn buffer_lint(sess: &ParseSess, span: MultiSpan, node_id: NodeId, message: &str) {
    sess.buffer_lint(&META_VARIABLE_MISUSE, span, node_id, message);
}