use crate::ast::NodeId;
use crate::early_buffered_lints::BufferedEarlyLintId;
use crate::ext::tt::macro_parser;
use crate::feature_gate::Features;
use crate::parse::{token, ParseSess};
use crate::print::pprust;
use crate::tokenstream::{self, DelimSpan};
use crate::ast;
use crate::symbol::kw;
use syntax_pos::{edition::Edition, BytePos, Span};
use rustc_data_structures::sync::Lrc;
use std::iter::Peekable;
/// A delimited group of token trees, e.g. the contents of `(...)`, `[...]`, or `{...}`.
/// Note that the delimiter itself might be `NoDelim`.
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
pub struct Delimited {
    /// The kind of delimiter (parenthesis, bracket, brace, or none).
    pub delim: token::DelimToken,
    /// The token trees enclosed by the delimiters.
    pub tts: Vec<TokenTree>,
}
impl Delimited {
    /// Returns the opening delimiter (possibly `NoDelim`) as a token.
    pub fn open_token(&self) -> token::Token {
        token::OpenDelim(self.delim)
    }

    /// Returns the closing delimiter (possibly `NoDelim`) as a token.
    pub fn close_token(&self) -> token::Token {
        token::CloseDelim(self.delim)
    }

    /// Returns the opening delimiter as a `TokenTree::Token`. For non-dummy spans, the
    /// span's low position is advanced by the delimiter's width.
    pub fn open_tt(&self, span: Span) -> TokenTree {
        // Dummy spans carry no meaningful byte offsets, so leave them unchanged.
        let open_span = if span.is_dummy() {
            span
        } else {
            let width = BytePos(self.delim.len() as u32);
            span.with_lo(span.lo() + width)
        };
        TokenTree::Token(open_span, self.open_token())
    }

    /// Returns the closing delimiter as a `TokenTree::Token`. For non-dummy spans, the
    /// resulting span starts at `hi - width`, covering just the closing delimiter.
    pub fn close_tt(&self, span: Span) -> TokenTree {
        let close_span = if span.is_dummy() {
            span
        } else {
            let width = BytePos(self.delim.len() as u32);
            span.with_lo(span.hi() - width)
        };
        TokenTree::Token(close_span, self.close_token())
    }
}
/// A Kleene-style repetition sequence in a macro, e.g. `$(pat),*`.
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
pub struct SequenceRepetition {
    /// The sequence of token trees being repeated.
    pub tts: Vec<TokenTree>,
    /// The optional separator token, e.g. the `,` in `$(pat),*`.
    pub separator: Option<token::Token>,
    /// Whether the sequence can repeat zero or more, one or more, or zero or one times.
    pub op: KleeneOp,
    /// The number of names counted by `macro_parser::count_names` over `tts`
    /// (including nested sequences).
    pub num_captures: usize,
}
/// A Kleene-style repetition operator for token sequences.
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum KleeneOp {
    /// Kleene star (`*`): zero or more repetitions.
    ZeroOrMore,
    /// Kleene plus (`+`): one or more repetitions.
    OneOrMore,
    /// Kleene question (`?`): zero or one repetition.
    ZeroOrOne,
}
/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)` are
/// "first-class" token trees, which is useful when parsing macros.
#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
pub enum TokenTree {
    /// An ordinary token.
    Token(Span, token::Token),
    /// A delimited group of sub-token-trees.
    Delimited(DelimSpan, Lrc<Delimited>),
    /// A Kleene-style repetition sequence, e.g. `$(...)*`.
    Sequence(DelimSpan, Lrc<SequenceRepetition>),
    /// A metavariable use, e.g. `$var`.
    MetaVar(Span, ast::Ident),
    /// A metavariable declaration, e.g. `$var:expr` (only produced when parsing with
    /// `expect_matchers` set).
    MetaVarDecl(
        Span,
        ast::Ident, // name to bind
        ast::Ident, // kind of nonterminal
    ),
}
impl TokenTree {
    /// Returns the number of trees that `get_tt` can index into for this tree. For a
    /// delimited group with real delimiters, the open and close tokens count as two
    /// extra "virtual" trees.
    pub fn len(&self) -> usize {
        match *self {
            TokenTree::Delimited(_, ref delimed) => {
                if delimed.delim == token::NoDelim {
                    delimed.tts.len()
                } else {
                    delimed.tts.len() + 2
                }
            }
            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
            _ => 0,
        }
    }

    /// Returns `true` if this tree has no indexable sub-trees. A delimited group with
    /// real delimiters always contains at least its open/close tokens, so it is never
    /// considered empty.
    pub fn is_empty(&self) -> bool {
        match *self {
            TokenTree::Delimited(_, ref delimed) => {
                delimed.delim == token::NoDelim && delimed.tts.is_empty()
            }
            TokenTree::Sequence(_, ref seq) => seq.tts.is_empty(),
            _ => true,
        }
    }

    /// Returns the `index`-th sub-tree. For a delimited group with real delimiters,
    /// position 0 is the opening delimiter and position `tts.len() + 1` is the closing
    /// one. Panics for trees that cannot be indexed into.
    pub fn get_tt(&self, index: usize) -> TokenTree {
        match *self {
            TokenTree::Delimited(_, ref delimed) if delimed.delim == token::NoDelim => {
                delimed.tts[index].clone()
            }
            TokenTree::Delimited(span, ref delimed) => {
                if index == 0 {
                    delimed.open_tt(span.open)
                } else if index == delimed.tts.len() + 1 {
                    delimed.close_tt(span.close)
                } else {
                    delimed.tts[index - 1].clone()
                }
            }
            TokenTree::Sequence(_, ref seq) => seq.tts[index].clone(),
            _ => panic!("Cannot expand a token tree"),
        }
    }

    /// Retrieves this token tree's span.
    pub fn span(&self) -> Span {
        match *self {
            TokenTree::Token(sp, _)
            | TokenTree::MetaVar(sp, _)
            | TokenTree::MetaVarDecl(sp, _, _) => sp,
            TokenTree::Delimited(sp, _) | TokenTree::Sequence(sp, _) => sp.entire(),
        }
    }
}
/// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>` for use in
/// parsing a macro.
///
/// # Parameters
///
/// - `input`: the token stream whose contents we are parsing.
/// - `expect_matchers`: whether we are parsing the "pattern" side of a macro. If true, each
///   metavariable must be declared with its kind (e.g. `$var:expr`); a bare `$var` triggers
///   a missing-fragment-specifier record and an invalid placeholder declaration.
/// - `sess`: the parsing session; errors are emitted to it.
/// - `features`, `attrs`: feature flags and attributes, forwarded to sub-parsers.
/// - `edition`: the edition of the defining crate (affects `?` repetition handling).
/// - `macro_node_id`: the `NodeId` of the macro, used for buffering early lints.
///
/// # Returns
///
/// The parsed token trees. Errors, if any, are emitted to `sess` along the way.
pub fn parse(
    input: tokenstream::TokenStream,
    expect_matchers: bool,
    sess: &ParseSess,
    features: &Features,
    attrs: &[ast::Attribute],
    edition: Edition,
    macro_node_id: NodeId,
) -> Vec<TokenTree> {
    // Will contain the final collection of `self::TokenTree`.
    let mut result = Vec::new();

    // Walk the stream one tree at a time; `parse_tree` may consume additional trees
    // (e.g. the body of a `$(...)` repetition).
    let mut trees = input.trees().peekable();
    while let Some(tree) = trees.next() {
        let tree = parse_tree(
            tree,
            &mut trees,
            expect_matchers,
            sess,
            features,
            attrs,
            edition,
            macro_node_id,
        );
        match tree {
            // In matcher position, a metavariable must be followed by `:` and a
            // fragment-specifier identifier (e.g. `$id:ident`).
            TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                let span = match trees.next() {
                    Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
                        Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
                            // Valid `$name:kind` declaration: record it and move on.
                            Some((kind, _)) => {
                                let span = end_sp.with_lo(start_sp.lo());
                                result.push(TokenTree::MetaVarDecl(span, ident, kind));
                                continue;
                            }
                            // The token after `:` was not an identifier.
                            _ => end_sp,
                        },
                        // The tree after `:` was not a token (or the stream ended);
                        // fall back to the span of the `:` itself.
                        tree => tree
                            .as_ref()
                            .map(tokenstream::TokenTree::span)
                            .unwrap_or(span),
                    },
                    // No `:` after the metavariable (or the stream ended); fall back to
                    // the metavariable's own span.
                    tree => tree
                        .as_ref()
                        .map(tokenstream::TokenTree::span)
                        .unwrap_or(start_sp),
                };
                // Missing fragment specifier: remember the span for later diagnostics and
                // insert an invalid declaration as a placeholder.
                sess.missing_fragment_specifiers.borrow_mut().insert(span);
                result.push(TokenTree::MetaVarDecl(
                    span,
                    ident,
                    ast::Ident::invalid(),
                ));
            }
            // Not a metavariable declaration; keep the tree as-is.
            _ => result.push(tree),
        }
    }
    result
}
/// Takes a single `tokenstream::TokenTree` and converts it into a `self::TokenTree`,
/// possibly consuming more trees from `trees` (e.g. for `$(...)` repetitions or
/// `$ident` metavariables).
///
/// # Parameters
///
/// - `tree`: the tree to convert.
/// - `trees`: the remaining trees; more may be consumed to finish converting `tree`.
/// - `expect_matchers`: same as for `parse` (see it for details).
/// - `sess`: the parsing session; errors are emitted to it.
/// - `features`, `attrs`: feature flags and attributes, forwarded to sub-parsers.
/// - `edition`: the edition of the defining crate (affects `?` repetition handling).
/// - `macro_node_id`: the `NodeId` of the macro, used for buffering early lints.
fn parse_tree<I>(
    tree: tokenstream::TokenTree,
    trees: &mut Peekable<I>,
    expect_matchers: bool,
    sess: &ParseSess,
    features: &Features,
    attrs: &[ast::Attribute],
    edition: Edition,
    macro_node_id: NodeId,
) -> TokenTree
where
    I: Iterator<Item = tokenstream::TokenTree>,
{
    // Depending on what `tree` is, we could be parsing different parts of a macro.
    match tree {
        // `tree` is a `$` token; look at the next tree to decide what it introduces.
        tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
            // `$` followed by a delimited group: the start of a repetition sequence,
            // e.g. `$(pat)*`.
            Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
                // Must be `(`, not `{` or `[` (error is reported but parsing continues).
                if delim != token::Paren {
                    let tok = pprust::token_to_string(&token::OpenDelim(delim));
                    let msg = format!("expected `(`, found `{}`", tok);
                    sess.span_diagnostic.span_err(span.entire(), &msg);
                }
                // Parse the contents of the repetition body itself.
                let sequence = parse(
                    tts.into(),
                    expect_matchers,
                    sess,
                    features,
                    attrs,
                    edition,
                    macro_node_id,
                );
                // Parse the optional separator and the Kleene operator that follow.
                let (separator, op) =
                    parse_sep_and_kleene_op(
                        trees,
                        span.entire(),
                        sess,
                        features,
                        attrs,
                        edition,
                        macro_node_id,
                    );
                // Count the named metavariables captured inside the sequence.
                let name_captures = macro_parser::count_names(&sequence);
                TokenTree::Sequence(
                    span,
                    Lrc::new(SequenceRepetition {
                        tts: sequence,
                        separator,
                        op,
                        num_captures: name_captures,
                    }),
                )
            }
            // `$` followed by an identifier: either the special `$crate` metavariable
            // (rewritten to a `DollarCrate` ident token) or an ordinary `$meta_var`.
            Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
                let (ident, is_raw) = token.ident().unwrap();
                let span = ident_span.with_lo(span.lo());
                if ident.name == kw::Crate && !is_raw {
                    let ident = ast::Ident::new(kw::DollarCrate, ident.span);
                    TokenTree::Token(span, token::Ident(ident, is_raw))
                } else {
                    TokenTree::MetaVar(span, ident)
                }
            }
            // `$` followed by some other token: an error; recover with an invalid metavar.
            Some(tokenstream::TokenTree::Token(span, tok)) => {
                let msg = format!(
                    "expected identifier, found `{}`",
                    pprust::token_to_string(&tok)
                );
                sess.span_diagnostic.span_err(span, &msg);
                TokenTree::MetaVar(span, ast::Ident::invalid())
            }
            // No more trees: keep the lone `$` as an ordinary token.
            None => TokenTree::Token(span, token::Dollar),
        },
        // Any other single token passes through unchanged.
        tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),
        // A delimited group: descend and parse its contents recursively.
        tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
            span,
            Lrc::new(Delimited {
                delim: delim,
                tts: parse(
                    tts.into(),
                    expect_matchers,
                    sess,
                    features,
                    attrs,
                    edition,
                    macro_node_id,
                ),
            }),
        ),
    }
}
/// Maps a token to its corresponding Kleene operator, if any
/// (`*` → `ZeroOrMore`, `+` → `OneOrMore`, `?` → `ZeroOrOne`).
fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
    let op = match *token {
        token::BinOp(token::Star) => KleeneOp::ZeroOrMore,
        token::BinOp(token::Plus) => KleeneOp::OneOrMore,
        token::Question => KleeneOp::ZeroOrOne,
        _ => return None,
    };
    Some(op)
}
/// Attempts to read a single Kleene operator from `input`.
///
/// Returns `Ok(Ok((op, span)))` on success. If the next tree is a token but not a Kleene
/// operator, returns `Ok(Err((token, span)))` so the caller can treat it as a separator
/// candidate. If the next tree is not a token at all (or the stream is exhausted),
/// returns `Err(span)` with the best available span for error reporting.
fn parse_kleene_op<I>(
    input: &mut I,
    span: Span,
) -> Result<Result<(KleeneOp, Span), (token::Token, Span)>, Span>
where
    I: Iterator<Item = tokenstream::TokenTree>,
{
    match input.next() {
        Some(tokenstream::TokenTree::Token(span, tok)) => {
            if let Some(op) = kleene_op(&tok) {
                Ok(Ok((op, span)))
            } else {
                Ok(Err((tok, span)))
            }
        }
        // Not a token: report the tree's own span.
        Some(tree) => Err(tree.span()),
        // Stream exhausted: fall back to the span the caller gave us.
        None => Err(span),
    }
}
/// Parses the optional separator and the Kleene operator that follow a repetition body,
/// dispatching on `edition` because `?` has different meanings in 2015 and 2018.
/// Errors are reported through `sess`.
fn parse_sep_and_kleene_op<I>(
    input: &mut Peekable<I>,
    span: Span,
    sess: &ParseSess,
    features: &Features,
    attrs: &[ast::Attribute],
    edition: Edition,
    macro_node_id: NodeId,
) -> (Option<token::Token>, KleeneOp)
where
    I: Iterator<Item = tokenstream::TokenTree>,
{
    match edition {
        Edition::Edition2018 => parse_sep_and_kleene_op_2018(input, span, sess, features, attrs),
        Edition::Edition2015 => {
            // 2015 additionally needs the macro's NodeId to buffer the `?`-separator lint.
            parse_sep_and_kleene_op_2015(input, span, sess, features, attrs, macro_node_id)
        }
    }
}
/// Parses the separator and Kleene operator in the 2015 edition, where `?` is not a
/// repetition operator: a bare `?` operator is an error, and `?` as a separator is
/// accepted but buffers a deprecation lint. Errors are emitted to `sess`.
fn parse_sep_and_kleene_op_2015<I>(
    input: &mut Peekable<I>,
    span: Span,
    sess: &ParseSess,
    _features: &Features,
    _attrs: &[ast::Attribute],
    macro_node_id: NodeId,
) -> (Option<token::Token>, KleeneOp)
where
    I: Iterator<Item = tokenstream::TokenTree>,
{
    // We look at up to two token trees here, denoted #1 and #2 below.
    let span = match parse_kleene_op(input, span) {
        // #1 is `*` or `+`: the trivial case, with no separator.
        Ok(Ok((op, _))) if op != KleeneOp::ZeroOrOne => return (None, op),
        // #1 is `?`. In 2015 this cannot be an operator, but it could be a `?` separator
        // followed by a Kleene operator, so peek at #2 to disambiguate.
        Ok(Ok((op, op1_span))) => {
            assert_eq!(op, KleeneOp::ZeroOrOne);
            // If #2 is a Kleene operator, then #1 was a separator.
            let is_1_sep = if let Some(&tokenstream::TokenTree::Token(_, ref tok2)) = input.peek() {
                kleene_op(tok2).is_some()
            } else {
                false
            };
            if is_1_sep {
                // #1 is the separator; #2 should be the Kleene operator.
                match parse_kleene_op(input, span) {
                    // #2 is `?`, which is not a valid operator in 2015.
                    Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
                        sess.span_diagnostic
                            .struct_span_err(op2_span, "expected `*` or `+`")
                            .note("`?` is not a macro repetition operator in the 2015 edition, \
                                but is accepted in the 2018 edition")
                            .emit();
                        // Return a dummy so parsing can continue.
                        return (None, KleeneOp::ZeroOrMore);
                    }
                    // #2 is `*` or `+`: accept `?` as a separator, but lint it as deprecated.
                    Ok(Ok((op, _))) => {
                        sess.buffer_lint(
                            BufferedEarlyLintId::QuestionMarkMacroSep,
                            op1_span,
                            macro_node_id,
                            "using `?` as a separator is deprecated and will be \
                             a hard error in an upcoming edition",
                        );
                        return (Some(token::Question), op);
                    }
                    // #2 is some other token: error, reported at #1's span below.
                    Ok(Err((_, _))) => op1_span,
                    // #2 is not a token at all: same treatment.
                    Err(_) => op1_span,
                }
            } else {
                // A lone `?` operator, which 2015 does not allow.
                sess.span_diagnostic
                    .struct_span_err(op1_span, "expected `*` or `+`")
                    .note("`?` is not a macro repetition operator in the 2015 edition, \
                        but is accepted in the 2018 edition")
                    .emit();
                // Return a dummy so parsing can continue.
                return (None, KleeneOp::ZeroOrMore);
            }
        }
        // #1 is some other token: treat it as a separator and expect #2 to be a Kleene op.
        Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
            // #2 is `?`, which is not a valid operator in 2015.
            Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
                sess.span_diagnostic
                    .struct_span_err(op2_span, "expected `*` or `+`")
                    .note("`?` is not a macro repetition operator in the 2015 edition, \
                        but is accepted in the 2018 edition")
                    .emit();
                // Return a dummy so parsing can continue.
                return (None, KleeneOp::ZeroOrMore);
            }
            // #2 is a valid Kleene operator; #1 is the separator.
            Ok(Ok((op, _))) => return (Some(tok), op),
            // #2 is some other token: error at its span.
            Ok(Err((_, span))) => span,
            // #2 is not a token at all: error at the best available span.
            Err(span) => span,
        },
        // #1 is not a token at all.
        Err(span) => span,
    };
    // Reaching this point means we hit an "unexpected token" case above.
    sess.span_diagnostic.span_err(span, "expected `*` or `+`");
    (None, KleeneOp::ZeroOrMore)
}
/// Parses the separator and Kleene operator in the 2018 edition, where `?` is a valid
/// repetition operator (but takes no separator). Errors are emitted to `sess`.
fn parse_sep_and_kleene_op_2018<I>(
    input: &mut Peekable<I>,
    span: Span,
    sess: &ParseSess,
    _features: &Features,
    _attrs: &[ast::Attribute],
) -> (Option<token::Token>, KleeneOp)
where
    I: Iterator<Item = tokenstream::TokenTree>,
{
    // We look at up to two token trees here, denoted #1 and #2 below.
    let span = match parse_kleene_op(input, span) {
        // #1 is `*`, `+`, or `?`: all three are valid operators in 2018, with no
        // separator. (The original code had a separate guarded arm for `ZeroOrOne`
        // returning the identical value; the two arms are merged here.)
        Ok(Ok((op, _))) => return (None, op),
        // #1 is some other token: treat it as a separator and expect #2 to be a Kleene op.
        Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
            // #2 is `?`: `?` repetitions cannot have a separator.
            Ok(Ok((op, _op2_span))) if op == KleeneOp::ZeroOrOne => {
                sess.span_diagnostic.span_err(
                    span,
                    "the `?` macro repetition operator does not take a separator",
                );
                // Return a dummy so parsing can continue.
                return (None, KleeneOp::ZeroOrMore);
            }
            // #2 is a valid Kleene operator; #1 is the separator.
            Ok(Ok((op, _))) => return (Some(tok), op),
            // #2 is some other token: error at its span.
            Ok(Err((_, span))) => span,
            // #2 is not a token at all: error at the best available span.
            Err(span) => span,
        },
        // #1 is not a token at all.
        Err(span) => span,
    };
    // Reaching this point means we hit an "unexpected token" case above.
    sess.span_diagnostic
        .span_err(span, "expected one of: `*`, `+`, or `?`");
    (None, KleeneOp::ZeroOrMore)
}