#![allow(unused_variables)]
#![allow(unused_mut)]
#![allow(dead_code)]
use crate::ast;
use crate::ast::*;
use crate::lexer::{Lexer, TokenAndSpan};
use crate::token::*;
use moore_common::{errors::*, name::*, source::*, util::HasSpan};
use std;
use std::collections::VecDeque;
/// Result of a parse step that hands the diagnostic back to the caller.
type ParseResult<T> = Result<T, DiagBuilder2>;
/// Result of a parse step whose diagnostic has already been emitted via `add_diag`.
type ReportedResult<T> = Result<T, ()>;
/// Minimal token-stream interface shared by the main parser and any
/// speculative/branch parsers: lookahead, consumption, error recovery, and
/// diagnostic reporting.
trait AbstractParser {
/// Look at the token `offset` positions ahead without consuming input.
fn peek(&mut self, offset: usize) -> TokenAndSpan;
/// Consume the current token.
fn bump(&mut self);
/// Consume the current token while recovering from an error.
fn skip(&mut self);
/// Number of tokens consumed so far.
fn consumed(&self) -> usize;
/// Span of the most recently consumed token.
fn last_span(&self) -> Span;
/// Record (and possibly print) a diagnostic.
fn add_diag(&mut self, diag: DiagBuilder2);
/// Highest severity reported so far.
fn severity(&self) -> Severity;
/// Consume a plain or escaped identifier if one is next; `None` otherwise.
fn try_eat_ident(&mut self) -> Option<(Name, Span)> {
match self.peek(0) {
(Ident(name), span) => {
self.bump();
Some((name, span))
}
(EscIdent(name), span) => {
self.bump();
Some((name, span))
}
_ => None,
}
}
/// Consume an identifier, or return the error diagnostic to the caller
/// (the caller decides whether to report it).
fn eat_ident_or(&mut self, msg: &str) -> ParseResult<(Name, Span)> {
match self.peek(0) {
(Ident(name), span) => {
self.bump();
Ok((name, span))
}
(EscIdent(name), span) => {
self.bump();
Ok((name, span))
}
(tkn, span) => {
Err(DiagBuilder2::error(format!("Expected {} before `{}`", msg, tkn)).span(span))
}
}
}
/// Consume an identifier, reporting the error immediately on failure.
fn eat_ident(&mut self, msg: &str) -> ReportedResult<(Name, Span)> {
match self.peek(0) {
(Ident(name), span) => {
self.bump();
Ok((name, span))
}
(EscIdent(name), span) => {
self.bump();
Ok((name, span))
}
(tkn, span) => {
self.add_diag(
DiagBuilder2::error(format!("Expected {} before `{}`", msg, tkn)).span(span),
);
Err(())
}
}
}
/// Check whether the current token is a plain or escaped identifier.
fn is_ident(&mut self) -> bool {
match self.peek(0).0 {
Ident(_) | EscIdent(_) => true,
_ => false,
}
}
/// Consume the expected token, or return an error diagnostic to the caller.
fn require(&mut self, expect: Token) -> Result<(), DiagBuilder2> {
match self.peek(0) {
(actual, _) if actual == expect => {
self.bump();
Ok(())
}
(wrong, span) => Err(DiagBuilder2::error(format!(
"Expected `{}`, but found `{}` instead",
expect, wrong
))
.span(span)),
}
}
/// Like `require`, but reports the diagnostic immediately.
fn require_reported(&mut self, expect: Token) -> ReportedResult<()> {
match self.require(expect) {
Ok(x) => Ok(x),
Err(e) => {
self.add_diag(e);
Err(())
}
}
}
/// Consume the token if it is next; return whether it was consumed.
fn try_eat(&mut self, expect: Token) -> bool {
match self.peek(0) {
(actual, _) if actual == expect => {
self.bump();
true
}
_ => false,
}
}
/// Skip ahead to one of `terminators`, but only match a terminator at the
/// current nesting depth: delimiters opened along the way must be closed
/// first. Mismatched or stray closing delimiters abort with a fatal diag.
fn recover_balanced(&mut self, terminators: &[Token], eat_terminator: bool) {
let mut stack = Vec::new();
loop {
let (tkn, sp) = self.peek(0);
// Terminators only count when no delimiter is currently open.
if stack.is_empty() {
for t in terminators {
if *t == tkn {
if eat_terminator {
self.skip();
}
return;
}
}
}
match tkn {
OpenDelim(x) => stack.push(x),
CloseDelim(x) => {
if let Some(open) = stack.pop() {
if open != x {
self.add_diag(DiagBuilder2::fatal(format!("Found closing `{}` which is not the complement to the previous opening `{}`", CloseDelim(x), OpenDelim(open))).span(sp));
break;
}
} else {
self.add_diag(
DiagBuilder2::fatal(format!(
"Found closing `{}` without an earlier opening `{}`",
CloseDelim(x),
OpenDelim(x)
))
.span(sp),
);
break;
}
}
Eof => break,
_ => (),
}
self.skip();
}
}
/// Whether a fatal diagnostic has been reported.
fn is_fatal(&self) -> bool {
self.severity() >= Severity::Fatal
}
/// Whether an error (or worse) has been reported.
fn is_error(&self) -> bool {
self.severity() >= Severity::Error
}
/// Assert that the next token is one of `tokens` without consuming it;
/// report an error otherwise.
fn anticipate(&mut self, tokens: &[Token]) -> ReportedResult<()> {
let (tkn, sp) = self.peek(0);
for t in tokens {
if *t == tkn {
return Ok(());
}
}
self.add_diag(
DiagBuilder2::error(format!(
"Expected {:?}, but found {:?} instead",
tokens, tkn
))
.span(sp),
);
Err(())
}
}
/// Concrete parser over a lexer, with a token lookahead queue and the
/// diagnostics collected so far.
struct Parser<'a> {
// Token source.
input: Lexer<'a>,
// Lookahead buffer; ends with `Eof` once the lexer is exhausted.
queue: VecDeque<TokenAndSpan>,
// All diagnostics reported through `add_diag`.
diagnostics: Vec<DiagBuilder2>,
// Span of the most recently consumed token.
last_span: Span,
// Highest severity seen among the diagnostics.
severity: Severity,
// Number of tokens consumed.
consumed: usize,
}
impl<'a> AbstractParser for Parser<'a> {
    /// Look ahead `offset` tokens without consuming input. Beyond the end of
    /// input this saturates to the trailing `Eof` token in the queue.
    fn peek(&mut self, offset: usize) -> TokenAndSpan {
        self.ensure_queue_filled(offset);
        if offset < self.queue.len() {
            self.queue[offset]
        } else {
            *self
                .queue
                .back()
                .expect("At least an Eof token should be in the queue")
        }
    }
    /// Consume the current token, updating `last_span` and the consumed count.
    fn bump(&mut self) {
        if self.queue.is_empty() {
            self.ensure_queue_filled(1);
        }
        if let Some((_, sp)) = self.queue.pop_front() {
            self.last_span = sp;
            self.consumed += 1;
        }
    }
    /// Skipping during recovery is the same as consuming for this parser.
    fn skip(&mut self) {
        self.bump()
    }
    fn consumed(&self) -> usize {
        self.consumed
    }
    fn last_span(&self) -> Span {
        self.last_span
    }
    /// Print the diagnostic to stderr immediately, raise the severity
    /// high-water mark if needed, and record the diagnostic.
    fn add_diag(&mut self, diag: DiagBuilder2) {
        // Idiom fix: `eprintln!()` instead of `eprintln!("")` — both emit a
        // single blank line separating consecutive diagnostics.
        eprintln!();
        eprintln!("{}", diag);
        if diag.get_severity() > self.severity {
            self.severity = diag.get_severity();
        }
        self.diagnostics.push(diag);
    }
    fn severity(&self) -> Severity {
        self.severity
    }
}
impl<'a> Parser<'a> {
    /// Create a new parser over `input` with an empty lookahead queue.
    fn new(input: Lexer) -> Parser {
        Parser {
            // Field-init shorthand (clippy: redundant_field_names).
            input,
            queue: VecDeque::new(),
            diagnostics: Vec::new(),
            last_span: INVALID_SPAN,
            severity: Severity::Note,
            consumed: 0,
        }
    }
    /// Pull tokens from the lexer until more than `min_tokens` are queued or
    /// an `Eof` has been queued. Lexer errors are reported and skipped.
    fn ensure_queue_filled(&mut self, min_tokens: usize) {
        // Once Eof is queued there is nothing more to pull.
        if let Some(&(Eof, _)) = self.queue.back() {
            return;
        }
        while self.queue.len() <= min_tokens {
            match self.input.next_token() {
                // The original had a separate arm for `Ok((Eof, sp))` that
                // performed the identical push; the arms are collapsed here.
                Ok(tkn) => self.queue.push_back(tkn),
                Err(x) => self.add_diag(x),
            }
        }
    }
}
/// Parse `inner` enclosed in a pair of `delim` delimiters. On any failure,
/// recover past the closing delimiter before propagating the error.
fn flanked<R, F>(p: &mut dyn AbstractParser, delim: DelimToken, mut inner: F) -> ReportedResult<R>
where
    F: FnMut(&mut dyn AbstractParser) -> ReportedResult<R>,
{
    p.require_reported(OpenDelim(delim))?;
    // Run the inner parser, then demand the closing delimiter.
    let result = inner(p).and_then(|value| p.require_reported(CloseDelim(delim)).map(|_| value));
    // Any failure (inner or closing delimiter) triggers the same recovery.
    if result.is_err() {
        p.recover_balanced(&[CloseDelim(delim)], true);
    }
    result
}
/// Like `flanked`, but only if the opening delimiter is actually next.
/// Returns `Ok(None)` when the construct is absent.
fn try_flanked<R, F>(
    p: &mut dyn AbstractParser,
    delim: DelimToken,
    inner: F,
) -> ReportedResult<Option<R>>
where
    F: FnMut(&mut dyn AbstractParser) -> ReportedResult<R>,
{
    // Guard clause: no opening delimiter means no flanked construct.
    if p.peek(0).0 != OpenDelim(delim) {
        return Ok(None);
    }
    flanked(p, delim, inner).map(Some)
}
/// Parse a comma-separated list of `item`s until the `term` predicate
/// matches. Warns on a superfluous trailing comma and recovers to the
/// terminator on error. The resulting list may be empty.
fn comma_list<R, F, T>(
p: &mut dyn AbstractParser,
mut term: T,
msg: &str,
mut item: F,
) -> ReportedResult<Vec<R>>
where
F: FnMut(&mut dyn AbstractParser) -> ReportedResult<R>,
T: Predicate,
{
let mut v = Vec::new();
// Stop on fatal errors, end of input, or the terminator.
while !p.is_fatal() && p.peek(0).0 != Eof && !term.matches(p) {
match item(p) {
Ok(x) => v.push(x),
Err(e) => {
term.recover(p, false);
return Err(e);
}
}
if term.matches(p) {
break;
} else if p.try_eat(Comma) {
// Comma directly followed by the terminator: tolerated with a warning.
if term.matches(p) {
let q = p.last_span();
p.add_diag(DiagBuilder2::warning("Superfluous trailing comma").span(q));
break;
}
} else {
// Neither a comma nor the terminator after an item: report and recover.
let sp = p.peek(0).1;
p.add_diag(
DiagBuilder2::error(format!("Expected , or {} after {}", term.describe(), msg))
.span(sp),
);
term.recover(p, false);
return Err(());
}
}
Ok(v)
}
fn comma_list_nonempty<R, F, T>(
p: &mut dyn AbstractParser,
term: T,
msg: &str,
item: F,
) -> ReportedResult<Vec<R>>
where
F: FnMut(&mut dyn AbstractParser) -> ReportedResult<R>,
T: Predicate,
{
let q = p.peek(0).1;
let v = comma_list(p, term, msg, item)?;
if v.is_empty() {
p.add_diag(DiagBuilder2::error(format!("Expected at least one {}", msg)).span(q));
Err(())
} else {
Ok(v)
}
}
/// Parse `item` repeatedly until `term` (or end of input) is next. A failed
/// item recovers to the terminator and ends the list; the items parsed so
/// far are still returned.
fn repeat_until<R, F>(
    p: &mut dyn AbstractParser,
    term: Token,
    mut item: F,
) -> ReportedResult<Vec<R>>
where
    F: FnMut(&mut dyn AbstractParser) -> ReportedResult<R>,
{
    let mut items = Vec::new();
    loop {
        let next = p.peek(0).0;
        if next == term || next == Eof {
            break;
        }
        if let Ok(parsed) = item(p) {
            items.push(parsed);
        } else {
            p.recover_balanced(&[term], false);
            break;
        }
    }
    Ok(items)
}
/// Run `item`; on failure, recover to (but do not consume) `term` before
/// propagating the error.
fn recovered<R, F>(p: &mut dyn AbstractParser, term: Token, mut item: F) -> ReportedResult<R>
where
    F: FnMut(&mut dyn AbstractParser) -> ReportedResult<R>,
{
    let result = item(p);
    if result.is_err() {
        p.recover_balanced(&[term], false);
    }
    result
}
#[allow(dead_code)]
/// Speculatively run `parse` on a branch parser. On success the consumed
/// tokens are committed to `p`; on failure `p` is left untouched and `None`
/// is returned.
fn r#try<R, F>(p: &mut dyn AbstractParser, mut parse: F) -> Option<R>
where
    F: FnMut(&mut dyn AbstractParser) -> ReportedResult<R>,
{
    let mut branch = BranchParser::new(p);
    if let Ok(result) = parse(&mut branch) {
        branch.commit();
        Some(result)
    } else {
        None
    }
}
fn parse_identifier<M: std::fmt::Display>(
p: &mut dyn AbstractParser,
msg: M,
) -> ReportedResult<ast::Identifier> {
let (tkn, span) = p.peek(0);
match tkn {
Ident(n) | EscIdent(n) => {
p.bump();
Ok(ast::Identifier {
id: DUMMY_NODE_ID,
span: span,
name: n,
})
}
x => {
p.add_diag(
DiagBuilder2::error(format!("expected {}, but found {} instead", msg, x))
.span(span),
);
Err(())
}
}
}
/// Parse an identifier if one is next; otherwise return `Ok(None)` without
/// consuming anything or reporting an error.
fn try_identifier(p: &mut dyn AbstractParser) -> ReportedResult<Option<ast::Identifier>> {
    match p.peek(0) {
        (Ident(name), span) | (EscIdent(name), span) => {
            p.bump();
            Ok(Some(ast::Identifier {
                id: DUMMY_NODE_ID,
                span,
                name,
            }))
        }
        _ => Ok(None),
    }
}
/// A list-terminator test used by the list parsers: decides when a list
/// ends, how to recover after an error, and how to name the terminator in
/// diagnostics.
trait Predicate {
/// Return `true` if the parser currently sits on the terminator.
fn matches(&mut self, _: &mut dyn AbstractParser) -> bool;
/// Skip ahead to the terminator, optionally consuming it.
fn recover(&mut self, _: &mut dyn AbstractParser, consume: bool);
/// Human-readable description of the terminator for error messages.
fn describe(&self) -> String;
}
/// A single token acts as its own list terminator.
impl Predicate for Token {
fn matches(&mut self, p: &mut dyn AbstractParser) -> bool {
p.peek(0).0 == *self
}
fn recover(&mut self, p: &mut dyn AbstractParser, consume: bool) {
p.recover_balanced(&[*self], consume)
}
fn describe(&self) -> String {
self.as_str().into()
}
}
/// A `Predicate` built from a pair of closures plus a static description,
/// for terminators more complex than a single token.
struct FuncPredicate<
M: FnMut(&mut dyn AbstractParser) -> bool,
R: FnMut(&mut dyn AbstractParser, bool),
> {
// Returns `true` when the terminator is next.
match_func: M,
// Skips ahead to the terminator; the flag selects whether to consume it.
recover_func: R,
// Description used in diagnostics.
desc: &'static str,
}
/// Dispatch the `Predicate` operations to the stored closures.
impl<M: FnMut(&mut dyn AbstractParser) -> bool, R: FnMut(&mut dyn AbstractParser, bool)> Predicate
for FuncPredicate<M, R>
{
fn matches(&mut self, p: &mut dyn AbstractParser) -> bool {
(self.match_func)(p)
}
fn recover(&mut self, p: &mut dyn AbstractParser, consume: bool) {
(self.recover_func)(p, consume)
}
fn describe(&self) -> String {
self.desc.into()
}
}
/// Entry point: parse the token stream produced by `input` into a `Root`.
/// Returns `Err(())` if any error-level diagnostic was reported; the
/// diagnostics themselves were already printed during parsing.
pub fn parse(input: Lexer) -> Result<Root, ()> {
    let mut parser = Parser::new(input);
    let root = parse_source_text(&mut parser);
    if parser.is_error() {
        return Err(());
    }
    Ok(root)
}
/// Parse an entire source text: optional timeunit declarations followed by
/// top-level items. Items that fail to parse are skipped; parsing continues
/// until end of input or a fatal diagnostic.
fn parse_source_text(p: &mut Parser) -> Root {
    let mut root = Root {
        timeunits: Timeunit {
            unit: None,
            prec: None,
        },
        items: Vec::new(),
    };
    // Optional `timeunit`/`timeprecision` declarations at the very top.
    if let Ok(timeunits) = parse_time_units(p) {
        root.timeunits = timeunits;
    }
    // Note: `is_fatal` is checked before `peek` on purpose, matching the
    // original evaluation order.
    while !p.is_fatal() && p.peek(0).0 != Eof {
        if let Ok(item) = parse_item(p) {
            root.items.push(item);
        }
    }
    root
}
/// Parse any number of `timeunit`/`timeprecision` declarations. A
/// `timeunit T / P;` form sets both the unit and the precision. Later
/// declarations overwrite earlier ones.
fn parse_time_units(p: &mut dyn AbstractParser) -> ReportedResult<Timeunit> {
let mut unit = None;
let mut prec = None;
while p.peek(0).0 == Keyword(Kw::Timeunit) || p.peek(0).0 == Keyword(Kw::Timeprecision) {
// On failure, recover to the trailing semicolon of this declaration.
recovered(p, Semicolon, |p| {
if p.try_eat(Keyword(Kw::Timeunit)) {
unit = Some(parse_time_literal(p)?);
// Optional `/ precision` after the unit.
if p.try_eat(Operator(Op::Div)) {
prec = Some(parse_time_literal(p)?);
}
} else if p.try_eat(Keyword(Kw::Timeprecision)) {
prec = Some(parse_time_literal(p)?);
} else {
// The loop condition guarantees one of the two keywords is next.
unreachable!();
}
Ok(())
})?;
p.require_reported(Semicolon)?;
}
Ok(Timeunit { unit, prec })
}
fn parse_time_literal(p: &mut dyn AbstractParser) -> ReportedResult<Spanned<Lit>> {
let (tkn, sp) = p.peek(0);
match tkn {
Literal(lit @ Time(..)) => {
p.bump();
Ok(Spanned::new(lit, sp))
}
_ => {
p.add_diag(
DiagBuilder2::error(format!("expected time literal, instead got `{}`", tkn))
.span(sp),
);
Err(())
}
}
}
/// Parse one top-level item (module, interface, package, class, import, or
/// typedef). On an unexpected token, report an error and recover to the next
/// plausible item keyword.
fn parse_item(p: &mut Parser) -> ReportedResult<ast::Item> {
let (tkn, sp) = p.peek(0);
match tkn {
Keyword(Kw::Module) => parse_module_decl(p).map(|d| ast::Item::Module(d)),
Keyword(Kw::Interface) => parse_interface_decl(p).map(|d| ast::Item::Interface(d)),
Keyword(Kw::Package) => parse_package_decl(p).map(|d| ast::Item::Package(d)),
Keyword(Kw::Class) => parse_class_decl(p).map(|d| ast::Item::Class(d)),
Keyword(Kw::Import) => {
parse_import_decl(p).map(|i| ast::Item::Item(HierarchyItem::ImportDecl(i)))
}
Keyword(Kw::Typedef) => {
parse_typedef(p).map(|d| ast::Item::Item(HierarchyItem::Typedef(d)))
}
tkn => {
p.add_diag(DiagBuilder2::error(format!("Expected module, interface, package, program, class, import, or typedef, instead got `{}`", tkn)).span(sp));
// Skip ahead to the next token that could start an item.
p.recover_balanced(
&[
Keyword(Kw::Module),
Keyword(Kw::Interface),
Keyword(Kw::Package),
Keyword(Kw::Program),
Keyword(Kw::Class),
Keyword(Kw::Typedef),
],
false,
);
Err(())
}
}
}
fn as_lifetime(tkn: Token) -> Option<Lifetime> {
match tkn {
Keyword(Kw::Static) => Some(Lifetime::Static),
Keyword(Kw::Automatic) => Some(Lifetime::Automatic),
_ => None,
}
}
/// Parse an `interface … endinterface` declaration, including optional
/// lifetime, parameter port list, port list, and the hierarchy items in the
/// body. On failure, recovery targets the `endinterface` keyword.
fn parse_interface_decl(p: &mut Parser) -> ReportedResult<IntfDecl> {
let mut span = p.peek(0).1;
p.require_reported(Keyword(Kw::Interface))?;
let result = recovered(p, Keyword(Kw::Endinterface), |p| {
// Optional lifetime specifier; defaults to `static`.
let lifetime = match as_lifetime(p.peek(0).0) {
Some(l) => {
p.bump();
l
}
None => Lifetime::Static,
};
let (name, name_sp) = p.eat_ident("interface name")?;
// Optional `#(...)` parameter port list.
let param_ports = if p.try_eat(Hashtag) {
parse_parameter_port_list(p)?
} else {
Vec::new()
};
// Optional `(...)` port list.
let ports = if p.try_eat(OpenDelim(Paren)) {
parse_port_list(p)?
} else {
Vec::new()
};
// A missing semicolon after the header is reported but not fatal.
if !p.try_eat(Semicolon) {
let q = p.peek(0).1.end();
p.add_diag(
DiagBuilder2::error(format!(
"Missing semicolon \";\" after header of interface \"{}\"",
name
))
.span(q),
);
}
// Body: hierarchy items until `endinterface`; stray semicolons skipped.
let mut items = Vec::new();
while !p.is_fatal() && p.peek(0).0 != Keyword(Kw::Endinterface) && p.peek(0).0 != Eof {
if p.try_eat(Semicolon) {
continue;
}
items.push(parse_hierarchy_item(p)?);
}
span.expand(p.last_span());
Ok(IntfDecl {
id: DUMMY_NODE_ID,
span: span,
lifetime: lifetime,
name: name,
name_span: name_sp,
params: param_ports,
ports: ports,
items: items,
})
});
p.require_reported(Keyword(Kw::Endinterface))?;
result
}
/// Parse a `#(...)` parameter port list. Each entry may be a type parameter
/// (`type T = …`) or a value parameter with explicit or implicit type.
fn parse_parameter_port_list(p: &mut dyn AbstractParser) -> ReportedResult<Vec<ParamDecl>> {
// `local` persists across list entries: a bare entry without a
// `parameter`/`localparam` keyword inherits the previous entry's locality.
let mut local = false;
flanked(p, Paren, |p| {
comma_list(p, CloseDelim(Paren), "parameter port", |p| {
let mut outer_span = p.peek(0).1;
// Optional `parameter`/`localparam` keyword updates the locality.
match p.peek(0).0 {
Keyword(Kw::Parameter) => {
p.bump();
local = false;
}
Keyword(Kw::Localparam) => {
p.bump();
local = true;
}
_ => (),
};
let kind = if p.try_eat(Keyword(Kw::Type)) {
// Type parameter: `type name [= explicit_type]`.
let mut span = p.peek(0).1;
let name = parse_identifier(p, "parameter name")?;
let ty = if p.try_eat(Operator(Op::Assign)) {
Some(parse_explicit_type(p)?)
} else {
None
};
p.anticipate(&[Comma, CloseDelim(Paren)])?;
span.expand(p.last_span());
ast::ParamKind::Type(vec![ast::ParamTypeDecl {
span: span,
name: name,
ty: ty,
}])
} else {
// Value parameter: try explicit and implicit type in parallel and
// keep whichever interpretation succeeds.
let mut pp = ParallelParser::new();
pp.add("explicit type", |p| {
let ty = parse_explicit_type(p)?;
tail(p, ty)
});
pp.add("implicit type", |p| {
let ty = parse_implicit_type(p)?;
tail(p, ty)
});
// Shared continuation: name, dimensions, optional default value.
fn tail(
p: &mut dyn AbstractParser,
ty: Type,
) -> ReportedResult<ast::ParamValueDecl> {
let mut span = p.peek(0).1;
let name = parse_identifier(p, "parameter name")?;
let (dims, _) = parse_optional_dimensions(p)?;
let expr = if p.try_eat(Operator(Op::Assign)) {
Some(parse_expr(p)?)
} else {
None
};
p.anticipate(&[Comma, CloseDelim(Paren)])?;
span.expand(p.last_span());
Ok(ast::ParamValueDecl {
span: span,
ty: ty,
name: name,
dims: dims,
expr: expr,
})
}
ast::ParamKind::Value(vec![pp.finish(p, "explicit or implicit type")?])
};
outer_span.expand(p.last_span());
Ok(ast::ParamDecl {
span: outer_span,
local: local,
kind: kind,
})
})
})
}
/// Parse a constant expression. Currently parsed like any other expression,
/// with the resulting AST discarded.
fn parse_constant_expr(p: &mut dyn AbstractParser) -> ReportedResult<()> {
    parse_expr(p).map(|_| ())
}
/// Parse a `module … endmodule` declaration, including optional lifetime,
/// parameter port list, port list, and the hierarchy items in the body.
/// On failure, recovery targets the `endmodule` keyword.
fn parse_module_decl(p: &mut Parser) -> ReportedResult<ModDecl> {
    let mut span = p.peek(0).1;
    p.require_reported(Keyword(Kw::Module))?;
    let result = recovered(p, Keyword(Kw::Endmodule), |p| {
        // Optional lifetime specifier; defaults to `static`.
        let lifetime = match as_lifetime(p.peek(0).0) {
            Some(l) => {
                p.bump();
                l
            }
            None => Lifetime::Static,
        };
        let (name, name_sp) = p.eat_ident("module name")?;
        // Optional `#(...)` parameter port list.
        let params = if p.try_eat(Hashtag) {
            parse_parameter_port_list(p)?
        } else {
            Vec::new()
        };
        // Optional `(...)` port list.
        let ports = if p.try_eat(OpenDelim(Paren)) {
            parse_port_list(p)?
        } else {
            Vec::new()
        };
        // A missing semicolon after the header is reported but not fatal.
        if !p.try_eat(Semicolon) {
            let q = p.peek(0).1.end();
            p.add_diag(
                DiagBuilder2::error(format!("Missing ; after header of module \"{}\"", name))
                    .span(q),
            );
        }
        // Body: hierarchy items until `endmodule`; stray semicolons skipped.
        let mut items = Vec::new();
        while !p.is_fatal() && p.peek(0).0 != Keyword(Kw::Endmodule) && p.peek(0).0 != Eof {
            if p.try_eat(Semicolon) {
                continue;
            }
            items.push(parse_hierarchy_item(p)?);
        }
        span.expand(p.last_span());
        Ok(ModDecl {
            id: DUMMY_NODE_ID,
            span: span,
            lifetime: lifetime,
            name: name,
            name_span: name_sp,
            params: params,
            ports: ports,
            items: items,
        })
    });
    // Fix: removed the unused `let sp = p.peek(0).1;` that preceded this.
    p.require_reported(Keyword(Kw::Endmodule))?;
    // Optional `: name` end label. NOTE(review): the label is not checked
    // against the module name — confirm whether that check belongs here.
    if p.try_eat(Colon) {
        p.eat_ident("module name")?;
    }
    result
}
/// Parse a `package … endpackage` declaration. On failure, recovery targets
/// the `endpackage` keyword.
fn parse_package_decl(p: &mut dyn AbstractParser) -> ReportedResult<PackageDecl> {
let mut span = p.peek(0).1;
p.require_reported(Keyword(Kw::Package))?;
let result = recovered(p, Keyword(Kw::Endpackage), |p| {
// Optional lifetime specifier; defaults to `static`.
let lifetime = match as_lifetime(p.peek(0).0) {
Some(x) => {
p.bump();
x
}
None => Lifetime::Static,
};
let (name, name_span) = p.eat_ident("package name")?;
p.require_reported(Semicolon)?;
// NOTE(review): timeunits are currently a hard-coded empty placeholder;
// package-level timeunit declarations are not parsed here.
let timeunits = Timeunit {
unit: None,
prec: None,
};
// Body: hierarchy items until `endpackage`; stray semicolons skipped.
let mut items = Vec::new();
while !p.is_fatal() && p.peek(0).0 != Keyword(Kw::Endpackage) && p.peek(0).0 != Eof {
if p.try_eat(Semicolon) {
continue;
}
items.push(parse_hierarchy_item(p)?);
}
span.expand(p.last_span());
Ok(PackageDecl {
id: DUMMY_NODE_ID,
span: span,
lifetime: lifetime,
name: name,
name_span: name_span,
timeunits: timeunits,
items: items,
})
});
p.require_reported(Keyword(Kw::Endpackage))?;
// Optional `: name` end label; the name is parsed but not cross-checked.
if p.try_eat(Colon) {
p.eat_ident("package name")?;
}
result
}
/// Parse a `program … endprogram` declaration. Not implemented: always
/// reports an error and recovers to the closing `endprogram` keyword.
fn parse_program_decl(p: &mut dyn AbstractParser) -> ReportedResult<()> {
p.require_reported(Keyword(Kw::Program))?;
let result = recovered(p, Keyword(Kw::Endprogram), |p| {
let q = p.peek(0).1;
p.add_diag(DiagBuilder2::error("Don't know how to parse program declarations").span(q));
Err(())
});
p.require_reported(Keyword(Kw::Endprogram))?;
result
}
/// Parse one item inside a module/interface/package body, dispatching on the
/// leading keyword. Items that do not start with a known keyword are tried
/// as net declaration, instantiation, or variable declaration in parallel.
fn parse_hierarchy_item(p: &mut dyn AbstractParser) -> ReportedResult<HierarchyItem> {
// Skip an optional `label :` prefix. NOTE(review): the label is consumed
// and discarded — confirm that block labels need not be kept in the AST.
if p.is_ident() && p.peek(1).0 == Colon {
p.bump();
p.bump();
}
match p.peek(0).0 {
Keyword(Kw::Localparam) | Keyword(Kw::Parameter) => {
let decl = parse_param_decl(p, false)?;
p.require_reported(Semicolon)?;
return Ok(HierarchyItem::ParamDecl(decl));
}
Keyword(Kw::Modport) => {
return parse_modport_decl(p).map(|x| HierarchyItem::ModportDecl(x))
}
Keyword(Kw::Class) => return parse_class_decl(p).map(|x| HierarchyItem::ClassDecl(x)),
Keyword(Kw::Typedef) => return parse_typedef(p).map(|x| HierarchyItem::Typedef(x)),
Keyword(Kw::Import) => return parse_import_decl(p).map(|x| HierarchyItem::ImportDecl(x)),
// The procedure kinds all share one parse path.
Keyword(Kw::Initial) => {
return parse_procedure(p, ProcedureKind::Initial).map(|x| HierarchyItem::Procedure(x));
}
Keyword(Kw::Always) => {
return parse_procedure(p, ProcedureKind::Always).map(|x| HierarchyItem::Procedure(x));
}
Keyword(Kw::AlwaysComb) => {
return parse_procedure(p, ProcedureKind::AlwaysComb)
.map(|x| HierarchyItem::Procedure(x));
}
Keyword(Kw::AlwaysLatch) => {
return parse_procedure(p, ProcedureKind::AlwaysLatch)
.map(|x| HierarchyItem::Procedure(x));
}
Keyword(Kw::AlwaysFf) => {
return parse_procedure(p, ProcedureKind::AlwaysFf)
.map(|x| HierarchyItem::Procedure(x));
}
Keyword(Kw::Final) => {
return parse_procedure(p, ProcedureKind::Final).map(|x| HierarchyItem::Procedure(x));
}
Keyword(Kw::Function) | Keyword(Kw::Task) => {
return parse_subroutine_decl(p).map(|x| HierarchyItem::SubroutineDecl(x));
}
Keyword(Kw::Inout) | Keyword(Kw::Input) | Keyword(Kw::Output) | Keyword(Kw::Ref) => {
return parse_port_decl(p).map(|x| HierarchyItem::PortDecl(x));
}
Keyword(Kw::Assign) => {
return parse_continuous_assign(p).map(|x| HierarchyItem::ContAssign(x));
}
Keyword(Kw::Genvar) => {
p.bump();
let decl = comma_list_nonempty(p, Semicolon, "genvar declaration", parse_genvar_decl)?;
p.require_reported(Semicolon)?;
return Ok(HierarchyItem::GenvarDecl(decl));
}
Keyword(Kw::Generate) => {
let mut span = p.peek(0).1;
p.bump();
let items = repeat_until(p, Keyword(Kw::Endgenerate), parse_generate_item)?;
p.require_reported(Keyword(Kw::Endgenerate))?;
span.expand(p.last_span());
return Ok(HierarchyItem::GenerateRegion(span, items));
}
Keyword(Kw::For) => return parse_generate_for(p).map(|x| HierarchyItem::GenerateFor(x)),
Keyword(Kw::If) => return parse_generate_if(p).map(|x| HierarchyItem::GenerateIf(x)),
Keyword(Kw::Case) => return parse_generate_case(p).map(|x| HierarchyItem::GenerateCase(x)),
Keyword(Kw::Assert)
| Keyword(Kw::Assume)
| Keyword(Kw::Cover)
| Keyword(Kw::Expect)
| Keyword(Kw::Restrict) => return parse_assertion(p).map(|x| HierarchyItem::Assertion(x)),
// Stray semicolons are tolerated as empty items.
Semicolon => {
p.bump();
return Ok(HierarchyItem::Dummy);
}
SysIdent(..) => return parse_elab_system_task(p).map(|_| HierarchyItem::Dummy),
_ => (),
}
// No keyword matched: try the remaining ambiguous interpretations in
// parallel, preferring a net declaration when it parses furthest.
let mut pp = ParallelParser::new();
pp.add_greedy("net declaration", |p| {
parse_net_decl(p).map(|d| HierarchyItem::NetDecl(d))
});
pp.add("instantiation", |p| {
parse_inst(p).map(|i| HierarchyItem::Inst(i))
});
pp.add("variable declaration", |p| {
parse_var_decl(p).map(|d| HierarchyItem::VarDecl(d))
});
pp.finish(p, "hierarchy item")
}
/// Consume an elaboration system task (e.g. `$error(...)`) up to and
/// including its semicolon, reporting it as unsupported. The task itself is
/// not interpreted.
fn parse_elab_system_task(p: &mut dyn AbstractParser) -> ReportedResult<()> {
    let mut span = p.peek(0).1;
    // Fix: the original bound the system identifier's name to an unused
    // variable; the invariant check is kept without the dead binding.
    match p.peek(0).0 {
        SysIdent(_) => (),
        // The caller only dispatches here on a `SysIdent` token.
        _ => unreachable!(),
    }
    p.recover_balanced(&[Semicolon], true);
    span.expand(p.last_span());
    p.add_diag(DiagBuilder2::warning("unsupported elaboration system task").span(span));
    Ok(())
}
/// Parse a `localparam` declaration with one or more comma-separated names
/// and optional initializers, terminated by a semicolon. The parsed values
/// are currently discarded.
fn parse_localparam_decl(p: &mut dyn AbstractParser) -> ReportedResult<()> {
p.require_reported(Keyword(Kw::Localparam))?;
loop {
let (name, name_sp) = match p.eat_ident_or("parameter name") {
Ok(x) => x,
Err(e) => {
p.add_diag(e);
return Err(());
}
};
// Optional `= expr` initializer; on failure skip to `,` or `;`.
if p.try_eat(Operator(Op::Assign)) {
match parse_expr(p) {
Ok(_) => (),
Err(_) => p.recover_balanced(&[Comma, Semicolon], false),
}
}
match p.peek(0) {
(Comma, sp) => {
p.bump();
// A comma directly before the semicolon is superfluous.
if p.peek(0).0 == Semicolon {
p.add_diag(DiagBuilder2::warning("Superfluous trailing comma").span(sp));
break;
}
}
(Semicolon, _) => break,
(x, sp) => {
p.add_diag(
DiagBuilder2::error(format!("Expected , or ; after localparam, found {}", x))
.span(sp),
);
return Err(());
}
}
}
p.require_reported(Semicolon)?;
Ok(())
}
/// Parse a `parameter` declaration: an explicit or implicit type (tried in
/// parallel) followed by one or more parameter names. The parsed data is
/// currently discarded.
fn parse_parameter_decl(p: &mut dyn AbstractParser) -> ReportedResult<()> {
    p.require_reported(Keyword(Kw::Parameter))?;
    // Shared continuation after the type: names and terminating semicolon.
    fn tail(p: &mut dyn AbstractParser) -> ReportedResult<()> {
        // Fix: dropped the unused `names` binding.
        parse_parameter_names(p)?;
        p.require_reported(Semicolon)?;
        Ok(())
    }
    let mut pp = ParallelParser::new();
    pp.add("explicit type", |p| {
        let ty = parse_explicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    pp.add("implicit type", |p| {
        let ty = parse_implicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    // Fix: the winning `(ty, ())` was bound to unused variables and the
    // function ended in `return Ok(());` — both replaced with expressions.
    pp.finish(p, "explicit or implicit type")?;
    Ok(())
}
/// Parse a non-empty, comma-separated list of parameter names, each with
/// optional dimensions and an optional `= expr` initializer. The parsed
/// pieces are currently discarded (hence the `Vec<()>` result).
fn parse_parameter_names(p: &mut dyn AbstractParser) -> ReportedResult<Vec<()>> {
    // Fix: the original bound every parsed piece to unused variables; the
    // results are simply discarded here.
    comma_list_nonempty(p, Semicolon, "parameter name", |p| {
        p.eat_ident("parameter name")?;
        parse_optional_dimensions(p)?;
        if p.try_eat(Operator(Op::Assign)) {
            parse_expr(p)?;
        }
        Ok(())
    })
}
/// Parse a `modport` declaration: one or more comma-separated modport items
/// terminated by a semicolon.
fn parse_modport_decl(p: &mut dyn AbstractParser) -> ReportedResult<ast::ModportDecl> {
    let mut span = p.peek(0).1;
    p.require_reported(Keyword(Kw::Modport))?;
    let items = comma_list_nonempty(p, Semicolon, "modport item", parse_modport_item)?;
    p.require_reported(Semicolon)?;
    span.expand(p.last_span());
    Ok(ast::ModportDecl { span, items })
}
/// Parse one modport item: a name followed by a parenthesized list of port
/// declarations.
fn parse_modport_item(p: &mut dyn AbstractParser) -> ReportedResult<ast::ModportItem> {
    let mut span = p.peek(0).1;
    let name = parse_identifier(p, "modport name")?;
    // The port declarations are flanked by parentheses.
    let ports = flanked(p, Paren, |p| {
        comma_list(
            p,
            CloseDelim(Paren),
            "port declaration",
            parse_modport_port_decl,
        )
    })?;
    span.expand(p.last_span());
    Ok(ast::ModportItem { span, name, ports })
}
/// Parse one modport port declaration: a direction keyword followed by one
/// or more ports, each either a plain name or `.name(expr)`. The names and
/// expressions are parsed but discarded; the result is a placeholder.
fn parse_modport_port_decl(p: &mut dyn AbstractParser) -> ReportedResult<ast::ModportPort> {
let (tkn, span) = p.peek(0);
if let Some(dir) = as_port_direction(tkn) {
p.bump();
loop {
// `.name(expr)` explicit port, or a bare port name.
if p.try_eat(Period) {
let name = parse_identifier(p, "port name")?;
p.require_reported(OpenDelim(Paren))?;
let expr = parse_expr(p)?;
p.require_reported(CloseDelim(Paren))?;
} else {
let name = parse_identifier(p, "port name")?;
}
// A comma followed by a keyword starts the next declaration (with its
// own direction), so it ends this one; a plain comma continues it.
match (p.peek(0).0, p.peek(1).0) {
(Comma, Keyword(_)) => break,
(Comma, _) => {
p.bump();
continue;
}
_ => break,
}
}
// NOTE(review): direction, names, and expressions are dropped here — the
// AST placeholder carries no data; confirm this is still intended.
return Ok(ast::ModportPort::Port);
}
if p.try_eat(Keyword(Kw::Clocking)) {
p.add_diag(DiagBuilder2::error("modport clocking declaration not implemented").span(span));
return Err(());
}
p.add_diag(DiagBuilder2::error("Expected port declaration").span(span));
Err(())
}
/// Map a direction keyword (`input`/`output`/`inout`/`ref`) onto a
/// `PortDir`; any other token yields `None`.
fn as_port_direction(tkn: Token) -> Option<PortDir> {
    Some(match tkn {
        Keyword(Kw::Input) => PortDir::Input,
        Keyword(Kw::Output) => PortDir::Output,
        Keyword(Kw::Inout) => PortDir::Inout,
        Keyword(Kw::Ref) => PortDir::Ref,
        _ => return None,
    })
}
/// Parse a data type: first speculatively try an explicit type on a branch
/// parser; if that fails, fall back to an implicit type.
fn parse_data_type(p: &mut dyn AbstractParser) -> ReportedResult<Type> {
    {
        let mut branch = BranchParser::new(p);
        if let Ok(ty) = parse_explicit_type(&mut branch) {
            branch.commit();
            return Ok(ty);
        }
        // Branch dropped without commit: `p` is left untouched.
    }
    parse_implicit_type(p)
}
/// Parse an explicit type: the core type keyword/name, then signing and
/// packed dimensions, then any scoped/specialized suffix.
fn parse_explicit_type(p: &mut dyn AbstractParser) -> ReportedResult<Type> {
    let mut span = p.peek(0).1;
    let core = parse_type_data(p)?;
    span.expand(p.last_span());
    let base = parse_type_signing_and_dimensions(p, span, core)?;
    parse_type_suffix(p, base)
}
/// Recursively parse type suffixes onto `ty`: `.member`, `::name` scope
/// resolution, or `#(...)` parameter specialization. Stops when none of the
/// suffix tokens follow.
fn parse_type_suffix(p: &mut dyn AbstractParser, ty: Type) -> ReportedResult<Type> {
let (tkn, sp) = p.peek(0);
match tkn {
// `.name` — member type access.
Period => {
p.bump();
let name = parse_identifier(p, "member type name")?;
let subty = parse_type_signing_and_dimensions(
p,
sp,
ScopedType {
ty: Box::new(ty),
member: true,
name: name,
},
)?;
parse_type_suffix(p, subty)
}
// `::name` — scope resolution.
Namespace => {
p.bump();
let name = parse_identifier(p, "type name")?;
let subty = parse_type_signing_and_dimensions(
p,
sp,
ScopedType {
ty: Box::new(ty),
member: false,
name: name,
},
)?;
parse_type_suffix(p, subty)
}
// `#(...)` — parameter specialization of the type.
Hashtag => {
p.bump();
let params = parse_parameter_assignments(p)?;
let span = Span::union(sp, p.last_span());
parse_type_suffix(
p,
ast::Type {
span: span,
data: ast::SpecializedType(Box::new(ty), params),
sign: ast::TypeSign::None,
dims: Vec::new(),
},
)
}
// No suffix: return the type unchanged.
_ => Ok(ty),
}
}
/// Parse an implicit type: no type keyword, only optional signing and
/// dimensions, anchored at a zero-width span at the current position.
fn parse_implicit_type(p: &mut dyn AbstractParser) -> ReportedResult<Type> {
    let start = p.peek(0).1.begin().into();
    parse_type_signing_and_dimensions(p, start, ImplicitType)
}
fn parse_type_signing_and_dimensions(
p: &mut dyn AbstractParser,
mut span: Span,
data: TypeData,
) -> ReportedResult<Type> {
let sign = match p.peek(0).0 {
Keyword(Kw::Signed) => {
p.bump();
TypeSign::Signed
}
Keyword(Kw::Unsigned) => {
p.bump();
TypeSign::Unsigned
}
_ => TypeSign::None,
};
let (dims, _) = parse_optional_dimensions(p)?;
span.expand(p.last_span());
Ok(Type {
span: span,
data: data,
sign: sign,
dims: dims,
})
}
/// Parse the core of a type: a builtin type keyword, an enum/struct/union,
/// a named type, a virtual interface, or a bare `signed`/`unsigned` keyword
/// (implicit type with explicit signing).
fn parse_type_data(p: &mut dyn AbstractParser) -> ReportedResult<TypeData> {
let (tkn, sp) = p.peek(0);
match tkn {
Keyword(Kw::Void) => {
p.bump();
Ok(ast::VoidType)
}
Keyword(Kw::String) => {
p.bump();
Ok(ast::StringType)
}
Keyword(Kw::Chandle) => {
p.bump();
Ok(ast::ChandleType)
}
Keyword(Kw::Event) => {
p.bump();
Ok(ast::EventType)
}
// Bare signing keyword: implicit type with explicit signing.
Keyword(Kw::Signed) => {
p.bump();
Ok(ast::ImplicitSignedType)
}
Keyword(Kw::Unsigned) => {
p.bump();
Ok(ast::ImplicitUnsignedType)
}
Keyword(Kw::Bit) => {
p.bump();
Ok(ast::BitType)
}
Keyword(Kw::Logic) => {
p.bump();
Ok(ast::LogicType)
}
Keyword(Kw::Reg) => {
p.bump();
Ok(ast::RegType)
}
Keyword(Kw::Byte) => {
p.bump();
Ok(ast::ByteType)
}
Keyword(Kw::Shortint) => {
p.bump();
Ok(ast::ShortIntType)
}
Keyword(Kw::Int) => {
p.bump();
Ok(ast::IntType)
}
Keyword(Kw::Longint) => {
p.bump();
Ok(ast::LongIntType)
}
Keyword(Kw::Integer) => {
p.bump();
Ok(ast::IntegerType)
}
Keyword(Kw::Time) => {
p.bump();
Ok(ast::TimeType)
}
Keyword(Kw::Shortreal) => {
p.bump();
Ok(ast::ShortRealType)
}
Keyword(Kw::Real) => {
p.bump();
Ok(ast::RealType)
}
Keyword(Kw::Realtime) => {
p.bump();
Ok(ast::RealtimeType)
}
Keyword(Kw::Enum) => parse_enum_type(p),
Keyword(Kw::Struct) | Keyword(Kw::Union) => parse_struct_type(p),
// `mailbox` is not a keyword but gets special treatment as a type.
Ident(n) if &*n.as_str() == "mailbox" => {
p.bump();
Ok(ast::MailboxType)
}
// Any other identifier is a user-defined type name.
Ident(n) | EscIdent(n) => {
p.bump();
Ok(ast::NamedType(ast::Identifier {
id: DUMMY_NODE_ID,
span: sp,
name: n,
}))
}
// `virtual [interface] name` — virtual interface type.
Keyword(Kw::Virtual) => {
p.bump();
p.try_eat(Keyword(Kw::Interface));
let (name, _) = p.eat_ident("virtual interface name")?;
Ok(ast::VirtIntfType(name))
}
_ => {
let q = p.peek(0).1;
p.add_diag(DiagBuilder2::error("Expected type").span(q));
return Err(());
}
}
}
fn parse_enum_type(p: &mut dyn AbstractParser) -> ReportedResult<TypeData> {
p.bump();
let base = if p.peek(0).0 != OpenDelim(Brace) {
Some(Box::new(parse_data_type(p)?))
} else {
None
};
let names = flanked(p, Brace, |p| {
comma_list(p, CloseDelim(Brace), "enum name", parse_enum_name)
})?;
Ok(EnumType(base, names))
}
/// Parse one enum variant: a name with an optional `[...]` range and an
/// optional `= expr` value.
fn parse_enum_name(p: &mut dyn AbstractParser) -> ReportedResult<EnumName> {
    let name = parse_identifier(p, "enum name")?;
    let mut span = name.span;
    // Optional `[...]` range after the name.
    let range = try_flanked(p, Brack, parse_expr)?;
    // Optional `= expr` value.
    let value = if !p.try_eat(Operator(Op::Assign)) {
        None
    } else {
        Some(parse_expr(p)?)
    };
    span.expand(p.last_span());
    Ok(EnumName {
        span,
        name,
        range,
        value,
    })
}
/// Parse a `struct`, `union`, or `union tagged` type: the kind keyword(s),
/// an optional `packed` flag with signing, and a braced member list.
fn parse_struct_type(p: &mut dyn AbstractParser) -> ReportedResult<TypeData> {
let q = p.peek(0).1;
// Two tokens of lookahead distinguish `union tagged` from plain `union`.
let kind = match (p.peek(0).0, p.peek(1).0) {
(Keyword(Kw::Struct), _) => {
p.bump();
StructKind::Struct
}
(Keyword(Kw::Union), Keyword(Kw::Tagged)) => {
p.bump();
p.bump();
StructKind::TaggedUnion
}
(Keyword(Kw::Union), _) => {
p.bump();
StructKind::Union
}
_ => {
p.add_diag(
DiagBuilder2::error("Expected `struct`, `union`, or `union tagged`").span(q),
);
return Err(());
}
};
// Signing is only parsed after an explicit `packed`.
let (packed, signing) = if p.try_eat(Keyword(Kw::Packed)) {
(true, parse_signing(p))
} else {
(false, TypeSign::None)
};
let members = flanked(p, Brace, |p| {
repeat_until(p, CloseDelim(Brace), parse_struct_member)
})?;
Ok(StructType {
kind: kind,
packed: packed,
signing: signing,
members: members,
})
}
fn parse_struct_member(p: &mut dyn AbstractParser) -> ReportedResult<StructMember> {
let mut span = p.peek(0).1;
let rand_qualifier = match p.peek(0).0 {
Keyword(Kw::Rand) => {
p.bump();
Some(RandomQualifier::Rand)
}
Keyword(Kw::Randc) => {
p.bump();
Some(RandomQualifier::Randc)
}
_ => None,
};
let ty = parse_data_type(p)?;
let names = comma_list_nonempty(p, Semicolon, "member name", parse_variable_decl_assignment)?;
p.require_reported(Semicolon)?;
span.expand(p.last_span());
Ok(StructMember {
span: span,
rand_qualifier: rand_qualifier,
ty: Box::new(ty),
names: names,
})
}
fn parse_signing(p: &mut dyn AbstractParser) -> TypeSign {
match p.peek(0).0 {
Keyword(Kw::Signed) => {
p.bump();
TypeSign::Signed
}
Keyword(Kw::Unsigned) => {
p.bump();
TypeSign::Unsigned
}
_ => TypeSign::None,
}
}
/// Parse zero or more `[...]` dimensions. Returns the dimensions together
/// with the span covering all of them; with no dimensions the span is
/// `INVALID_SPAN` by convention.
fn parse_optional_dimensions(p: &mut dyn AbstractParser) -> ReportedResult<(Vec<TypeDim>, Span)> {
    let mut dims = Vec::new();
    let mut total: Option<Span> = None;
    while let Some((dim, sp)) = try_dimension(p)? {
        dims.push(dim);
        match total {
            Some(ref mut t) => t.expand(sp),
            None => total = Some(sp),
        }
    }
    Ok((dims, total.unwrap_or(INVALID_SPAN)))
}
/// Try to parse one `[...]` dimension: `[]` (unsized), `[*]` (associative),
/// `[$]`/`[$:bound]` (queue), `[expr]`, or `[expr:expr]` (range). Returns
/// `Ok(None)` when no opening bracket is next.
fn try_dimension(p: &mut dyn AbstractParser) -> ReportedResult<Option<(TypeDim, Span)>> {
if !p.try_eat(OpenDelim(Brack)) {
return Ok(None);
}
let mut span = p.last_span();
let dim = match p.peek(0).0 {
// `[]` — unsized dimension.
CloseDelim(Brack) => {
p.bump();
TypeDim::Unsized
}
// `[*]` — associative dimension.
Operator(Op::Mul) => {
p.bump();
TypeDim::Associative
}
// `[$]` or `[$:bound]` — queue dimension.
Dollar => {
p.bump();
// NOTE(review): the optional `:bound` expression is parsed and then
// discarded — `TypeDim::Queue` carries no bound. Confirm this is
// intentional and not a lost value.
if p.try_eat(Colon) {
Some(parse_expr(p)?)
} else {
None
};
TypeDim::Queue
}
// `[expr]` or `[expr:expr]` — sized or range dimension.
_ => {
let expr = match parse_expr(p) {
Ok(x) => x,
Err(_) => {
p.recover_balanced(&[CloseDelim(Brack)], true);
return Err(());
}
};
if p.try_eat(Colon) {
let other = match parse_expr(p) {
Ok(x) => x,
Err(_) => {
p.recover_balanced(&[CloseDelim(Brack)], true);
return Err(());
}
};
TypeDim::Range(expr, other)
} else {
TypeDim::Expr(expr)
}
}
};
// Every form above must be followed by the closing bracket.
match p.peek(0) {
(CloseDelim(Brack), sp) => {
span.expand(sp);
p.bump();
return Ok(Some((dim, span)));
}
(tkn, sp) => {
p.add_diag(
DiagBuilder2::error(format!(
"Expected closing brackets `]` after dimension, got {}",
tkn
))
.span(sp),
);
p.recover_balanced(&[CloseDelim(Brack)], true);
return Err(());
}
}
}
/// Parse the port connections of an instantiation: `.*` (auto-connect all),
/// `.name`, `.name(expr)`, `.name()` (explicitly unconnected), or positional
/// expressions, separated by commas up to the closing parenthesis.
fn parse_list_of_port_connections(p: &mut dyn AbstractParser) -> ReportedResult<Vec<PortConn>> {
comma_list(p, CloseDelim(Paren), "list of port connections", |p| {
let mut span = p.peek(0).1;
let kind = if p.try_eat(Period) {
if p.try_eat(Operator(Op::Mul)) {
// `.*` — connect all remaining ports automatically.
ast::PortConnKind::Auto
} else {
let name = parse_identifier(p, "port name")?;
// `(expr)` connects, `()` is explicitly unconnected, and a bare
// `.name` (no parentheses) auto-connects by name.
let mode = try_flanked(p, Paren, |p| {
Ok(if p.peek(0).0 != CloseDelim(Paren) {
ast::PortConnMode::Connected(parse_expr(p)?)
} else {
ast::PortConnMode::Unconnected
})
})?
.unwrap_or(ast::PortConnMode::Auto);
ast::PortConnKind::Named(name, mode)
}
} else {
// No leading period: positional connection.
ast::PortConnKind::Positional(parse_expr(p)?)
};
span.expand(p.last_span());
Ok(ast::PortConn {
span: span,
kind: kind,
})
})
}
/// Disambiguate between a type and an expression by parsing both
/// alternatives in parallel; whichever parses cleanly up to one of the
/// given terminator tokens wins. The expression branch is tried greedily
/// first.
fn parse_type_or_expr(
    p: &mut dyn AbstractParser,
    terminators: &[Token],
) -> ReportedResult<ast::TypeOrExpr> {
    // Own the terminator list so both closures below can borrow it.
    let terms = terminators.to_vec();
    let mut pp = ParallelParser::new();
    pp.add_greedy("expression", |p| {
        let expr = parse_expr(p)?;
        p.anticipate(&terms)?;
        Ok(ast::TypeOrExpr::Expr(expr))
    });
    pp.add("type", |p| {
        let ty = parse_explicit_type(p)?;
        p.anticipate(&terms)?;
        Ok(ast::TypeOrExpr::Type(ty))
    });
    pp.finish(p, "type or expression")
}
/// Parse an expression, admitting operators of any precedence.
fn parse_expr(p: &mut dyn AbstractParser) -> ReportedResult<Expr> {
    parse_expr_prec(p, Precedence::Min)
}
/// Parse an expression whose operators all bind tighter than `precedence`.
///
/// Handles the `new` operator forms (`new [dim] (init)`, `new (args)`,
/// `new expr`) and speculatively tries a cast expression (`type'(expr)`)
/// via a branch parser before falling back to an ordinary prefix
/// expression followed by suffix parsing.
fn parse_expr_prec(p: &mut dyn AbstractParser, precedence: Precedence) -> ReportedResult<Expr> {
    // `new` expressions: array allocation, constructor call, or shallow copy.
    if p.try_eat(Keyword(Kw::New)) {
        let mut span = p.last_span();
        if let Some(dim_expr) = try_flanked(p, Brack, parse_expr)? {
            // `new [dim] (init)` -- array allocation with optional initializer.
            let expr = try_flanked(p, Paren, parse_expr)?;
            span.expand(p.last_span());
            return Ok(Expr {
                span: span,
                data: ArrayNewExpr(Box::new(dim_expr), expr.map(|x| Box::new(x))),
            });
        } else {
            if let Some(args) = try_flanked(p, Paren, parse_call_args)? {
                // `new (args)` -- constructor call.
                span.expand(p.last_span());
                return Ok(Expr {
                    span: span,
                    data: ConstructorCallExpr(args),
                });
            } else {
                // `new expr` -- shallow copy of another object.
                let expr = parse_expr(p)?;
                span.expand(p.last_span());
                return Ok(Expr {
                    span: span,
                    data: ClassNewExpr(Some(Box::new(expr))),
                });
            }
        }
    }
    // Speculatively parse a cast `type'(expr)`; only commit the branch
    // parser if both the type and the apostrophe parse cleanly.
    {
        let mut bp = BranchParser::new(p);
        let mut span = bp.last_span();
        let ty = parse_explicit_type(&mut bp);
        let tick = bp.require_reported(Apostrophe);
        match (ty, tick) {
            (Ok(ty), Ok(())) => {
                bp.commit();
                let expr = flanked(p, Paren, parse_expr)?;
                span.expand(p.last_span());
                let cast = Expr {
                    span,
                    data: CastExpr(ty, Box::new(expr)),
                };
                return parse_expr_suffix(p, cast, precedence);
            }
            _ => (),
        }
    }
    // Ordinary expression: a prefix followed by any number of suffixes.
    // (A dead `let q = p.peek(0).1;` debug binding was removed here; it was
    // silenced by the file-level `allow(unused_variables)`.)
    let prefix = parse_expr_first(p, precedence)?;
    parse_expr_suffix(p, prefix, precedence)
}
/// Parse the suffix of an expression onto an already-parsed `prefix`:
/// indexing, calls, member/scope access, postfix `++`/`--`, the ternary
/// operator, `inside` sets, size casts (`expr'(...)`), assignments, and
/// binary operators. Recurses on itself after each accepted suffix so
/// arbitrarily long chains are consumed; each suffix is only taken when the
/// caller's `precedence` limit permits it.
fn parse_expr_suffix(
    p: &mut dyn AbstractParser,
    prefix: Expr,
    precedence: Precedence,
) -> ReportedResult<Expr> {
    let (tkn, sp) = p.peek(0);
    match tkn {
        // `prefix[...]` -- index or part-select.
        OpenDelim(Brack) if precedence <= Precedence::Postfix => {
            p.bump();
            let expr = match parse_range_expr(p) {
                Ok(x) => x,
                Err(e) => {
                    // Skip past the closing bracket so parsing can continue.
                    p.recover_balanced(&[CloseDelim(Brack)], true);
                    return Err(e);
                }
            };
            p.require_reported(CloseDelim(Brack))?;
            let expr = Expr {
                span: Span::union(prefix.span, p.last_span()),
                data: IndexExpr {
                    indexee: Box::new(prefix),
                    index: Box::new(expr),
                },
            };
            return parse_expr_suffix(p, expr, precedence);
        }
        // `prefix(args)` -- call expression.
        OpenDelim(Paren) if precedence <= Precedence::Postfix => {
            let args = flanked(p, Paren, parse_call_args)?;
            let expr = Expr {
                span: Span::union(prefix.span, p.last_span()),
                data: CallExpr(Box::new(prefix), args),
            };
            return parse_expr_suffix(p, expr, precedence);
        }
        // `prefix.member` -- member access.
        Period if precedence <= Precedence::Scope => {
            p.bump();
            let (name, name_span) = p.eat_ident("member name")?;
            let expr = Expr {
                span: Span::union(prefix.span, p.last_span()),
                data: MemberExpr {
                    expr: Box::new(prefix),
                    name: Identifier {
                        id: DUMMY_NODE_ID,
                        span: name_span,
                        name: name,
                    },
                },
            };
            return parse_expr_suffix(p, expr, precedence);
        }
        // `prefix::name` -- scope access.
        Namespace if precedence <= Precedence::Scope => {
            p.bump();
            let ident = parse_identifier(p, "scope name")?;
            let expr = Expr {
                span: Span::union(prefix.span, p.last_span()),
                data: ScopeExpr(Box::new(prefix), ident),
            };
            return parse_expr_suffix(p, expr, precedence);
        }
        // `prefix++` -- postfix increment.
        Operator(Op::Inc) if precedence <= Precedence::Unary => {
            p.bump();
            let expr = Expr {
                span: Span::union(prefix.span, p.last_span()),
                data: UnaryExpr {
                    op: Op::Inc,
                    expr: Box::new(prefix),
                    postfix: true,
                },
            };
            return parse_expr_suffix(p, expr, precedence);
        }
        // `prefix--` -- postfix decrement.
        Operator(Op::Dec) if precedence <= Precedence::Unary => {
            p.bump();
            let expr = Expr {
                span: Span::union(prefix.span, p.last_span()),
                data: UnaryExpr {
                    op: Op::Dec,
                    expr: Box::new(prefix),
                    postfix: true,
                },
            };
            return parse_expr_suffix(p, expr, precedence);
        }
        // `prefix ? a : b` -- ternary conditional. Note the strict `<`:
        // the ternary is right-associative.
        Ternary if precedence < Precedence::Ternary => {
            p.bump();
            let true_expr = parse_expr_prec(p, Precedence::Ternary)?;
            p.require_reported(Colon)?;
            let false_expr = parse_expr_prec(p, Precedence::Ternary)?;
            let expr = Expr {
                span: Span::union(prefix.span, p.last_span()),
                data: TernaryExpr {
                    cond: Box::new(prefix),
                    true_expr: Box::new(true_expr),
                    false_expr: Box::new(false_expr),
                },
            };
            return parse_expr_suffix(p, expr, precedence);
        }
        // `prefix inside { ... }` -- set membership, where each element is
        // either a single expression or a `[lo:hi]` value range.
        Keyword(Kw::Inside) if precedence <= Precedence::Relational => {
            p.bump();
            let set = flanked(p, Brace, |p| {
                comma_list_nonempty(p, CloseDelim(Brace), "range", |p| {
                    if p.peek(0).0 == OpenDelim(Brack) {
                        p.require_reported(OpenDelim(Brack))?;
                        let mut sp = p.last_span();
                        let lo = parse_expr(p)?;
                        p.require_reported(Colon)?;
                        let hi = parse_expr(p)?;
                        p.require_reported(CloseDelim(Brack))?;
                        sp.expand(p.last_span());
                        Ok(ValueRange::Range { lo, hi, span: sp })
                    } else {
                        Ok(ValueRange::Single(parse_expr(p)?))
                    }
                })
            })?;
            let expr = Expr {
                span: Span::union(prefix.span, p.last_span()),
                data: InsideExpr(Box::new(prefix), set),
            };
            return parse_expr_suffix(p, expr, precedence);
        }
        // `prefix'(inner)` -- size cast.
        Apostrophe if precedence <= Precedence::Postfix => {
            p.bump();
            let inner = flanked(p, Paren, |p| parse_expr(p))?;
            let expr = Expr {
                span: Span::union(prefix.span, p.last_span()),
                data: CastSizeExpr(Box::new(prefix), Box::new(inner)),
            };
            return parse_expr_suffix(p, expr, precedence);
        }
        _ => (),
    }
    // Assignment operators (`=`, `+=`, ...).
    if let Some(op) = as_assign_operator(tkn) {
        if precedence <= Precedence::Assignment {
            p.bump();
            let rhs = parse_expr_prec(p, Precedence::Assignment)?;
            let expr = Expr {
                span: Span::union(prefix.span, p.last_span()),
                data: AssignExpr {
                    op: op,
                    lhs: Box::new(prefix),
                    rhs: Box::new(rhs),
                },
            };
            return parse_expr_suffix(p, expr, precedence);
        }
    }
    // Binary operators, taken only when they bind tighter than the caller's
    // precedence (strict `<` makes same-precedence operators left-associative).
    if let Some(op) = as_binary_operator(tkn) {
        let prec = op.get_precedence();
        if precedence < prec {
            p.bump();
            let rhs = parse_expr_prec(p, prec)?;
            let expr = Expr {
                span: Span::union(prefix.span, p.last_span()),
                data: BinaryExpr {
                    op: op,
                    lhs: Box::new(prefix),
                    rhs: Box::new(rhs),
                },
            };
            return parse_expr_suffix(p, expr, precedence);
        }
    }
    // No applicable suffix: the prefix is the whole expression.
    Ok(prefix)
}
/// Parse the leading (prefix) part of an expression: prefix `++`/`--`,
/// any other unary operator, or a primary expression.
fn parse_expr_first(p: &mut dyn AbstractParser, precedence: Precedence) -> ReportedResult<Expr> {
    // Remember where the expression starts for span construction.
    let first = p.peek(0).1;
    match p.peek(0) {
        // `++expr` -- prefix increment.
        (Operator(Op::Inc), _) if precedence <= Precedence::Unary => {
            p.bump();
            let expr = parse_expr_prec(p, Precedence::Unary)?;
            return Ok(Expr {
                span: Span::union(first, p.last_span()),
                data: UnaryExpr {
                    op: Op::Inc,
                    expr: Box::new(expr),
                    postfix: false,
                },
            });
        }
        // `--expr` -- prefix decrement.
        (Operator(Op::Dec), _) if precedence <= Precedence::Unary => {
            p.bump();
            let expr = parse_expr_prec(p, Precedence::Unary)?;
            return Ok(Expr {
                span: Span::union(first, p.last_span()),
                data: UnaryExpr {
                    op: Op::Dec,
                    expr: Box::new(expr),
                    postfix: false,
                },
            });
        }
        // Tagged union expressions are not supported by this parser yet.
        (Keyword(Kw::Tagged), sp) => {
            p.add_diag(DiagBuilder2::error("Tagged union expressions not implemented").span(sp));
            return Err(());
        }
        _ => (),
    }
    // Any other unary operator applies to a unary-precedence operand.
    if let Some(op) = as_unary_operator(p.peek(0).0) {
        p.bump();
        let expr = parse_expr_prec(p, Precedence::Unary)?;
        return Ok(Expr {
            span: Span::union(first, p.last_span()),
            data: UnaryExpr {
                op: op,
                expr: Box::new(expr),
                postfix: false,
            },
        });
    }
    // Otherwise the expression starts with a primary.
    parse_primary_expr(p)
}
/// Parse a primary expression: a literal, (system) identifier, concatenation
/// or empty queue `{...}`, parenthesized expression, or assignment pattern
/// `'{...}`. Emits a diagnostic and returns `Err(())` on any other token.
fn parse_primary_expr(p: &mut dyn AbstractParser) -> ReportedResult<Expr> {
    let (tkn, sp) = p.peek(0);
    match tkn {
        // Numeric/string literal.
        Literal(lit) => {
            p.bump();
            return Ok(Expr {
                span: sp,
                data: LiteralExpr(lit),
            });
        }
        // Plain or escaped identifier.
        Ident(n) | EscIdent(n) => {
            p.bump();
            return Ok(Expr {
                span: sp,
                data: IdentExpr(Identifier {
                    id: DUMMY_NODE_ID,
                    span: sp,
                    name: n,
                }),
            });
        }
        // System identifier (`$foo`).
        SysIdent(n) => {
            p.bump();
            return Ok(Expr {
                span: sp,
                data: SysIdentExpr(Identifier {
                    id: DUMMY_NODE_ID,
                    span: sp,
                    name: n,
                }),
            });
        }
        // `{}` is the empty queue; `{...}` is a concatenation (possibly
        // streaming or with repetition -- see parse_concat_expr).
        OpenDelim(Brace) => {
            p.bump();
            if p.try_eat(CloseDelim(Brace)) {
                return Ok(Expr {
                    span: Span::union(sp, p.last_span()),
                    data: EmptyQueueExpr,
                });
            }
            let data = match parse_concat_expr(p) {
                Ok(x) => x,
                Err(e) => {
                    // Skip past the closing brace so parsing can continue.
                    p.recover_balanced(&[CloseDelim(Brace)], true);
                    return Err(e);
                }
            };
            p.require_reported(CloseDelim(Brace))?;
            return Ok(Expr {
                span: Span::union(sp, p.last_span()),
                data: data,
            });
        }
        // `( ... )` -- parenthesized (possibly min:typ:max) expression.
        OpenDelim(Paren) => {
            p.bump();
            let expr = match parse_primary_parenthesis(p) {
                Ok(x) => x,
                Err(e) => {
                    p.recover_balanced(&[CloseDelim(Paren)], true);
                    return Err(e);
                }
            };
            p.require_reported(CloseDelim(Paren))?;
            return Ok(expr);
        }
        // `'{ ... }` -- assignment pattern.
        Apostrophe => {
            p.bump();
            let fields = flanked(p, Brace, |p| {
                comma_list_nonempty(p, CloseDelim(Brace), "pattern field", parse_pattern_field)
            })?;
            return Ok(Expr {
                span: Span::union(sp, p.last_span()),
                data: PatternExpr(fields),
            });
        }
        // Anything else cannot start a primary expression.
        tkn => {
            p.add_diag(
                DiagBuilder2::error(format!("Expected expression, found {} instead", tkn)).span(sp),
            );
            return Err(());
        }
    }
}
/// Parse a single field of an assignment pattern `'{...}`.
///
/// Handles `default: expr` directly; the remaining forms (`expr: expr`,
/// `type: expr`, `expr`, and the repeat form `expr{...}`) are ambiguous and
/// are disambiguated with a parallel parser. The two greedy branches are
/// tried first; the last branch anticipates `,` or `}` to validate itself.
fn parse_pattern_field(p: &mut dyn AbstractParser) -> ReportedResult<PatternField> {
    let mut span = p.peek(0).1;
    // `default: expr` is unambiguous and handled up front.
    if p.try_eat(Keyword(Kw::Default)) {
        p.require_reported(Colon)?;
        let value = Box::new(parse_expr(p)?);
        span.expand(p.last_span());
        return Ok(PatternField {
            span: span,
            data: PatternFieldData::Default(value),
        });
    }
    let mut pp = ParallelParser::new();
    // `member_expr: value`
    pp.add_greedy("expression pattern", |p| {
        let expr = Box::new(parse_expr(p)?);
        p.require_reported(Colon)?;
        let value = Box::new(parse_expr(p)?);
        Ok(PatternFieldData::Member(expr, value))
    });
    // `type: value`
    pp.add_greedy("type pattern", |p| {
        let ty = parse_explicit_type(p)?;
        p.require_reported(Colon)?;
        let value = Box::new(parse_expr(p)?);
        Ok(PatternFieldData::Type(ty, value))
    });
    // Plain `expr`, or the repeat form `expr{e1, e2, ...}`.
    pp.add("expression or repeat pattern", |p| {
        let expr = Box::new(parse_expr(p)?);
        let data = if let Some(inner_exprs) = try_flanked(p, Brace, |p| {
            comma_list(p, CloseDelim(Brace), "expression", parse_expr)
        })? {
            PatternFieldData::Repeat(expr, inner_exprs)
        } else {
            PatternFieldData::Expr(expr)
        };
        // The field must be followed by `,` or `}` for this branch to win.
        p.anticipate(&[Comma, CloseDelim(Brace)])?;
        Ok(data)
    });
    let data = pp.finish(p, "expression pattern")?;
    span.expand(p.last_span());
    Ok(PatternField {
        span: span,
        data: data,
    })
}
/// Direction of a streaming concatenation, as selected by its leading
/// operator (see `parse_concat_expr`: `>>` maps to `In`, `<<` to `Out`).
pub enum StreamDir {
    /// Stream produced by the `>>` operator.
    In,
    /// Stream produced by the `<<` operator.
    Out,
}
/// Parse the interior of a `{...}` concatenation (the opening brace has
/// already been consumed by the caller; the closing brace is left for it).
///
/// Distinguishes three forms: a streaming concatenation (`{<< ...}` /
/// `{>> ...}` with an optional slice size), a repetition (`{N{...}}`),
/// and a plain comma-separated concatenation.
fn parse_concat_expr(p: &mut dyn AbstractParser) -> ReportedResult<ExprData> {
    // A leading shift operator marks a streaming concatenation.
    let stream = match p.peek(0).0 {
        Operator(Op::LogicShL) => Some(StreamDir::Out),
        Operator(Op::LogicShR) => Some(StreamDir::In),
        _ => None,
    };
    if let Some(dir) = stream {
        p.bump();
        // Optional slice size before the `{...}` -- either an expression or
        // a type, disambiguated with a parallel parser. NOTE(review): `dir`
        // is not stored in the resulting StreamConcatExpr -- TODO confirm
        // whether the AST is meant to record the direction.
        let slice_size = if p.peek(0).0 != OpenDelim(Brace) {
            let mut pp = ParallelParser::new();
            pp.add_greedy("slice size expression", |p| {
                let s = parse_expr(p).map(|e| StreamConcatSlice::Expr(Box::new(e)))?;
                p.anticipate(&[OpenDelim(Brace)])?;
                Ok(s)
            });
            pp.add_greedy("slice size type", |p| {
                let s = parse_explicit_type(p).map(|t| StreamConcatSlice::Type(t))?;
                p.anticipate(&[OpenDelim(Brace)])?;
                Ok(s)
            });
            Some(pp.finish(p, "slice size expression or type")?)
        } else {
            None
        };
        // The stream expressions, each with an optional `with [range]`.
        let exprs = flanked(p, Brace, |p| {
            comma_list_nonempty(p, CloseDelim(Brace), "stream expression", |p| {
                let expr = Box::new(parse_expr(p)?);
                let range = if p.try_eat(Keyword(Kw::With)) {
                    Some(Box::new(flanked(p, Brack, parse_range_expr)?))
                } else {
                    None
                };
                Ok(StreamExpr {
                    expr: expr,
                    range: range,
                })
            })
        })?;
        return Ok(StreamConcatExpr {
            slice: slice_size,
            exprs: exprs,
        });
    }
    // Parse the first expression at concatenation precedence so a comma or
    // inner brace can still follow.
    let first_expr = parse_expr_prec(p, Precedence::Concatenation)?;
    // `{N{...}}` -- repetition: first_expr is the repeat count.
    if p.try_eat(OpenDelim(Brace)) {
        let exprs = match parse_expr_list(p) {
            Ok(x) => x,
            Err(e) => {
                p.recover_balanced(&[CloseDelim(Brace)], true);
                return Err(e);
            }
        };
        p.require_reported(CloseDelim(Brace))?;
        return Ok(ConcatExpr {
            repeat: Some(Box::new(first_expr)),
            exprs: exprs,
        });
    }
    // Plain concatenation: gather the remaining comma-separated expressions,
    // tolerating (with a warning) a superfluous trailing comma.
    let mut exprs = Vec::new();
    exprs.push(first_expr);
    while p.try_eat(Comma) {
        if p.peek(0).0 == CloseDelim(Brace) {
            let q = p.peek(0).1;
            p.add_diag(DiagBuilder2::warning("Superfluous trailing comma").span(q));
            break;
        }
        exprs.push(parse_expr_prec(p, Precedence::Min)?);
    }
    Ok(ConcatExpr {
        repeat: None,
        exprs: exprs,
    })
}
fn parse_expr_list(p: &mut dyn AbstractParser) -> ReportedResult<Vec<Expr>> {
let mut v = Vec::new();
loop {
v.push(parse_expr_prec(p, Precedence::Min)?);
match p.peek(0) {
(Comma, sp) => {
p.bump();
if p.peek(0).0 == CloseDelim(Brace) {
p.add_diag(DiagBuilder2::warning("Superfluous trailing comma").span(sp));
break;
}
}
(CloseDelim(Brace), _) => break,
(_, sp) => {
p.add_diag(DiagBuilder2::error("Expected , or } after expression").span(sp));
return Err(());
}
}
}
Ok(v)
}
/// Parse the contents of a parenthesized primary: either a plain expression
/// or a `min:typ:max` expression. The surrounding parentheses are handled
/// by the caller.
fn parse_primary_parenthesis(p: &mut dyn AbstractParser) -> ReportedResult<Expr> {
    let min = parse_expr_prec(p, Precedence::Min)?;
    // Without a colon this is just a parenthesized expression.
    if !p.try_eat(Colon) {
        return Ok(min);
    }
    let typ = parse_expr_prec(p, Precedence::Min)?;
    p.require_reported(Colon)?;
    let max = parse_expr_prec(p, Precedence::Min)?;
    Ok(Expr {
        span: Span::union(min.span, max.span),
        data: MinTypMaxExpr {
            min: Box::new(min),
            typ: Box::new(typ),
            max: Box::new(max),
        },
    })
}
/// Parse an expression optionally followed by a range suffix: `lhs:rhs`
/// (absolute), `lhs+:rhs` (relative up), or `lhs-:rhs` (relative down).
/// Without a range separator the first expression is returned as-is.
fn parse_range_expr(p: &mut dyn AbstractParser) -> ReportedResult<Expr> {
    let mut range_span = p.peek(0).1;
    let lhs = parse_expr(p)?;
    // The separator token selects the range mode, if any.
    let mode = match p.peek(0).0 {
        Colon => RangeMode::Absolute,
        AddColon => RangeMode::RelativeUp,
        SubColon => RangeMode::RelativeDown,
        _ => return Ok(lhs),
    };
    p.bump();
    let rhs = parse_expr(p)?;
    range_span.expand(p.last_span());
    Ok(Expr {
        span: range_span,
        data: RangeExpr {
            mode: mode,
            lhs: Box::new(lhs),
            rhs: Box::new(rhs),
        },
    })
}
/// Map a token to the unary operator it represents, or `None` if the token
/// is not an operator usable in prefix position.
fn as_unary_operator(tkn: Token) -> Option<Op> {
    let op = match tkn {
        Operator(op) => op,
        _ => return None,
    };
    match op {
        Op::Add
        | Op::Sub
        | Op::LogicNot
        | Op::BitNot
        | Op::BitAnd
        | Op::BitNand
        | Op::BitOr
        | Op::BitNor
        | Op::BitXor
        | Op::BitNxor
        | Op::BitXnor => Some(op),
        _ => None,
    }
}
/// Map a token to the binary operator it represents, or `None` if the token
/// is not an operator usable in infix position.
fn as_binary_operator(tkn: Token) -> Option<Op> {
    let op = match tkn {
        Operator(op) => op,
        _ => return None,
    };
    match op {
        Op::Add
        | Op::Sub
        | Op::Mul
        | Op::Div
        | Op::Mod
        | Op::LogicEq
        | Op::LogicNeq
        | Op::CaseEq
        | Op::CaseNeq
        | Op::WildcardEq
        | Op::WildcardNeq
        | Op::LogicAnd
        | Op::LogicOr
        | Op::Pow
        | Op::Lt
        | Op::Leq
        | Op::Gt
        | Op::Geq
        | Op::BitAnd
        | Op::BitNand
        | Op::BitOr
        | Op::BitNor
        | Op::BitXor
        | Op::BitXnor
        | Op::BitNxor
        | Op::LogicShL
        | Op::LogicShR
        | Op::ArithShL
        | Op::ArithShR
        | Op::LogicImpl
        | Op::LogicEquiv => Some(op),
        _ => None,
    }
}
fn as_assign_operator(tkn: Token) -> Option<AssignOp> {
match tkn {
Operator(Op::Assign) => Some(AssignOp::Identity),
Operator(Op::AssignAdd) => Some(AssignOp::Add),
Operator(Op::AssignSub) => Some(AssignOp::Sub),
Operator(Op::AssignMul) => Some(AssignOp::Mul),
Operator(Op::AssignDiv) => Some(AssignOp::Div),
Operator(Op::AssignMod) => Some(AssignOp::Mod),
Operator(Op::AssignBitAnd) => Some(AssignOp::BitAnd),
Operator(Op::AssignBitOr) => Some(AssignOp::BitOr),
Operator(Op::AssignBitXor) => Some(AssignOp::BitXor),
Operator(Op::AssignLogicShL) => Some(AssignOp::LogicShL),
Operator(Op::AssignLogicShR) => Some(AssignOp::LogicShR),
Operator(Op::AssignArithShL) => Some(AssignOp::ArithShL),
Operator(Op::AssignArithShR) => Some(AssignOp::ArithShR),
_ => None,
}
}
/// Parse a comma-separated list of ports up to and including the closing
/// parenthesis. Individual malformed ports are recovered from so the rest
/// of the list can still be parsed; a superfluous trailing comma is
/// tolerated with a warning.
fn parse_port_list(p: &mut dyn AbstractParser) -> ReportedResult<Vec<Port>> {
    let mut v = Vec::new();
    // An immediate `)` means the port list is empty.
    if p.try_eat(CloseDelim(Paren)) {
        return Ok(v);
    }
    loop {
        match parse_port(p) {
            Ok(x) => v.push(x),
            // On error, skip to the next `,` or `)` and keep going.
            Err(()) => p.recover_balanced(&[Comma, CloseDelim(Paren)], false),
        }
        match p.peek(0) {
            (Comma, sp) => {
                p.bump();
                if p.peek(0).0 == CloseDelim(Paren) {
                    p.add_diag(DiagBuilder2::warning("Superfluous trailing comma").span(sp));
                    break;
                }
            }
            (CloseDelim(Paren), _) => break,
            (_, sp) => {
                p.add_diag(DiagBuilder2::error("Expected , or ) after port").span(sp));
                p.recover_balanced(&[CloseDelim(Paren)], false);
                break;
            }
        }
    }
    // Consume the closing parenthesis.
    p.require_reported(CloseDelim(Paren))?;
    Ok(v)
}
/// Parse a single port by speculatively trying the four port flavors. The
/// registration order establishes the greedy priority: interface ports
/// first, then explicit (`.name(...)`), named, and finally implicit
/// (bare expression) ports.
fn parse_port(p: &mut dyn AbstractParser) -> ReportedResult<ast::Port> {
    let mut pp = ParallelParser::new();
    pp.add_greedy("interface port", parse_interface_port);
    pp.add_greedy("explicit port", parse_explicit_port);
    pp.add_greedy("named port", parse_named_port);
    pp.add_greedy("implicit port", parse_implicit_port);
    pp.finish(p, "port")
}
/// Parse an interface port: `interface[.modport] name [dims] [= expr]`,
/// which must be followed by `,` or `)`.
fn parse_interface_port(p: &mut dyn AbstractParser) -> ReportedResult<ast::Port> {
    let mut span = p.peek(0).1;
    p.require_reported(Keyword(Kw::Interface))?;
    // Optional `.modport` selection after the `interface` keyword.
    let modport = match p.try_eat(Period) {
        true => Some(parse_identifier(p, "modport name")?),
        false => None,
    };
    let name = parse_identifier(p, "port name")?;
    let (dims, _) = parse_optional_dimensions(p)?;
    // Optional default assignment.
    let expr = match p.try_eat(Operator(Op::Assign)) {
        true => Some(parse_expr(p)?),
        false => None,
    };
    // The port must end at a `,` or `)`.
    p.anticipate(&[CloseDelim(Paren), Comma])?;
    span.expand(p.last_span());
    Ok(ast::Port::Intf {
        span: span,
        modport: modport,
        name: name,
        dims: dims,
        expr: expr,
    })
}
fn parse_explicit_port(p: &mut dyn AbstractParser) -> ReportedResult<ast::Port> {
let mut span = p.peek(0).1;
let dir = as_port_direction(p.peek(0).0);
if dir.is_some() {
p.bump();
}
p.require_reported(Period)?;
let name = parse_identifier(p, "port name")?;
let expr = flanked(p, Paren, |p| {
if p.peek(0).0 == CloseDelim(Paren) {
Ok(None)
} else {
Ok(Some(parse_expr(p)?))
}
})?;
p.anticipate(&[CloseDelim(Paren), Comma])?;
span.expand(p.last_span());
Ok(ast::Port::Explicit {
span: span,
dir: dir,
name: name,
expr: expr,
})
}
/// Parse a named port: `[dir] [net|var] [type] name [dims] [= expr]`.
///
/// Because the type may be implicit (absent), the explicit- and
/// implicit-type alternatives are tried in parallel, each followed by the
/// shared `tail` (name, dimensions, optional default), which must end at
/// `,` or `)`.
fn parse_named_port(p: &mut dyn AbstractParser) -> ReportedResult<ast::Port> {
    let mut span = p.peek(0).1;
    // Optional port direction keyword.
    let dir = as_port_direction(p.peek(0).0);
    if dir.is_some() {
        p.bump();
    }
    // Optional port kind: a net type keyword or `var`.
    let kind = {
        let tkn = p.peek(0).0;
        if let Some(net) = as_net_type(tkn) {
            p.bump();
            Some(ast::PortKind::Net(net))
        } else if tkn == Keyword(Kw::Var) {
            p.bump();
            Some(ast::PortKind::Var)
        } else {
            None
        }
    };
    // Try explicit and implicit type in parallel, each followed by `tail`.
    let mut pp = ParallelParser::new();
    pp.add("explicit type", |p| {
        let ty = parse_explicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    pp.add("implicit type", |p| {
        let ty = parse_implicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    let (ty, (name, dims, expr)) = pp.finish(p, "explicit or implicit type")?;
    // Shared remainder of the port: name, dimensions, optional `= expr`.
    fn tail(
        p: &mut dyn AbstractParser,
    ) -> ReportedResult<(ast::Identifier, Vec<ast::TypeDim>, Option<ast::Expr>)> {
        let name = parse_identifier(p, "port name")?;
        let (dims, _) = parse_optional_dimensions(p)?;
        let expr = if p.try_eat(Operator(Op::Assign)) {
            Some(parse_expr(p)?)
        } else {
            None
        };
        // The port must end at a `,` or `)` for this branch to be valid.
        p.anticipate(&[CloseDelim(Paren), Comma])?;
        Ok((name, dims, expr))
    }
    span.expand(p.last_span());
    Ok(ast::Port::Named {
        span: span,
        dir: dir,
        kind: kind,
        ty: ty,
        name: name,
        dims: dims,
        expr: expr,
    })
}
/// Parse an implicit port, which is just a plain expression.
fn parse_implicit_port(p: &mut dyn AbstractParser) -> ReportedResult<ast::Port> {
    let expr = parse_expr(p)?;
    Ok(ast::Port::Implicit(expr))
}
/// Parse a parenthesized, comma-separated list of parameter assignments:
/// `(.name(value), value, ...)`.
fn parse_parameter_assignments(
    p: &mut dyn AbstractParser,
) -> ReportedResult<Vec<ast::ParamAssignment>> {
    flanked(p, Paren, |p| {
        let assignments = comma_list(
            p,
            CloseDelim(Paren),
            "parameter assignment",
            parse_parameter_assignment,
        )?;
        Ok(assignments)
    })
}
/// Parse a single parameter assignment, either named (`.name(value)`) or
/// positional (`value`), where the value may be a type or an expression.
fn parse_parameter_assignment(p: &mut dyn AbstractParser) -> ReportedResult<ast::ParamAssignment> {
    let mut span = p.peek(0).1;
    // Values are delimited by `,` or the closing `)` of the assignment list.
    let terms = [Comma, CloseDelim(Paren)];
    let name;
    let expr;
    if p.try_eat(Period) {
        // Named assignment: `.name(<type-or-expr>)`.
        name = Some(parse_identifier(p, "parameter name")?);
        expr = flanked(p, Paren, |p| parse_type_or_expr(p, &terms))?;
    } else {
        // Positional assignment.
        name = None;
        expr = parse_type_or_expr(p, &terms)?;
    }
    span.expand(p.last_span());
    Ok(ast::ParamAssignment {
        span: span,
        name: name,
        expr: expr,
    })
}
/// Parse a procedure whose introducing keyword (`initial`, `always`, ...)
/// is the current token; the caller has already mapped that keyword to the
/// corresponding `kind`.
fn parse_procedure(p: &mut dyn AbstractParser, kind: ProcedureKind) -> ReportedResult<Procedure> {
    // Consume the procedure keyword itself.
    p.bump();
    let mut span = p.last_span();
    let stmt = parse_stmt(p)?;
    span.expand(p.last_span());
    Ok(Procedure {
        span: span,
        kind: kind,
        stmt: stmt,
    })
}
/// Parse a full function or task declaration: prototype, body items, the
/// matching `endfunction`/`endtask`, and an optional `: name` label.
fn parse_subroutine_decl(p: &mut dyn AbstractParser) -> ReportedResult<SubroutineDecl> {
    let mut span = p.peek(0).1;
    let prototype = parse_subroutine_prototype(p)?;
    // The terminating keyword depends on the subroutine kind.
    let term = if prototype.kind == SubroutineKind::Func {
        Keyword(Kw::Endfunction)
    } else {
        Keyword(Kw::Endtask)
    };
    let items = repeat_until(p, term, parse_subroutine_item)?;
    p.require_reported(term)?;
    // Optional `: name` label after the end keyword; the name is consumed
    // but not retained.
    if p.try_eat(Colon) {
        p.eat_ident("function/task name")?;
    }
    span.expand(p.last_span());
    Ok(SubroutineDecl {
        span: span,
        prototype: prototype,
        items: items,
    })
}
/// Parse a function or task prototype: the `function`/`task` keyword, an
/// optional lifetime, for functions an optional return type (implicit or
/// explicit, tried in parallel -- `new` constructors have none), then the
/// name and argument list via `parse_subroutine_prototype_tail`.
fn parse_subroutine_prototype(p: &mut dyn AbstractParser) -> ReportedResult<SubroutinePrototype> {
    let mut span = p.peek(0).1;
    let kind = match p.peek(0).0 {
        Keyword(Kw::Function) => {
            p.bump();
            SubroutineKind::Func
        }
        Keyword(Kw::Task) => {
            p.bump();
            SubroutineKind::Task
        }
        _ => {
            p.add_diag(DiagBuilder2::error("Expected function or task prototype").span(span));
            return Err(());
        }
    };
    // Optional lifetime keyword (e.g. `automatic`/`static`).
    let lifetime = as_lifetime(p.peek(0).0);
    if lifetime.is_some() {
        p.bump();
    }
    let (retty, (name, args)) = if kind == SubroutineKind::Func {
        if p.peek(0).0 == Keyword(Kw::New) {
            // A `new` constructor has no return type.
            (None, parse_subroutine_prototype_tail(p)?)
        } else {
            // The return type may be implicit or explicit; try both.
            let mut pp = ParallelParser::new();
            pp.add("implicit function return type", |p| {
                let ty = parse_implicit_type(p)?;
                Ok((Some(ty), parse_subroutine_prototype_tail(p)?))
            });
            pp.add("explicit function return type", |p| {
                let ty = parse_explicit_type(p)?;
                Ok((Some(ty), parse_subroutine_prototype_tail(p)?))
            });
            pp.finish(p, "implicit or explicit function return type")?
        }
    } else {
        // Tasks have no return type.
        (None, parse_subroutine_prototype_tail(p)?)
    };
    // NOTE(review): `retty` is not stored in SubroutinePrototype here --
    // TODO confirm whether the return type should be carried in the AST.
    span.expand(p.last_span());
    Ok(SubroutinePrototype {
        span: span,
        kind: kind,
        lifetime: lifetime,
        name: name,
        args: args,
    })
}
/// Parse the tail of a subroutine prototype: the name (or `new`, which is
/// interned as an identifier), an optional parenthesized port list, and the
/// terminating semicolon. Each port may have a direction, `var`, an
/// explicit or implicit type (tried in parallel), and an optional
/// name/dimensions/default tail.
fn parse_subroutine_prototype_tail(
    p: &mut dyn AbstractParser,
) -> ReportedResult<(ast::Identifier, Vec<SubroutinePort>)> {
    // `new` constructors get a synthetic identifier named "new".
    let name = if p.try_eat(Keyword(Kw::New)) {
        ast::Identifier {
            id: DUMMY_NODE_ID,
            span: p.last_span(),
            name: get_name_table().intern("new", true),
        }
    } else {
        parse_identifier(p, "function or task name")?
    };
    // The port list is optional; a missing `(...)` means no ports.
    let args = try_flanked(p, Paren, |p| {
        comma_list(p, CloseDelim(Paren), "subroutine port", |p| {
            let mut span = p.peek(0).1;
            // Optional direction and `var` qualifiers.
            let dir = try_subroutine_port_dir(p);
            let var = p.try_eat(Keyword(Kw::Var));
            // The port type may be explicit or implicit; try both, each
            // followed by the shared `tail` below.
            let mut pp = ParallelParser::new();
            pp.add("explicit type", |p| {
                let ty = parse_explicit_type(p)?;
                Ok((ty, tail(p)?))
            });
            pp.add("implicit type", |p| {
                let ty = parse_implicit_type(p)?;
                Ok((ty, tail(p)?))
            });
            let (ty, name) = pp.finish(p, "explicit or implicit type")?;
            // Optional port name with dimensions and default value; the
            // port must then end at `,` or `)`.
            fn tail(p: &mut dyn AbstractParser) -> ReportedResult<Option<SubroutinePortName>> {
                let data = if let Some(name) = try_identifier(p)? {
                    let (dims, _) = parse_optional_dimensions(p)?;
                    let expr = if p.try_eat(Operator(Op::Assign)) {
                        Some(parse_expr(p)?)
                    } else {
                        None
                    };
                    Some(SubroutinePortName {
                        name: name,
                        dims: dims,
                        expr: expr,
                    })
                } else {
                    None
                };
                match p.peek(0) {
                    (Comma, _) | (CloseDelim(Paren), _) => Ok(data),
                    (_, sp) => {
                        p.add_diag(
                            DiagBuilder2::error("Expected , or ) after subroutine port").span(sp),
                        );
                        Err(())
                    }
                }
            }
            span.expand(p.last_span());
            Ok(SubroutinePort {
                span: span,
                dir: dir,
                var: var,
                ty: ty,
                name: name,
            })
        })
    })?
    .unwrap_or(Vec::new());
    // The prototype ends with a semicolon.
    p.require_reported(Semicolon)?;
    Ok((name, args))
}
fn try_subroutine_port_dir(p: &mut dyn AbstractParser) -> Option<SubroutinePortDir> {
match (p.peek(0).0, p.peek(1).0) {
(Keyword(Kw::Input), _) => {
p.bump();
Some(SubroutinePortDir::Input)
}
(Keyword(Kw::Output), _) => {
p.bump();
Some(SubroutinePortDir::Output)
}
(Keyword(Kw::Inout), _) => {
p.bump();
Some(SubroutinePortDir::Inout)
}
(Keyword(Kw::Ref), _) => {
p.bump();
Some(SubroutinePortDir::Ref)
}
(Keyword(Kw::Const), Keyword(Kw::Ref)) => {
p.bump();
p.bump();
Some(SubroutinePortDir::ConstRef)
}
_ => None,
}
}
/// Parse one item of a subroutine body: either a port declaration (if it
/// starts with a direction keyword) or an ordinary statement.
fn parse_subroutine_item(p: &mut dyn AbstractParser) -> ReportedResult<SubroutineItem> {
    let mut span = p.peek(0).1;
    // A leading direction keyword marks a port declaration.
    if let Some(dir) = try_subroutine_port_dir(p) {
        let var = p.try_eat(Keyword(Kw::Var));
        // The declared type may be explicit or implicit; try both, each
        // followed by the declaration names up to the semicolon.
        let mut pp = ParallelParser::new();
        pp.add("explicit type", |p| {
            let ty = parse_explicit_type(p)?;
            let names = comma_list_nonempty(
                p,
                Semicolon,
                "port declaration",
                parse_variable_decl_assignment,
            )?;
            p.require_reported(Semicolon)?;
            Ok((ty, names))
        });
        pp.add("implicit type", |p| {
            let ty = parse_implicit_type(p)?;
            let names = comma_list_nonempty(
                p,
                Semicolon,
                "port declaration",
                parse_variable_decl_assignment,
            )?;
            p.require_reported(Semicolon)?;
            Ok((ty, names))
        });
        let (ty, names) = pp.finish(p, "explicit or implicit type")?;
        span.expand(p.last_span());
        return Ok(SubroutineItem::PortDecl(SubroutinePortDecl {
            span: span,
            dir: dir,
            var: var,
            ty: ty,
            names: names,
        }));
    }
    // Otherwise the item is an ordinary statement.
    Ok(SubroutineItem::Stmt(parse_stmt(p)?))
}
/// Parse a single statement, including an optional `label:` prefix. A lone
/// semicolon yields the null statement. The label is passed mutably into
/// `parse_stmt_data` so block statements can adopt or check it.
fn parse_stmt(p: &mut dyn AbstractParser) -> ReportedResult<Stmt> {
    let mut span = p.peek(0).1;
    // A lone `;` is the null statement.
    if p.try_eat(Semicolon) {
        return Ok(Stmt::new_null(span));
    }
    // An identifier immediately followed by `:` labels the statement.
    let mut label = None;
    if p.is_ident() && p.peek(1).0 == Colon {
        let (name, _) = p.eat_ident("statement label")?;
        p.bump(); // consume the `:`
        label = Some(name);
    }
    let data = parse_stmt_data(p, &mut label)?;
    span.expand(p.last_span());
    Ok(Stmt {
        span: span,
        label: label,
        data: data,
    })
}
/// Parse the body of a statement (after any label), dispatching on the
/// leading token. Timing controls (`#`, `@`, `##`) are tried first and wrap
/// the following statement. The fallback tries variable declarations,
/// assign statements, and expression statements in parallel, recovering to
/// the next semicolon on failure.
fn parse_stmt_data(
    p: &mut dyn AbstractParser,
    label: &mut Option<Name>,
) -> ReportedResult<StmtData> {
    let (tkn, sp) = p.peek(0);
    // Timing controls prefix a statement: `#delay`, `@event`, `##cycles`.
    if let Some(dc) = try_delay_control(p)? {
        let stmt = Box::new(parse_stmt(p)?);
        return Ok(TimedStmt(TimingControl::Delay(dc), stmt));
    }
    if let Some(ec) = try_event_control(p)? {
        let stmt = Box::new(parse_stmt(p)?);
        return Ok(TimedStmt(TimingControl::Event(ec), stmt));
    }
    if let Some(cd) = try_cycle_delay(p)? {
        let stmt = Box::new(parse_stmt(p)?);
        return Ok(TimedStmt(TimingControl::Cycle(cd), stmt));
    }
    Ok(match tkn {
        // `begin ... end` -- sequential block.
        OpenDelim(Bgend) => {
            p.bump();
            let (stmts, _) = parse_block(p, label, &[CloseDelim(Bgend)])?;
            SequentialBlock(stmts)
        }
        // `fork ... join/join_any/join_none` -- parallel block.
        Keyword(Kw::Fork) => {
            p.bump();
            let (stmts, terminator) = parse_block(
                p,
                label,
                &[
                    Keyword(Kw::Join),
                    Keyword(Kw::JoinAny),
                    Keyword(Kw::JoinNone),
                ],
            )?;
            let join = match terminator {
                Keyword(Kw::Join) => JoinKind::All,
                Keyword(Kw::JoinAny) => JoinKind::Any,
                Keyword(Kw::JoinNone) => JoinKind::None,
                // parse_block only returns one of the terminators above.
                x => panic!("Invalid parallel block terminator {:?}", x),
            };
            ParallelBlock(stmts, join)
        }
        // `unique`/`unique0`/`priority` qualify a following if or case.
        Keyword(Kw::Unique) => {
            p.bump();
            parse_if_or_case(p, Some(UniquePriority::Unique))?
        }
        Keyword(Kw::Unique0) => {
            p.bump();
            parse_if_or_case(p, Some(UniquePriority::Unique0))?
        }
        Keyword(Kw::Priority) => {
            p.bump();
            parse_if_or_case(p, Some(UniquePriority::Priority))?
        }
        // Unqualified if/case statements.
        Keyword(Kw::If) | Keyword(Kw::Case) | Keyword(Kw::Casex) | Keyword(Kw::Casez) => {
            parse_if_or_case(p, None)?
        }
        // Loop statements.
        Keyword(Kw::Forever) => {
            p.bump();
            let stmt = Box::new(parse_stmt(p)?);
            ForeverStmt(stmt)
        }
        Keyword(Kw::Repeat) => {
            p.bump();
            let expr = flanked(p, Paren, parse_expr)?;
            let stmt = Box::new(parse_stmt(p)?);
            RepeatStmt(expr, stmt)
        }
        Keyword(Kw::While) => {
            p.bump();
            let expr = flanked(p, Paren, parse_expr)?;
            let stmt = Box::new(parse_stmt(p)?);
            WhileStmt(expr, stmt)
        }
        // `do <stmt> while (<expr>)`.
        Keyword(Kw::Do) => {
            p.bump();
            let stmt = Box::new(parse_stmt(p)?);
            let q = p.last_span();
            if !p.try_eat(Keyword(Kw::While)) {
                p.add_diag(DiagBuilder2::error("Do loop requires a while clause").span(q));
                return Err(());
            }
            let expr = flanked(p, Paren, parse_expr)?;
            DoStmt(stmt, expr)
        }
        // `for (<init stmt> <cond>; <step>) <stmt>`. The init is parsed as
        // a full statement, which consumes its own semicolon.
        Keyword(Kw::For) => {
            p.bump();
            let (init, cond, step) = flanked(p, Paren, |p| {
                let init = Box::new(parse_stmt(p)?);
                let cond = parse_expr(p)?;
                p.require_reported(Semicolon)?;
                let step = parse_expr(p)?;
                Ok((init, cond, step))
            })?;
            let stmt = Box::new(parse_stmt(p)?);
            ForStmt(init, cond, step, stmt)
        }
        // `foreach (array [i, j, ...]) <stmt>`; loop variables may be
        // omitted (consecutive commas), recorded as `None`.
        Keyword(Kw::Foreach) => {
            p.bump();
            let (expr, vars) = flanked(p, Paren, |p| {
                let expr = parse_expr_prec(p, Precedence::Scope)?;
                let vars = flanked(p, Brack, |p| {
                    let mut v = Vec::new();
                    while p.peek(0).0 != Eof && p.peek(0).0 != CloseDelim(Brack) {
                        if p.peek(0).0 != Comma {
                            v.push(Some(parse_identifier(p, "loop variable name")?));
                        } else {
                            v.push(None)
                        }
                        match p.peek(0) {
                            (Comma, _) => p.bump(),
                            (CloseDelim(Brack), _) => (),
                            (tkn, sp) => {
                                p.add_diag(
                                    DiagBuilder2::error(format!(
                                        "expected , or ] after loop variable; found {} instead",
                                        tkn
                                    ))
                                    .span(sp),
                                );
                                return Err(());
                            }
                        }
                    }
                    Ok(v)
                })?;
                Ok((expr, vars))
            })?;
            let stmt = Box::new(parse_stmt(p)?);
            ForeachStmt(expr, vars, stmt)
        }
        // `genvar a, b, ...;` declaration statement.
        Keyword(Kw::Genvar) => {
            p.bump();
            let names = comma_list_nonempty(p, Semicolon, "genvar declaration", parse_genvar_decl)?;
            p.require_reported(Semicolon)?;
            GenvarDeclStmt(names)
        }
        // `return;` or `return <expr>;`.
        Keyword(Kw::Return) => {
            p.bump();
            ReturnStmt(if p.try_eat(Semicolon) {
                None
            } else {
                let expr = parse_expr(p)?;
                p.require_reported(Semicolon)?;
                Some(expr)
            })
        }
        Keyword(Kw::Break) => {
            p.bump();
            p.require_reported(Semicolon)?;
            BreakStmt
        }
        Keyword(Kw::Continue) => {
            p.bump();
            p.require_reported(Semicolon)?;
            ContinueStmt
        }
        // `import ...;` inside a statement context.
        Keyword(Kw::Import) => ImportStmt(parse_import_decl(p)?),
        // Assertion statements.
        Keyword(Kw::Assert)
        | Keyword(Kw::Assume)
        | Keyword(Kw::Cover)
        | Keyword(Kw::Expect)
        | Keyword(Kw::Restrict) => AssertionStmt(Box::new(parse_assertion(p)?)),
        // `wait (<expr>) <stmt>` or `wait fork;`.
        Keyword(Kw::Wait) => {
            p.bump();
            match p.peek(0) {
                (OpenDelim(Paren), _) => {
                    let expr = flanked(p, Paren, parse_expr)?;
                    let stmt = Box::new(parse_stmt(p)?);
                    WaitExprStmt(expr, stmt)
                }
                (Keyword(Kw::Fork), _) => {
                    p.bump();
                    p.require_reported(Semicolon)?;
                    WaitForkStmt
                }
                (tkn, sp) => {
                    p.add_diag(
                        DiagBuilder2::error(format!(
                            "Expected (<expr>) or fork after wait, found {} instead",
                            tkn
                        ))
                        .span(sp),
                    );
                    return Err(());
                }
            }
        }
        // `wait_order` is not supported.
        Keyword(Kw::WaitOrder) => {
            p.add_diag(
                DiagBuilder2::error("Don't know how to parse wait_order statements").span(sp),
            );
            return Err(());
        }
        // `disable fork;` or `disable <name>;`.
        Keyword(Kw::Disable) => {
            p.bump();
            if p.try_eat(Keyword(Kw::Fork)) {
                p.require_reported(Semicolon)?;
                DisableForkStmt
            } else {
                let (name, _) = p.eat_ident("task or block name")?;
                p.require_reported(Semicolon)?;
                DisableStmt(name)
            }
        }
        // Fallback: variable declaration, assign statement, or expression
        // statement, tried in parallel. On failure, recover past the next
        // semicolon so subsequent statements can still be parsed.
        _ => {
            let result = {
                let mut pp = ParallelParser::new();
                pp.add("variable declaration", |p| {
                    parse_var_decl(p).map(|d| ast::VarDeclStmt(d))
                });
                pp.add("assign statement", |p| parse_assign_stmt(p));
                pp.add("expression statement", |p| parse_expr_stmt(p));
                pp.finish(p, "statement")
            };
            match result {
                Ok(x) => x,
                Err(_) => {
                    p.recover_balanced(&[Semicolon], true);
                    return Err(());
                }
            }
        }
    })
}
/// Parse the body of a `begin`/`fork` block up to one of the given
/// terminator tokens, returning the statements and which terminator ended
/// the block.
///
/// Handles the optional `: label` after the opening keyword (merging it
/// with any statement label the caller already saw, diagnosing duplicates
/// and conflicts) and the optional `: label` after the terminator
/// (diagnosing mismatches). Malformed statements are recovered from by
/// skipping to the nearest terminator.
fn parse_block(
    p: &mut dyn AbstractParser,
    label: &mut Option<Name>,
    terminators: &[Token],
) -> ReportedResult<(Vec<Stmt>, Token)> {
    let span = p.last_span();
    // Optional `: label` after the opening keyword.
    if p.try_eat(Colon) {
        let (name, name_span) = p.eat_ident("block label")?;
        if let Some(existing) = *label {
            if name == existing {
                // Same name given both before and after the keyword.
                p.add_diag(
                    DiagBuilder2::warning(format!("Block {} labelled twice", name)).span(name_span),
                );
            } else {
                p.add_diag(
                    DiagBuilder2::error(format!(
                        "Block has been given two conflicting labels, {} and {}",
                        existing, name
                    ))
                    .span(name_span),
                );
            }
        } else {
            // Adopt the label for the caller.
            *label = Some(name);
        }
    }
    // Collect statements until one of the terminators appears.
    let mut v = Vec::new();
    let terminator;
    'outer: loop {
        let tkn = p.peek(0).0;
        for term in terminators {
            if tkn == *term {
                terminator = *term;
                p.bump();
                break 'outer;
            }
        }
        match parse_stmt(p) {
            Ok(x) => v.push(x),
            Err(()) => {
                // Skip ahead to a terminator and end the block there.
                p.recover_balanced(terminators, false);
                terminator = p.peek(0).0;
                p.bump();
                break;
            }
        }
    }
    // Optional `: label` after the terminator; must match the block label.
    if p.try_eat(Colon) {
        let (name, name_span) = p.eat_ident("block label")?;
        if let Some(before) = *label {
            if before != name {
                p.add_diag(DiagBuilder2::error(format!("Block label {} at end of block does not match label {} at beginning of block", name, before)).span(name_span));
            }
        } else {
            p.add_diag(
                DiagBuilder2::error(format!(
                    "Block label {} provided at the end of the block, but not at the beginning",
                    name
                ))
                .span(name_span),
            );
        }
    }
    Ok((v, terminator))
}
fn parse_continuous_assign(p: &mut dyn AbstractParser) -> ReportedResult<ContAssign> {
let mut span = p.peek(0).1;
p.require_reported(Keyword(Kw::Assign))?;
let strength = try_flanked(p, Paren, |p| {
let span = p.peek(0).1;
match try_drive_strength(p)? {
Some(x) => Ok(x),
None => {
p.add_diag(DiagBuilder2::error("Expected drive strength").span(span));
Err(())
}
}
})?;
let delay_control = try_delay_control(p)?;
let assignments = comma_list_nonempty(p, Semicolon, "continuous assignment", parse_assignment)?;
p.require_reported(Semicolon)?;
span.expand(p.last_span());
Ok(ContAssign {
span: span,
strength: strength,
delay: None,
delay_control: delay_control,
assignments: assignments,
})
}
fn parse_if_or_case(
p: &mut dyn AbstractParser,
up: Option<UniquePriority>,
) -> ReportedResult<StmtData> {
let (tkn, span) = p.peek(0);
match tkn {
Keyword(Kw::Case) => {
p.bump();
parse_case(p, up, CaseKind::Normal)
}
Keyword(Kw::Casez) => {
p.bump();
parse_case(p, up, CaseKind::DontCareZ)
}
Keyword(Kw::Casex) => {
p.bump();
parse_case(p, up, CaseKind::DontCareXZ)
}
Keyword(Kw::If) => {
p.bump();
parse_if(p, up)
}
x => {
p.add_diag(
DiagBuilder2::error(format!("Expected case or if statement, got {:?}", x))
.span(span),
);
Err(())
}
}
}
/// Parse a case statement (the `case`/`casez`/`casex` keyword has already
/// been consumed): the parenthesized subject expression, an optional
/// `inside`/`matches` mode, the case items, and `endcase`.
fn parse_case(
    p: &mut dyn AbstractParser,
    up: Option<UniquePriority>,
    kind: CaseKind,
) -> ReportedResult<StmtData> {
    // NOTE(review): `q` is unused (silenced by the file-level
    // `allow(unused_variables)`); looks like a leftover debug binding.
    let q = p.last_span();
    p.require_reported(OpenDelim(Paren))?;
    let expr = match parse_expr(p) {
        Ok(x) => x,
        Err(()) => {
            p.recover_balanced(&[CloseDelim(Paren)], true);
            return Err(());
        }
    };
    p.require_reported(CloseDelim(Paren))?;
    // Optional case mode keyword.
    let mode = match p.peek(0).0 {
        Keyword(Kw::Inside) => {
            p.bump();
            CaseMode::Inside
        }
        Keyword(Kw::Matches) => {
            p.bump();
            CaseMode::Pattern
        }
        _ => CaseMode::Normal,
    };
    // Collect case items until `endcase` (or end of input).
    let mut items = Vec::new();
    while p.peek(0).0 != Keyword(Kw::Endcase) && p.peek(0).0 != Eof {
        let mut span = p.peek(0).1;
        // `default [:] <stmt>` item.
        if p.peek(0).0 == Keyword(Kw::Default) {
            p.bump();
            p.try_eat(Colon);
            let stmt = Box::new(parse_stmt(p)?);
            items.push(CaseItem::Default(stmt));
        }
        // `<expr>, <expr>, ... : <stmt>` item.
        else {
            let mut exprs = Vec::new();
            loop {
                if p.peek(0).0 == OpenDelim(Brack) {
                    // NOTE(review): a `[lo:hi]` range item is parsed here but
                    // both expressions are discarded -- nothing is pushed to
                    // `exprs`. TODO confirm whether ranges should be kept in
                    // the AST.
                    p.require_reported(OpenDelim(Brack))?;
                    parse_expr(p)?;
                    p.require_reported(Colon)?;
                    parse_expr(p)?;
                    p.require_reported(CloseDelim(Brack))?;
                } else {
                    match parse_expr(p) {
                        Ok(x) => exprs.push(x),
                        Err(()) => {
                            p.recover_balanced(&[Colon], false);
                            break;
                        }
                    }
                }
                match p.peek(0) {
                    (Comma, sp) => {
                        p.bump();
                        // NOTE(review): this consumes the `:` when a trailing
                        // comma is directly followed by it, yet the code
                        // below still requires a `:` -- compare with
                        // parse_expr_list, which only peeks. TODO verify.
                        if p.try_eat(Colon) {
                            p.add_diag(
                                DiagBuilder2::warning("Superfluous trailing comma").span(sp),
                            );
                            break;
                        }
                    }
                    (Colon, _) => break,
                    (_, sp) => {
                        p.add_diag(
                            DiagBuilder2::error("Expected , or : after case expression").span(sp),
                        );
                        break;
                    }
                }
            }
            p.require_reported(Colon)?;
            let stmt = Box::new(parse_stmt(p)?);
            items.push(CaseItem::Expr(exprs, stmt));
        }
    }
    p.require_reported(Keyword(Kw::Endcase))?;
    Ok(CaseStmt {
        up: up,
        kind: kind,
        expr: expr,
        mode: mode,
        items: items,
    })
}
fn parse_if(p: &mut dyn AbstractParser, up: Option<UniquePriority>) -> ReportedResult<StmtData> {
p.require_reported(OpenDelim(Paren))?;
let cond = match parse_expr(p) {
Ok(x) => x,
Err(()) => {
p.recover_balanced(&[CloseDelim(Paren)], true);
return Err(());
}
};
p.require_reported(CloseDelim(Paren))?;
let main_stmt = Box::new(parse_stmt(p)?);
let else_stmt = if p.peek(0).0 == Keyword(Kw::Else) {
p.bump();
Some(Box::new(parse_stmt(p)?))
} else {
None
};
Ok(IfStmt {
up: up,
cond: cond,
main_stmt: main_stmt,
else_stmt: else_stmt,
})
}
/// Try to parse a `#` delay control. Returns `Ok(None)` if the next token is
/// not `#`.
fn try_delay_control(p: &mut dyn AbstractParser) -> ReportedResult<Option<DelayControl>> {
    if !p.try_eat(Hashtag) {
        return Ok(None);
    }
    let mut span = p.last_span();
    let (tkn, sp) = p.peek(0);
    let expr = match tkn {
        // `#(<mintypmax expression>)`
        OpenDelim(Paren) => {
            p.bump();
            let e = parse_expr_prec(p, Precedence::MinTypMax)?;
            p.require_reported(CloseDelim(Paren))?;
            e
        }
        // `#<number>`, `#<time literal>`, or `#<identifier>`
        Literal(Number(..)) | Literal(Time(..)) | Ident(..) => {
            parse_expr_first(p, Precedence::Max)?
        }
        _ => {
            p.add_diag(DiagBuilder2::error("Expected delay value or expression after #").span(sp));
            return Err(());
        }
    };
    span.expand(p.last_span());
    Ok(Some(DelayControl {
        span: span,
        expr: expr,
    }))
}
/// Try to parse an `@` event control. Returns `Ok(None)` if the next token is
/// not `@`. Both `@*` and `@(*)` yield the implicit event control; anything
/// else is parsed as a full event expression.
fn try_event_control(p: &mut dyn AbstractParser) -> ReportedResult<Option<EventControl>> {
    if !p.try_eat(At) {
        return Ok(None);
    }
    let mut span = p.last_span();
    // Detect the two implicit forms: `@*` and `@(*)`.
    let implicit = if p.peek(0).0 == Operator(Op::Mul) {
        p.bump();
        true
    } else if p.peek(0).0 == OpenDelim(Paren)
        && p.peek(1).0 == Operator(Op::Mul)
        && p.peek(2).0 == CloseDelim(Paren)
    {
        p.bump();
        p.bump();
        p.bump();
        true
    } else {
        false
    };
    if implicit {
        span.expand(p.last_span());
        return Ok(Some(EventControl {
            span: span,
            data: EventControlData::Implicit,
        }));
    }
    // Otherwise an explicit event expression follows.
    let expr = parse_event_expr(p, EventPrecedence::Max)?;
    span.expand(p.last_span());
    Ok(Some(EventControl {
        span: span,
        data: EventControlData::Expr(expr),
    }))
}
/// Try to parse a `##` cycle delay. Returns `Ok(None)` if the next token is
/// not `##`; the cycle delay itself is not implemented yet and always errors.
fn try_cycle_delay(p: &mut dyn AbstractParser) -> ReportedResult<Option<CycleDelay>> {
    if !p.try_eat(DoubleHashtag) {
        return Ok(None);
    }
    let q = p.last_span();
    p.add_diag(DiagBuilder2::error("Don't know how to parse cycle delay").span(q));
    Err(())
}
/// Parse one `<lvalue> = <rvalue>` pair, as used in continuous assignments.
/// The left-hand side is parsed at postfix precedence so the `=` operator is
/// left in the stream for `require_reported` to consume.
fn parse_assignment(p: &mut dyn AbstractParser) -> ReportedResult<(Expr, Expr)> {
    let lvalue = parse_expr_prec(p, Precedence::Postfix)?;
    p.require_reported(Operator(Op::Assign))?;
    let rvalue = parse_expr_prec(p, Precedence::Assignment)?;
    Ok((lvalue, rvalue))
}
/// Parse a blocking (`=`, `+=`, …) or non-blocking (`<=`) assignment
/// statement, including the terminating semicolon.
fn parse_assign_stmt(p: &mut dyn AbstractParser) -> ReportedResult<StmtData> {
    // Postfix precedence leaves the assignment operator in the stream.
    let expr = parse_expr_prec(p, Precedence::Postfix)?;
    let (tkn, sp) = p.peek(0);
    // Blocking assignment with any assignment operator.
    if let Some(op) = as_assign_operator(tkn) {
        p.bump();
        let rhs = parse_expr(p)?;
        p.require_reported(Semicolon)?;
        return Ok(BlockingAssignStmt {
            lhs: expr,
            rhs: rhs,
            op: op,
        });
    }
    // Non-blocking assignment `lhs <= [#delay] rhs;`.
    if tkn == Operator(Op::Leq) {
        p.bump();
        let delay_control = try_delay_control(p)?;
        // NOTE(review): event controls on non-blocking assignments are not
        // parsed here; the field is always `None` — confirm intentional.
        let event_control = None;
        let rhs = parse_expr(p)?;
        p.require_reported(Semicolon)?;
        return Ok(NonblockingAssignStmt {
            lhs: expr,
            rhs: rhs,
            delay: delay_control,
            event: event_control,
        });
    }
    p.add_diag(DiagBuilder2::error("Expected blocking or non-blocking assign statement").span(sp));
    Err(())
}
/// Parse an expression used as a statement, terminated by `;`. The expression
/// is parsed at unary precedence.
fn parse_expr_stmt(p: &mut dyn AbstractParser) -> ReportedResult<StmtData> {
    let inner = parse_expr_prec(p, Precedence::Unary)?;
    p.require_reported(Semicolon)?;
    Ok(ExprStmt(inner))
}
/// Parse an event expression, e.g. `posedge clk` or `(a or b) iff en`, at the
/// given precedence.
fn parse_event_expr(
    p: &mut dyn AbstractParser,
    precedence: EventPrecedence,
) -> ReportedResult<EventExpr> {
    let mut span = p.peek(0).1;
    // Parenthesized sub-expression, parsed at minimum precedence.
    if p.try_eat(OpenDelim(Paren)) {
        return match parse_event_expr(p, EventPrecedence::Min) {
            Ok(x) => {
                p.require_reported(CloseDelim(Paren))?;
                parse_event_expr_suffix(p, x, precedence)
            }
            Err(()) => {
                p.recover_balanced(&[CloseDelim(Paren)], true);
                Err(())
            }
        };
    }
    // Optional `edge`/`posedge`/`negedge` prefix; absent means implicit.
    let edge = as_edge_ident(p.peek(0).0);
    if edge != EdgeIdent::Implicit {
        p.bump();
    }
    let value = parse_expr(p)?;
    span.expand(p.last_span());
    let expr = EventExpr::Edge {
        span: span,
        edge: edge,
        value: value,
    };
    // Fold in any `iff`/`or` suffixes allowed at this precedence.
    parse_event_expr_suffix(p, expr, precedence)
}
/// Parse the trailing `iff <cond>` or `or`/`,` combinators of an event
/// expression, subject to the caller's precedence.
fn parse_event_expr_suffix(
    p: &mut dyn AbstractParser,
    expr: EventExpr,
    precedence: EventPrecedence,
) -> ReportedResult<EventExpr> {
    match p.peek(0).0 {
        Keyword(Kw::Iff) if precedence < EventPrecedence::Iff => {
            p.bump();
            let cond = parse_expr(p)?;
            Ok(EventExpr::Iff {
                span: Span::union(expr.span(), cond.span),
                expr: Box::new(expr),
                cond: cond,
            })
        }
        // `,` is accepted as a synonym for `or` in event lists.
        Keyword(Kw::Or) | Comma if precedence <= EventPrecedence::Or => {
            p.bump();
            let rhs = parse_event_expr(p, EventPrecedence::Or)?;
            Ok(EventExpr::Or {
                span: Span::union(expr.span(), rhs.span()),
                lhs: Box::new(expr),
                rhs: Box::new(rhs),
            })
        }
        _ => Ok(expr),
    }
}
/// Precedence levels for event expression operators, ordered from loosest
/// (`Min`) to tightest (`Max`); `Iff` binds tighter than `Or`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum EventPrecedence {
    Min,
    Or,
    Iff,
    Max,
}
/// Map an `edge`/`posedge`/`negedge` keyword to its edge identifier, or
/// `Implicit` for any other token.
fn as_edge_ident(tkn: Token) -> EdgeIdent {
    match tkn {
        Keyword(Kw::Posedge) => EdgeIdent::Posedge,
        Keyword(Kw::Negedge) => EdgeIdent::Negedge,
        Keyword(Kw::Edge) => EdgeIdent::Edge,
        _ => EdgeIdent::Implicit,
    }
}
/// Parse the argument list of a call, without the surrounding parentheses.
/// Supports positional, named (`.name(expr)`), and empty arguments.
fn parse_call_args(p: &mut dyn AbstractParser) -> ReportedResult<Vec<CallArg>> {
    let mut v = Vec::new();
    if p.peek(0).0 == CloseDelim(Paren) {
        return Ok(v);
    }
    loop {
        match p.peek(0) {
            // A comma right away is an empty (skipped) argument; the comma
            // itself is consumed by the separator match below.
            (Comma, sp) => v.push(CallArg {
                span: sp,
                name_span: sp,
                name: None,
                expr: None,
            }),
            // Named argument `.name(<expr>)`, with optionally empty parens.
            (Period, mut sp) => {
                p.bump();
                let (name, mut name_sp) = p.eat_ident("argument name")?;
                name_sp.expand(sp);
                let expr = flanked(p, Paren, |p| {
                    Ok(if p.peek(0).0 == CloseDelim(Paren) {
                        None
                    } else {
                        Some(parse_expr(p)?)
                    })
                })?;
                sp.expand(p.last_span());
                v.push(CallArg {
                    span: sp,
                    name_span: name_sp,
                    name: Some(name),
                    expr: expr,
                });
            }
            // Positional argument.
            (_, mut sp) => {
                let expr = parse_expr(p)?;
                sp.expand(p.last_span());
                v.push(CallArg {
                    span: sp,
                    name_span: sp,
                    name: None,
                    expr: Some(expr),
                });
            }
        }
        // Separator: `,` continues the list, `)` ends it (left unconsumed).
        match p.peek(0) {
            (Comma, sp) => {
                p.bump();
                if p.try_eat(CloseDelim(Paren)) {
                    p.add_diag(DiagBuilder2::warning("Superfluous trailing comma").span(sp));
                    break;
                }
            }
            (CloseDelim(Paren), _) => break,
            (_, sp) => {
                p.add_diag(DiagBuilder2::error("Expected , or ) after call argument").span(sp));
                return Err(());
            }
        }
    }
    Ok(v)
}
/// Parse one variable declarator `name [dims] [= init]`, as used in data,
/// net, and port declarations.
fn parse_variable_decl_assignment(p: &mut dyn AbstractParser) -> ReportedResult<VarDeclName> {
    let mut decl_span = p.peek(0).1;
    let (name, name_span) = p.eat_ident("variable name")?;
    // Optional dimensions after the name.
    let (dims, _) = parse_optional_dimensions(p)?;
    // Optional `= <expr>` initializer.
    let mut init = None;
    if p.try_eat(Operator(Op::Assign)) {
        init = Some(parse_expr(p)?);
    }
    decl_span.expand(p.last_span());
    Ok(VarDeclName {
        id: DUMMY_NODE_ID,
        span: decl_span,
        name: name,
        name_span: name_span,
        dims: dims,
        init: init,
    })
}
/// Parse one genvar declarator: a name with an optional `= <expr>`
/// initializer.
fn parse_genvar_decl(p: &mut dyn AbstractParser) -> ReportedResult<GenvarDecl> {
    let mut decl_span = p.peek(0).1;
    let (name, name_span) = p.eat_ident("genvar name")?;
    // Optional initializer.
    let mut init = None;
    if p.try_eat(Operator(Op::Assign)) {
        init = Some(parse_expr(p)?);
    }
    decl_span.expand(p.last_span());
    Ok(GenvarDecl {
        id: DUMMY_NODE_ID,
        span: decl_span,
        name: name,
        name_span: name_span,
        init: init,
    })
}
/// Parse one item inside a generate region. `for`/`if`/`case` introduce
/// generate constructs; anything else is a regular hierarchy item.
fn parse_generate_item(p: &mut dyn AbstractParser) -> ReportedResult<HierarchyItem> {
    match p.peek(0).0 {
        Keyword(Kw::For) => Ok(HierarchyItem::GenerateFor(parse_generate_for(p)?)),
        Keyword(Kw::If) => Ok(HierarchyItem::GenerateIf(parse_generate_if(p)?)),
        Keyword(Kw::Case) => Ok(HierarchyItem::GenerateCase(parse_generate_case(p)?)),
        _ => parse_hierarchy_item(p),
    }
}
/// Parse a `for (<init>; <cond>; <step>) <block>` generate construct.
fn parse_generate_for(p: &mut dyn AbstractParser) -> ReportedResult<GenerateFor> {
    let mut span = p.peek(0).1;
    p.require_reported(Keyword(Kw::For))?;
    // The init is a full statement and thus consumes its own semicolon.
    let header = flanked(p, Paren, |p| {
        let init = parse_stmt(p)?;
        let cond = parse_expr(p)?;
        p.require_reported(Semicolon)?;
        let step = parse_expr(p)?;
        Ok((init, cond, step))
    })?;
    let body = parse_generate_block(p)?;
    span.expand(p.last_span());
    let (init, cond, step) = header;
    Ok(GenerateFor {
        span: span,
        init: init,
        cond: cond,
        step: step,
        block: body,
    })
}
/// Parse an `if (<cond>) <block> [else <block>]` generate construct.
fn parse_generate_if(p: &mut dyn AbstractParser) -> ReportedResult<GenerateIf> {
    let mut span = p.peek(0).1;
    p.require_reported(Keyword(Kw::If))?;
    let condition = flanked(p, Paren, parse_expr)?;
    let then_block = parse_generate_block(p)?;
    // Optional `else` block.
    let mut alt_block = None;
    if p.try_eat(Keyword(Kw::Else)) {
        alt_block = Some(parse_generate_block(p)?);
    }
    span.expand(p.last_span());
    Ok(GenerateIf {
        span: span,
        cond: condition,
        main_block: then_block,
        else_block: alt_block,
    })
}
/// Stub: `case`-generate constructs are not implemented yet; the `case`
/// keyword is consumed and an error diagnostic is always produced.
fn parse_generate_case(p: &mut dyn AbstractParser) -> ReportedResult<GenerateCase> {
    let mut span = p.peek(0).1;
    p.require_reported(Keyword(Kw::Case))?;
    let q = p.last_span();
    p.add_diag(DiagBuilder2::error("Don't know how to parse case-generate statements").span(q));
    Err(())
}
/// Parse a generate block: either a single generate item, or a
/// `begin ... end` region. A label may appear before `begin`, after
/// `begin :`, and after `end :`; conflicting labels are diagnosed.
fn parse_generate_block(p: &mut dyn AbstractParser) -> ReportedResult<GenerateBlock> {
    let mut span = p.peek(0).1;
    // Optional `label :` before the `begin` keyword.
    let mut label = if p.is_ident() && p.peek(1).0 == Colon {
        let (n, _) = p.eat_ident("generate block label")?;
        p.require_reported(Colon)?;
        Some(n)
    } else {
        None
    };
    if !p.try_eat(OpenDelim(Bgend)) {
        // Without `begin`, only a single, unlabelled item is allowed.
        if label.is_some() {
            let (t, q) = p.peek(0);
            p.add_diag(
                DiagBuilder2::error(format!(
                    "Expected `begin` keyword after generate block label, found {} instead",
                    t
                ))
                .span(q),
            );
            return Err(());
        }
        let item = parse_generate_item(p)?;
        span.expand(p.last_span());
        return Ok(GenerateBlock {
            span: span,
            label: label,
            items: vec![item],
        });
    }
    // Optional `begin : label`, checked against any earlier label.
    if p.try_eat(Colon) {
        let (n, sp) = p.eat_ident("generate block label")?;
        if let Some(existing) = label {
            if existing == n {
                p.add_diag(
                    DiagBuilder2::warning(format!("Generate block {} labelled twice", n)).span(sp),
                );
            } else {
                p.add_diag(
                    DiagBuilder2::error(format!(
                        "Generate block given conflicting labels {} and {}",
                        existing, n
                    ))
                    .span(sp),
                );
                return Err(());
            }
        } else {
            label = Some(n);
        }
    }
    let items = repeat_until(p, CloseDelim(Bgend), parse_generate_item)?;
    p.require_reported(CloseDelim(Bgend))?;
    // Optional `end : label`, which must match the leading label if present.
    if p.try_eat(Colon) {
        let (n, sp) = p.eat_ident("generate block label")?;
        if let Some(existing) = label {
            if existing != n {
                p.add_diag(DiagBuilder2::error(format!("Label {} given after generate block does not match label {} given before the block", n, existing)).span(sp));
                return Err(());
            }
        } else {
            p.add_diag(
                DiagBuilder2::warning(format!(
                    "Generate block has trailing label {}, but is missing leading label",
                    n
                ))
                .span(sp),
            );
        }
    }
    span.expand(p.last_span());
    Ok(GenerateBlock {
        span: span,
        label: label,
        items: items,
    })
}
/// Parse a class declaration:
/// `[virtual] class [lifetime] name [#(params)] [extends super [(args)]];
/// ... endclass [: name]`.
fn parse_class_decl(p: &mut dyn AbstractParser) -> ReportedResult<ClassDecl> {
    let mut span = p.peek(0).1;
    // Parse everything up to `endclass`, recovering to it on failure so the
    // closing keyword can still be consumed below.
    let result = recovered(p, Keyword(Kw::Endclass), |p| {
        let virt = p.try_eat(Keyword(Kw::Virtual));
        p.require_reported(Keyword(Kw::Class))?;
        // Optional lifetime keyword; defaults to static.
        let lifetime = match as_lifetime(p.peek(0).0) {
            Some(l) => {
                p.bump();
                l
            }
            None => Lifetime::Static,
        };
        let name = parse_identifier(p, "class name")?;
        // Optional `#(...)` parameter port list.
        let params = if p.try_eat(Hashtag) {
            parse_parameter_port_list(p)?
        } else {
            Vec::new()
        };
        // Optional `extends <superclass> [(args)]`.
        let extends = if p.try_eat(Keyword(Kw::Extends)) {
            let superclass = parse_data_type(p)?;
            let args = try_flanked(p, Paren, parse_call_args)?.unwrap_or(Vec::new());
            Some((superclass, args))
        } else {
            None
        };
        p.require_reported(Semicolon)?;
        let items = repeat_until(p, Keyword(Kw::Endclass), parse_class_item)?;
        Ok((virt, lifetime, name, params, extends, items))
    });
    p.require_reported(Keyword(Kw::Endclass))?;
    let (virt, lifetime, name, params, extends, items) = result?;
    // Optional `: name` after `endclass` must repeat the class name.
    if p.try_eat(Colon) {
        let (n, sp) = p.eat_ident("class name")?;
        if n != name.name {
            p.add_diag(
                DiagBuilder2::error(format!(
                    "Class name {} disagrees with name {} given before",
                    n, name.name
                ))
                .span(sp),
            );
            return Err(());
        }
    }
    span.expand(p.last_span());
    Ok(ClassDecl {
        span: span,
        virt: virt,
        lifetime: lifetime,
        name: name,
        params: params,
        extends: extends,
        items: items,
    })
}
/// Parse a single class body item: a null item (`;`), a parameter or
/// localparam declaration, an extern subroutine prototype, or — resolved by
/// speculative parsing — a property, subroutine, or constraint.
fn parse_class_item(p: &mut dyn AbstractParser) -> ReportedResult<ClassItem> {
    let mut span = p.peek(0).1;
    // A stray `;` is a null item.
    if p.try_eat(Semicolon) {
        return Ok(ClassItem {
            span: span,
            qualifiers: Vec::new(),
            data: ClassItemData::Null,
        });
    }
    match p.peek(0).0 {
        Keyword(Kw::Localparam) => {
            return Ok(ClassItem {
                span: span,
                qualifiers: Vec::new(),
                data: ClassItemData::LocalparamDecl(parse_localparam_decl(p)?),
            });
        }
        Keyword(Kw::Parameter) => {
            return Ok(ClassItem {
                span: span,
                qualifiers: Vec::new(),
                data: ClassItemData::ParameterDecl(parse_parameter_decl(p)?),
            });
        }
        _ => (),
    }
    // `extern` introduces an out-of-class subroutine prototype.
    if p.try_eat(Keyword(Kw::Extern)) {
        let proto = parse_subroutine_prototype(p)?;
        span.expand(p.last_span());
        return Ok(ClassItem {
            span: span,
            qualifiers: Vec::new(),
            data: ClassItemData::ExternSubroutine(proto),
        });
    }
    let qualifiers = parse_class_item_qualifiers(p)?;
    // The remaining alternatives are ambiguous; try them in parallel.
    let data = {
        let mut pp = ParallelParser::new();
        pp.add("class property", |p| {
            let ty = parse_data_type(p)?;
            let names = comma_list_nonempty(
                p,
                Semicolon,
                "data declaration",
                parse_variable_decl_assignment,
            )?;
            p.require_reported(Semicolon)?;
            // NOTE(review): the parsed type and declarator names are dropped;
            // `ClassItemData::Property` carries no payload — confirm this is
            // a placeholder.
            Ok(ClassItemData::Property)
        });
        pp.add("class function or task", |p| {
            parse_subroutine_decl(p).map(|d| ClassItemData::SubroutineDecl(d))
        });
        pp.add("class constraint", |p| {
            parse_constraint(p).map(|c| ClassItemData::Constraint(c))
        });
        pp.finish(p, "class item")?
    };
    span.expand(p.last_span());
    Ok(ClassItem {
        span: span,
        qualifiers: qualifiers,
        data: data,
    })
}
/// Collect the leading qualifier keywords of a class item (`static`,
/// `protected`, `rand`, …), each paired with its source span, in order of
/// appearance. Stops at the first non-qualifier token.
fn parse_class_item_qualifiers(
    p: &mut dyn AbstractParser,
) -> ReportedResult<Vec<(ClassItemQualifier, Span)>> {
    let mut quals = Vec::new();
    loop {
        let (tkn, sp) = p.peek(0);
        let qual = match tkn {
            Keyword(Kw::Static) => ClassItemQualifier::Static,
            Keyword(Kw::Protected) => ClassItemQualifier::Protected,
            Keyword(Kw::Local) => ClassItemQualifier::Local,
            Keyword(Kw::Rand) => ClassItemQualifier::Rand,
            Keyword(Kw::Randc) => ClassItemQualifier::Randc,
            Keyword(Kw::Pure) => ClassItemQualifier::Pure,
            Keyword(Kw::Virtual) => ClassItemQualifier::Virtual,
            Keyword(Kw::Const) => ClassItemQualifier::Const,
            _ => return Ok(quals),
        };
        quals.push((qual, sp));
        p.bump();
    }
}
/// Stub: class methods are not parsed here yet; always reports failure.
/// NOTE(review): `println!` debug output left in — likely temporary.
fn parse_class_method(p: &mut dyn AbstractParser) -> ReportedResult<ClassItem> {
    println!("Parsing class method");
    Err(())
}
/// Stub: class properties are not parsed here yet; always reports failure.
/// NOTE(review): `println!` debug output left in — likely temporary.
fn parse_class_property(p: &mut dyn AbstractParser) -> ReportedResult<ClassItem> {
    println!("Parsing class property");
    p.try_eat(Keyword(Kw::Rand));
    Err(())
}
/// Parse a class constraint declaration or prototype.
///
/// Grammar: `[extern|pure] [static] constraint <name> (; | { <items> })`.
/// A declaration without a body (`constraint c;`) is recorded as a prototype.
fn parse_constraint(p: &mut dyn AbstractParser) -> ReportedResult<Constraint> {
    let mut span = p.peek(0).1;
    // Optional `extern`/`pure` prefix makes this a prototype of some kind.
    let mut kind = match p.peek(0).0 {
        Keyword(Kw::Extern) => {
            p.bump();
            ConstraintKind::ExternProto
        }
        Keyword(Kw::Pure) => {
            p.bump();
            ConstraintKind::PureProto
        }
        _ => ConstraintKind::Decl,
    };
    let kind_span = span;
    let statik = p.try_eat(Keyword(Kw::Static));
    p.require_reported(Keyword(Kw::Constraint))?;
    let (name, name_span) = p.eat_ident("constraint name")?;
    let items = if p.try_eat(Semicolon) {
        // A bare `constraint c;` is an implicit prototype. Bug fix: the
        // previous code computed this promotion into a shadowed local and
        // immediately dropped it, so the returned kind stayed `Decl`; now
        // the promotion is actually recorded.
        if kind == ConstraintKind::Decl {
            kind = ConstraintKind::Proto;
        }
        Vec::new()
    } else {
        // Only plain declarations may carry a `{ ... }` body.
        if kind == ConstraintKind::ExternProto || kind == ConstraintKind::PureProto {
            p.add_diag(
                DiagBuilder2::error("Only constraint prototypes can be extern or pure")
                    .span(kind_span),
            );
            return Err(());
        }
        flanked(p, Brace, |p| {
            repeat_until(p, CloseDelim(Brace), parse_constraint_item)
        })?
    };
    span.expand(p.last_span());
    Ok(Constraint {
        span: span,
        kind: kind,
        statik: statik,
        name: name,
        name_span: name_span,
        items: items,
    })
}
/// Parse a single item within a constraint body, recording its source span.
fn parse_constraint_item(p: &mut dyn AbstractParser) -> ReportedResult<ConstraintItem> {
    let mut item_span = p.peek(0).1;
    let payload = parse_constraint_item_data(p)?;
    item_span.expand(p.last_span());
    Ok(ConstraintItem {
        span: item_span,
        data: payload,
    })
}
fn parse_constraint_item_data(p: &mut dyn AbstractParser) -> ReportedResult<ConstraintItemData> {
if p.try_eat(Keyword(Kw::If)) {
let q = p.last_span();
p.add_diag(DiagBuilder2::error("Don't know how to parse `if` constraint items").span(q));
return Err(());
}
if p.try_eat(Keyword(Kw::Foreach)) {
let q = p.last_span();
p.add_diag(
DiagBuilder2::error("Don't know how to parse `foreach` constraint items").span(q),
);
return Err(());
}
let expr = parse_expr(p)?;
p.require_reported(Semicolon)?;
Ok(ConstraintItemData::Expr(expr))
}
/// Speculative parser that tries several alternative grammar branches on the
/// same token stream and commits to the one that succeeds.
struct ParallelParser<'a, R: Clone> {
    // Each branch: human-readable name, parse function, and a `greedy` flag.
    // A greedy branch commits immediately on success instead of being
    // compared against the other branches for ambiguity.
    branches: Vec<(
        String,
        Box<dyn FnMut(&mut dyn AbstractParser) -> ReportedResult<R> + 'a>,
        bool,
    )>,
}
impl<'a, R: Clone> ParallelParser<'a, R> {
    /// Create a parser with no branches.
    pub fn new() -> Self {
        ParallelParser {
            branches: Vec::new(),
        }
    }
    /// Add a branch that participates in ambiguity resolution.
    pub fn add<F>(&mut self, name: &str, func: F)
    where
        F: FnMut(&mut dyn AbstractParser) -> ReportedResult<R> + 'a,
    {
        self.branches.push((name.to_owned(), Box::new(func), false));
    }
    /// Add a branch that commits immediately when it succeeds, without
    /// considering any remaining branches.
    pub fn add_greedy<F>(&mut self, name: &str, func: F)
    where
        F: FnMut(&mut dyn AbstractParser) -> ReportedResult<R> + 'a,
    {
        self.branches.push((name.to_owned(), Box::new(func), true));
    }
    /// Run all branches speculatively against `p` and commit the winner.
    ///
    /// Exactly one succeeding branch wins; several successes are reported as
    /// an ambiguity. If all branches fail, the diagnostics of the branch(es)
    /// that consumed the most tokens are replayed (or a generic error when
    /// there is a tie).
    pub fn finish(self, p: &mut dyn AbstractParser, msg: &str) -> ReportedResult<R> {
        let (tkn, q) = p.peek(0);
        let mut results = Vec::new();
        let mut matched = Vec::new();
        for (name, mut func, greedy) in self.branches {
            // Each branch runs on a buffering BranchParser so failures leave
            // the underlying parser untouched.
            let mut bp = BranchParser::new(p);
            match func(&mut bp) {
                Ok(x) => {
                    if greedy {
                        bp.commit();
                        return Ok(x);
                    } else {
                        let sp = bp.last_span();
                        results.push((name, bp.consumed, bp.diagnostics, x, Span::union(q, sp)));
                    }
                }
                // Rank failed branches by tokens consumed, minus skipped.
                Err(_) => matched.push((name, bp.consumed() - bp.skipped(), bp.diagnostics)),
            }
        }
        if results.len() > 1 {
            // Multiple branches succeeded: report the ambiguity with all
            // candidate interpretations.
            let mut names = String::new();
            names.push_str(&results[0].0);
            if results.len() == 2 {
                names.push_str(" or ");
                names.push_str(&results[1].0);
            } else {
                for &(ref name, _, _, _, _) in &results[..results.len() - 1] {
                    names.push_str(", ");
                    names.push_str(&name);
                }
                names.push_str(", or ");
                names.push_str(&results[results.len() - 1].0);
            }
            p.add_diag(DiagBuilder2::fatal(format!("Ambiguous code, could be {}", names)).span(q));
            for &(ref name, _, _, _, span) in &results {
                p.add_diag(DiagBuilder2::note(format!("{} would be this part", name)).span(span));
            }
            Err(())
        } else if let Some(&(_, consumed, ref diagnostics, ref res, _)) = results.last() {
            // Exactly one winner: replay its buffered diagnostics and consume
            // its tokens on the real parser.
            for d in diagnostics {
                p.add_diag(d.clone());
            }
            for _ in 0..consumed {
                p.bump();
            }
            Ok((*res).clone())
        } else {
            // All branches failed: report for the one(s) that got furthest.
            matched.sort_by(|a, b| (b.1).cmp(&a.1));
            let highest_score = matched[0].1;
            let errors = matched
                .into_iter()
                .take_while(|e| e.1 == highest_score)
                .collect::<Vec<_>>();
            let num_errors = errors.len();
            if num_errors != 1 {
                p.add_diag(
                    DiagBuilder2::error(format!("Expected {}, found {} instead", msg, tkn)).span(q),
                );
            } else {
                for d in errors.into_iter().next().unwrap().2 {
                    p.add_diag(d);
                }
            }
            Err(())
        }
    }
}
/// A buffering parser used by `ParallelParser` to run one branch
/// speculatively: tokens are only peeked (never consumed from the underlying
/// parser) and diagnostics are buffered until `commit`.
struct BranchParser<'tp> {
    parser: &'tp mut dyn AbstractParser,
    // Tokens virtually consumed, relative to the underlying parser's cursor.
    consumed: usize,
    // Subset of `consumed` that was skipped during error recovery.
    skipped: usize,
    // Diagnostics buffered until the branch is committed.
    diagnostics: Vec<DiagBuilder2>,
    last_span: Span,
    // Highest severity among the buffered diagnostics.
    severity: Severity,
}
impl<'tp> BranchParser<'tp> {
    /// Start a speculative branch on top of `parser`.
    pub fn new(parser: &'tp mut dyn AbstractParser) -> Self {
        let last = parser.last_span();
        BranchParser {
            parser: parser,
            consumed: 0,
            skipped: 0,
            diagnostics: Vec::new(),
            last_span: last,
            severity: Severity::Note,
        }
    }
    /// Number of tokens skipped during error recovery in this branch.
    pub fn skipped(&self) -> usize {
        self.skipped
    }
    /// Apply this branch's effects to the underlying parser: actually consume
    /// the virtually-consumed tokens and emit the buffered diagnostics.
    pub fn commit(self) {
        for _ in 0..self.consumed {
            self.parser.bump();
        }
        for d in self.diagnostics {
            self.parser.add_diag(d);
        }
    }
}
impl<'tp> AbstractParser for BranchParser<'tp> {
    fn peek(&mut self, offset: usize) -> TokenAndSpan {
        // Look past the tokens this branch has already virtually consumed.
        self.parser.peek(self.consumed + offset)
    }
    fn bump(&mut self) {
        // Only advance the virtual cursor; the underlying parser is not
        // touched until `commit`.
        self.last_span = self.parser.peek(self.consumed).1;
        self.consumed += 1;
    }
    fn skip(&mut self) {
        self.bump();
        self.skipped += 1;
    }
    fn consumed(&self) -> usize {
        self.consumed
    }
    fn last_span(&self) -> Span {
        self.last_span
    }
    fn add_diag(&mut self, diag: DiagBuilder2) {
        // Buffer the diagnostic and track the highest severity seen.
        if diag.severity > self.severity {
            self.severity = diag.severity;
        }
        self.diagnostics.push(diag);
    }
    fn severity(&self) -> Severity {
        self.severity
    }
}
/// Parse a `typedef <type> <name> [dims];` declaration.
fn parse_typedef(p: &mut dyn AbstractParser) -> ReportedResult<Typedef> {
    let mut span = p.peek(0).1;
    p.require_reported(Keyword(Kw::Typedef))?;
    let aliased_ty = parse_explicit_type(p)?;
    let new_name = parse_identifier(p, "type name")?;
    // Optional dimensions after the new type name.
    let (dims, _) = parse_optional_dimensions(p)?;
    p.require_reported(Semicolon)?;
    span.expand(p.last_span());
    Ok(Typedef {
        span: span,
        name: new_name,
        ty: aliased_ty,
        dims: dims,
    })
}
/// Parse a port declaration: a direction, an optional net type or `var`
/// keyword, then an explicit or implicit data type (disambiguated
/// speculatively) followed by a list of declarator names.
fn parse_port_decl(p: &mut dyn AbstractParser) -> ReportedResult<PortDecl> {
    let mut span = p.peek(0).1;
    let dir = match as_port_direction(p.peek(0).0) {
        Some(x) => {
            p.bump();
            x
        }
        None => {
            p.add_diag(
                DiagBuilder2::error("Expected port direction (inout, input, output, or ref)")
                    .span(span),
            );
            return Err(());
        }
    };
    // Either a net type keyword or an optional `var` keyword may follow.
    let net_type = as_net_type(p.peek(0).0);
    let var = if net_type.is_some() {
        p.bump();
        false
    } else {
        p.try_eat(Keyword(Kw::Var))
    };
    // Explicit vs. implicit data type is ambiguous; try both in parallel.
    let mut pp = ParallelParser::new();
    pp.add("explicit type", |p| {
        let ty = parse_explicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    pp.add("implicit type", |p| {
        let ty = parse_implicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    let (ty, names) = pp.finish(p, "explicit or implicit type")?;
    // Shared continuation of both branches: the declarator list and `;`.
    fn tail(p: &mut dyn AbstractParser) -> ReportedResult<Vec<VarDeclName>> {
        let names = comma_list_nonempty(
            p,
            Semicolon,
            "port declaration",
            parse_variable_decl_assignment,
        )?;
        p.require_reported(Semicolon)?;
        Ok(names)
    }
    span.expand(p.last_span());
    Ok(PortDecl {
        span: span,
        dir: dir,
        net_type: net_type,
        var: var,
        ty: ty,
        names: names,
    })
}
/// Map a net-type keyword (`wire`, `tri`, `supply0`, …) to its `NetType`, or
/// `None` if the token is not a net type.
fn as_net_type(tkn: Token) -> Option<NetType> {
    match tkn {
        Keyword(Kw::Wire) => Some(NetType::Wire),
        Keyword(Kw::Uwire) => Some(NetType::Uwire),
        Keyword(Kw::Wand) => Some(NetType::WireAnd),
        Keyword(Kw::Wor) => Some(NetType::WireOr),
        Keyword(Kw::Tri) => Some(NetType::Tri),
        Keyword(Kw::Triand) => Some(NetType::TriAnd),
        Keyword(Kw::Trior) => Some(NetType::TriOr),
        Keyword(Kw::Trireg) => Some(NetType::TriReg),
        Keyword(Kw::Tri0) => Some(NetType::Tri0),
        Keyword(Kw::Tri1) => Some(NetType::Tri1),
        Keyword(Kw::Supply0) => Some(NetType::Supply0),
        Keyword(Kw::Supply1) => Some(NetType::Supply1),
        _ => None,
    }
}
/// Parse a net declaration such as
/// `wire (strength) [vectored|scalared] <type> [#delay] <names>;`.
fn parse_net_decl(p: &mut dyn AbstractParser) -> ReportedResult<NetDecl> {
    let mut span = p.peek(0).1;
    let net_type = match as_net_type(p.peek(0).0) {
        Some(x) => {
            p.bump();
            x
        }
        None => {
            let q = p.peek(0).1;
            p.add_diag(DiagBuilder2::error("Expected net type").span(q));
            return Err(());
        }
    };
    // Optional `(<strength>)` after the net type.
    let strength = try_flanked(p, Paren, parse_net_strength)?;
    // Optional `vectored`/`scalared` qualifier.
    let kind = match p.peek(0).0 {
        Keyword(Kw::Vectored) => {
            p.bump();
            NetKind::Vectored
        }
        Keyword(Kw::Scalared) => {
            p.bump();
            NetKind::Scalared
        }
        _ => NetKind::None,
    };
    // Explicit vs. implicit data type is ambiguous; try both in parallel.
    let mut pp = ParallelParser::new();
    pp.add("explicit type", |p| {
        let ty = parse_explicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    pp.add("implicit type", |p| {
        let ty = parse_implicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    let (ty, (delay, names)) = pp.finish(p, "explicit or implicit type")?;
    // Shared continuation of both branches: optional delay, declarators, `;`.
    fn tail(
        p: &mut dyn AbstractParser,
    ) -> ReportedResult<(Option<DelayControl>, Vec<VarDeclName>)> {
        let delay = try_delay_control(p)?;
        let names = comma_list_nonempty(
            p,
            Semicolon,
            "net declaration",
            parse_variable_decl_assignment,
        )?;
        p.require_reported(Semicolon)?;
        Ok((delay, names))
    }
    span.expand(p.last_span());
    Ok(NetDecl {
        span: span,
        net_type: net_type,
        strength: strength,
        kind: kind,
        ty: ty,
        delay: delay,
        names: names,
    })
}
/// Try to parse a `<strength0>, <strength1>` drive strength pair (the
/// surrounding parentheses are handled by the caller). Returns `Ok(None)` if
/// the current token is not a drive strength keyword.
fn try_drive_strength(
    p: &mut dyn AbstractParser,
) -> ReportedResult<Option<(DriveStrength, DriveStrength)>> {
    if let Some(a) = as_drive_strength(p.peek(0).0) {
        p.bump();
        p.require_reported(Comma)?;
        if let Some(b) = as_drive_strength(p.peek(0).0) {
            // Bug fix: consume the second strength keyword. Previously it was
            // only peeked and left in the stream, so the caller's subsequent
            // closing-paren check would trip over it.
            p.bump();
            Ok(Some((a, b)))
        } else {
            let q = p.peek(0).1;
            p.add_diag(DiagBuilder2::error("Expected second drive strength").span(q));
            Err(())
        }
    } else {
        Ok(None)
    }
}
/// Parse a net strength: either a drive strength pair or a single charge
/// strength keyword.
fn parse_net_strength(p: &mut dyn AbstractParser) -> ReportedResult<NetStrength> {
    // Try the two-value drive strength form first.
    if let Some((s0, s1)) = try_drive_strength(p)? {
        return Ok(NetStrength::Drive(s0, s1));
    }
    // Otherwise a single charge strength keyword.
    match as_charge_strength(p.peek(0).0) {
        Some(cs) => {
            p.bump();
            Ok(NetStrength::Charge(cs))
        }
        None => {
            let q = p.peek(0).1;
            p.add_diag(DiagBuilder2::error("Expected drive or charge strength").span(q));
            Err(())
        }
    }
}
/// Map a drive strength keyword to its `DriveStrength`, or `None` if the
/// token is not one.
fn as_drive_strength(tkn: Token) -> Option<DriveStrength> {
    match tkn {
        Keyword(Kw::Highz0) => Some(DriveStrength::HighZ0),
        Keyword(Kw::Highz1) => Some(DriveStrength::HighZ1),
        Keyword(Kw::Weak0) => Some(DriveStrength::Weak0),
        Keyword(Kw::Weak1) => Some(DriveStrength::Weak1),
        Keyword(Kw::Pull0) => Some(DriveStrength::Pull0),
        Keyword(Kw::Pull1) => Some(DriveStrength::Pull1),
        Keyword(Kw::Strong0) => Some(DriveStrength::Strong0),
        Keyword(Kw::Strong1) => Some(DriveStrength::Strong1),
        Keyword(Kw::Supply0) => Some(DriveStrength::Supply0),
        Keyword(Kw::Supply1) => Some(DriveStrength::Supply1),
        _ => None,
    }
}
/// Map `small`/`medium`/`large` to a charge strength, or `None` if the token
/// is not one.
fn as_charge_strength(tkn: Token) -> Option<ChargeStrength> {
    match tkn {
        Keyword(Kw::Large) => Some(ChargeStrength::Large),
        Keyword(Kw::Medium) => Some(ChargeStrength::Medium),
        Keyword(Kw::Small) => Some(ChargeStrength::Small),
        _ => None,
    }
}
/// Parse an import declaration: either a DPI import (`import "DPI-C" ...;`,
/// currently parsed but only reported as unsupported) or a package import
/// list such as `import pkg::*, pkg::name;`.
fn parse_import_decl(p: &mut dyn AbstractParser) -> ReportedResult<ImportDecl> {
    let mut span = p.peek(0).1;
    p.require_reported(Keyword(Kw::Import))?;
    // A string literal after `import` indicates a DPI import.
    if let Literal(Lit::Str(spec)) = p.peek(0).0 {
        let spec = Spanned::new(spec, p.peek(0).1);
        p.bump();
        // Optional `context`/`pure` property; currently not recorded.
        let property = if p.try_eat(Keyword(Kw::Context)) {
            ()
        } else if p.try_eat(Keyword(Kw::Pure)) {
            ()
        } else {
            ()
        };
        // Optional `c_identifier =` before the prototype.
        let cident = if p.peek(1).0 == Operator(Op::Assign) {
            let (n, sp) = p.eat_ident("C identifier")?;
            p.require_reported(Operator(Op::Assign))?;
            Some(Spanned::new(n, sp))
        } else {
            None
        };
        let proto = parse_subroutine_prototype(p)?;
        span.expand(p.last_span());
        // DPI imports are not represented in the AST yet; return empty items.
        p.add_diag(DiagBuilder2::warning("unsupported DPI import").span(span));
        return Ok(ImportDecl {
            span: span,
            items: vec![],
        });
    }
    // Package import: `pkg::*` or `pkg::name`, comma-separated.
    let items = comma_list_nonempty(p, Semicolon, "import item", |p| {
        let pkg = parse_identifier(p, "package name")?;
        p.require_reported(Namespace)?;
        let (tkn, sp) = p.peek(0);
        match tkn {
            Operator(Op::Mul) => {
                p.bump();
                Ok(ImportItem {
                    pkg: pkg,
                    name: None,
                })
            }
            Ident(n) | EscIdent(n) => {
                p.bump();
                Ok(ImportItem {
                    pkg: pkg,
                    name: Some(ast::Identifier {
                        id: DUMMY_NODE_ID,
                        span: sp,
                        name: n,
                    }),
                })
            }
            _ => {
                p.add_diag(
                    DiagBuilder2::error("Expected identifier or * after :: in import declaration")
                        .span(sp),
                );
                Err(())
            }
        }
    })?;
    p.require_reported(Semicolon)?;
    span.expand(p.last_span());
    Ok(ImportDecl {
        span: span,
        items: items,
    })
}
/// Parse an immediate, deferred (`#0`), or concurrent assertion statement:
/// `assert`/`assume`/`cover`/`expect`/`restrict`, with the `property` and
/// `sequence` forms making the assertion concurrent.
fn parse_assertion(p: &mut dyn AbstractParser) -> ReportedResult<Assertion> {
    let mut span = p.peek(0).1;
    let null = get_name_table().intern("0", false);
    // Classify by the token after the assertion keyword: `property`,
    // `sequence`, or `#0` (deferred immediate assertion).
    let is_property = p.peek(1).0 == Keyword(Kw::Property);
    let is_sequence = p.peek(1).0 == Keyword(Kw::Sequence);
    let is_deferred = p.peek(1).0 == Hashtag && p.peek(2).0 == Literal(Number(null, None));
    let data = match p.peek(0).0 {
        // Concurrent `assert property (...)`.
        Keyword(Kw::Assert) if is_property => {
            p.bump();
            p.bump();
            let prop = flanked(p, Paren, parse_property_spec)?;
            let action = parse_assertion_action_block(p)?;
            AssertionData::Concurrent(ConcurrentAssertion::AssertProperty(prop, action))
        }
        // Concurrent `assume property (...)`.
        Keyword(Kw::Assume) if is_property => {
            p.bump();
            p.bump();
            let prop = flanked(p, Paren, parse_property_spec)?;
            let action = parse_assertion_action_block(p)?;
            AssertionData::Concurrent(ConcurrentAssertion::AssumeProperty(prop, action))
        }
        // Concurrent `cover property (...)` with a single statement.
        Keyword(Kw::Cover) if is_property => {
            p.bump();
            p.bump();
            let prop = flanked(p, Paren, parse_property_spec)?;
            let stmt = parse_stmt(p)?;
            AssertionData::Concurrent(ConcurrentAssertion::CoverProperty(prop, stmt))
        }
        // `cover sequence` is not implemented yet.
        Keyword(Kw::Cover) if is_sequence => {
            p.bump();
            p.bump();
            p.add_diag(DiagBuilder2::error("Don't know how to parse cover sequences").span(span));
            return Err(());
        }
        Keyword(Kw::Expect) => {
            p.bump();
            let prop = flanked(p, Paren, parse_property_spec)?;
            let action = parse_assertion_action_block(p)?;
            AssertionData::Concurrent(ConcurrentAssertion::ExpectProperty(prop, action))
        }
        Keyword(Kw::Restrict) if is_property => {
            p.bump();
            p.bump();
            let prop = flanked(p, Paren, parse_property_spec)?;
            AssertionData::Concurrent(ConcurrentAssertion::RestrictProperty(prop))
        }
        // Immediate or deferred `assert (...)`.
        Keyword(Kw::Assert) => {
            p.bump();
            // Consume `#0` for the deferred form.
            if is_deferred {
                p.bump();
                p.bump();
            }
            let expr = flanked(p, Paren, parse_expr)?;
            let action = parse_assertion_action_block(p)?;
            let a = BlockingAssertion::Assert(expr, action);
            if is_deferred {
                AssertionData::Deferred(a)
            } else {
                AssertionData::Immediate(a)
            }
        }
        // Immediate or deferred `assume (...)`.
        Keyword(Kw::Assume) => {
            p.bump();
            if is_deferred {
                p.bump();
                p.bump();
            }
            let expr = flanked(p, Paren, parse_expr)?;
            let action = parse_assertion_action_block(p)?;
            let a = BlockingAssertion::Assume(expr, action);
            if is_deferred {
                AssertionData::Deferred(a)
            } else {
                AssertionData::Immediate(a)
            }
        }
        // Immediate or deferred `cover (...)`.
        Keyword(Kw::Cover) => {
            p.bump();
            if is_deferred {
                p.bump();
                p.bump();
            }
            let expr = flanked(p, Paren, parse_expr)?;
            let stmt = parse_stmt(p)?;
            let a = BlockingAssertion::Cover(expr, stmt);
            if is_deferred {
                AssertionData::Deferred(a)
            } else {
                AssertionData::Immediate(a)
            }
        }
        _ => {
            p.add_diag(
                DiagBuilder2::error("Expected assert, assume, cover, expect, or restrict")
                    .span(span),
            );
            return Err(());
        }
    };
    span.expand(p.last_span());
    Ok(Assertion {
        span: span,
        // NOTE(review): assertion labels are not parsed here; always `None`.
        label: None,
        data: data,
    })
}
/// Parse the action block following an assertion: a pass statement, a lone
/// `else` fail statement, or both.
fn parse_assertion_action_block(
    p: &mut dyn AbstractParser,
) -> ReportedResult<AssertionActionBlock> {
    // A leading `else` means there is no pass statement at all.
    if p.try_eat(Keyword(Kw::Else)) {
        return Ok(AssertionActionBlock::Negative(parse_stmt(p)?));
    }
    let pass_stmt = parse_stmt(p)?;
    if p.try_eat(Keyword(Kw::Else)) {
        let fail_stmt = parse_stmt(p)?;
        Ok(AssertionActionBlock::Both(pass_stmt, fail_stmt))
    } else {
        Ok(AssertionActionBlock::Positive(pass_stmt))
    }
}
/// Stub: property specifications are not parsed yet; the tokens up to (but
/// not including) the closing paren are skipped and an empty `PropSpec` is
/// returned.
fn parse_property_spec(p: &mut dyn AbstractParser) -> ReportedResult<PropSpec> {
    let mut span = p.peek(0).1;
    p.recover_balanced(&[CloseDelim(Paren)], false);
    return Ok(PropSpec);
}
/// Precedence levels for property and sequence expression operators, ordered
/// from loosest (`Min`) to tightest (`Max`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
#[allow(dead_code)]
enum PropSeqPrecedence {
    Min,
    AlEvIfAccRejSyn,
    ImplFollow,
    Until,
    Iff,
    Or,
    And,
    NotNexttime,
    Intersect,
    Within,
    Throughout,
    CycleDelay,
    Brack,
    Max,
}
/// Parse a property expression at minimum precedence.
fn parse_propexpr(p: &mut dyn AbstractParser) -> ReportedResult<PropExpr> {
    parse_propexpr_prec(p, PropSeqPrecedence::Min)
}
/// Parse a property expression at the given precedence, then fold in any
/// binary operator suffixes.
fn parse_propexpr_prec(
    p: &mut dyn AbstractParser,
    precedence: PropSeqPrecedence,
) -> ReportedResult<PropExpr> {
    let mut span = p.peek(0).1;
    // Sequence vs. non-sequence primaries are ambiguous; both branches are
    // greedy, so the first one to succeed wins.
    let mut pp = ParallelParser::new();
    pp.add_greedy("sequence expression", move |p| {
        parse_propexpr_seq(p, precedence)
    });
    pp.add_greedy("property expression", move |p| {
        parse_propexpr_nonseq(p, precedence)
    });
    let data = pp.finish(p, "sequence or primary property expression")?;
    span.expand(p.last_span());
    let expr = PropExpr {
        span: span,
        data: data,
    };
    parse_propexpr_suffix(p, expr, precedence)
}
/// Parse a primary property expression that is not a sequence: a
/// parenthesized property, `not <prop>`, or a clocked `@<event> <prop>`.
fn parse_propexpr_nonseq(
    p: &mut dyn AbstractParser,
    precedence: PropSeqPrecedence,
) -> ReportedResult<PropExprData> {
    match p.peek(0).0 {
        OpenDelim(Paren) => return flanked(p, Paren, parse_propexpr).map(|pe| pe.data),
        Keyword(Kw::Not) => {
            p.bump();
            let expr = parse_propexpr_prec(p, PropSeqPrecedence::NotNexttime)?;
            return Ok(PropExprData::Not(Box::new(expr)));
        }
        At => {
            p.bump();
            let ev = parse_event_expr(p, EventPrecedence::Min)?;
            let expr = parse_propexpr(p)?;
            return Ok(PropExprData::Clocked(ev, Box::new(expr)));
        }
        _ => {
            let q = p.peek(0).1;
            p.add_diag(DiagBuilder2::error("Expected primary property expression").span(q));
            return Err(());
        }
    }
}
/// Parse a property expression built from a sequence expression, optionally
/// prefixed with `strong`/`weak` and optionally followed by an
/// implication/followed-by operator with a property on the right.
fn parse_propexpr_seq(
    p: &mut dyn AbstractParser,
    precedence: PropSeqPrecedence,
) -> ReportedResult<PropExprData> {
    let (seqop, seqexpr) = match p.peek(0).0 {
        // `strong(<seq>)` / `weak(<seq>)` require parentheses.
        Keyword(Kw::Strong) => {
            p.bump();
            (PropSeqOp::Strong, flanked(p, Paren, parse_seqexpr)?)
        }
        Keyword(Kw::Weak) => {
            p.bump();
            (PropSeqOp::Weak, flanked(p, Paren, parse_seqexpr)?)
        }
        _ => (PropSeqOp::None, parse_seqexpr_prec(p, precedence)?),
    };
    // Implication/followed-by operators bind at `ImplFollow` precedence.
    if precedence <= PropSeqPrecedence::ImplFollow {
        if let Some(op) = match p.peek(0).0 {
            Operator(Op::SeqImplOl) => Some(PropSeqBinOp::ImplOverlap),
            Operator(Op::SeqImplNol) => Some(PropSeqBinOp::ImplNonoverlap),
            Operator(Op::SeqFollowOl) => Some(PropSeqBinOp::FollowOverlap),
            Operator(Op::SeqFollowNol) => Some(PropSeqBinOp::FollowNonoverlap),
            _ => None,
        } {
            p.bump();
            let expr = parse_propexpr_prec(p, PropSeqPrecedence::ImplFollow)?;
            return Ok(PropExprData::SeqBinOp(op, seqop, seqexpr, Box::new(expr)));
        }
    }
    Ok(PropExprData::SeqOp(seqop, seqexpr))
}
/// Fold a binary property operator (`or`, `and`, `until`, `implies`,
/// `iff`, ...) into an already parsed property expression, if one follows
/// and the surrounding precedence permits it.
fn parse_propexpr_suffix(
    p: &mut dyn AbstractParser,
    prefix: PropExpr,
    precedence: PropSeqPrecedence,
) -> ReportedResult<PropExpr> {
    // Map the next token to (operator, its precedence, right-assoc?).
    let suffix = match p.peek(0).0 {
        Keyword(Kw::Or) => Some((PropBinOp::Or, PropSeqPrecedence::Or, false)),
        Keyword(Kw::And) => Some((PropBinOp::And, PropSeqPrecedence::And, false)),
        Keyword(Kw::Until) => Some((PropBinOp::Until, PropSeqPrecedence::Until, true)),
        Keyword(Kw::SUntil) => Some((PropBinOp::SUntil, PropSeqPrecedence::Until, true)),
        Keyword(Kw::UntilWith) => Some((PropBinOp::UntilWith, PropSeqPrecedence::Until, true)),
        Keyword(Kw::SUntilWith) => Some((PropBinOp::SUntilWith, PropSeqPrecedence::Until, true)),
        Keyword(Kw::Implies) => Some((PropBinOp::Impl, PropSeqPrecedence::Until, true)),
        Keyword(Kw::Iff) => Some((PropBinOp::Iff, PropSeqPrecedence::Iff, true)),
        _ => None,
    };

    if let Some((op, prec, rassoc)) = suffix {
        // Left-associative operators require strictly lower surrounding
        // precedence; right-associative ones also bind at the same level.
        if precedence < prec || (rassoc && precedence == prec) {
            p.bump();
            let rhs = parse_propexpr_prec(p, prec)?;
            return Ok(PropExpr {
                span: Span::union(prefix.span, rhs.span),
                data: PropExprData::BinOp(op, Box::new(prefix), Box::new(rhs)),
            });
        }
    }

    Ok(prefix)
}
/// Parse a sequence expression with no surrounding precedence constraint.
///
/// Convenience wrapper around `parse_seqexpr_prec` starting at the lowest
/// precedence level.
fn parse_seqexpr(p: &mut dyn AbstractParser) -> ReportedResult<SeqExpr> {
    parse_seqexpr_prec(p, PropSeqPrecedence::Min)
}
/// Parse a sequence expression at the given precedence level.
///
/// Speculatively tries the expression-first and the sequence-first
/// interpretation in parallel, keeps whichever succeeds, then folds in
/// any suffix operators.
fn parse_seqexpr_prec(
    p: &mut dyn AbstractParser,
    precedence: PropSeqPrecedence,
) -> ReportedResult<SeqExpr> {
    let mut span = p.peek(0).1;

    // Race the two possible interpretations of the upcoming tokens.
    let mut pp = ParallelParser::new();
    pp.add_greedy("expression", move |p| parse_seqexpr_expr(p, precedence));
    pp.add_greedy("sequence", move |p| parse_seqexpr_nonexpr(p, precedence));
    // The expected-token message mirrors the branch labels above; the
    // previous message was copied from the property-expression parser.
    let data = pp.finish(p, "expression or sequence")?;

    span.expand(p.last_span());
    let expr = SeqExpr { span, data };

    parse_seqexpr_suffix(p, expr, precedence)
}
/// Branch of the parallel sequence parser for sequence expressions that do
/// not begin with a plain expression.
///
/// NOTE(review): these forms are not implemented yet, so this branch
/// unconditionally reports an error at the current token; the parallel
/// parser then falls back to the other branch.
fn parse_seqexpr_expr(
    p: &mut dyn AbstractParser,
    precedence: PropSeqPrecedence,
) -> ReportedResult<SeqExprData> {
    let q = p.peek(0).1;
    // Diagnostic wording fixed: subject/verb agreement ("expressions that
    // don't" rather than "expression that don't").
    p.add_diag(
        DiagBuilder2::error(
            "Don't know how to parse sequence expressions that don't start with an expression",
        )
        .span(q),
    );
    Err(())
}
/// Parse a sequence expression that starts with a plain expression: either
/// `<expr> throughout <seq>`, or `<expr>` followed by an optional `[...]`
/// repetition.
fn parse_seqexpr_nonexpr(
    p: &mut dyn AbstractParser,
    precedence: PropSeqPrecedence,
) -> ReportedResult<SeqExprData> {
    let expr = parse_expr(p)?;

    // `throughout` only binds at or below its own precedence level.
    if precedence <= PropSeqPrecedence::Throughout && p.try_eat(Keyword(Kw::Throughout)) {
        let body = parse_seqexpr_prec(p, PropSeqPrecedence::Throughout)?;
        Ok(SeqExprData::Throughout(expr, Box::new(body)))
    } else {
        // Optional repetition such as `[*]`, `[+]`, or `[= <expr>]`.
        let rep = try_flanked(p, Brack, parse_seqrep)?;
        Ok(SeqExprData::Expr(expr, rep))
    }
}
/// Fold suffix operators into an already parsed sequence expression.
///
/// NOTE(review): currently a stub that returns `prefix` unchanged and
/// ignores `p` and `precedence` — no sequence suffix operators (e.g.
/// `and`, `or`, `intersect`, `within`) are parsed here yet. Confirm
/// whether this is intentional or pending implementation.
fn parse_seqexpr_suffix(
    p: &mut dyn AbstractParser,
    prefix: SeqExpr,
    precedence: PropSeqPrecedence,
) -> ReportedResult<SeqExpr> {
    Ok(prefix)
}
/// Parse the interior of a `[...]` sequence repetition: `[*]`, `[* <expr>]`,
/// `[+]`, `[= <expr>]`, or `[-> <expr>]`.
fn parse_seqrep(p: &mut dyn AbstractParser) -> ReportedResult<SeqRep> {
    let (tkn, sp) = p.peek(0);
    match tkn {
        // `[*]` (unbounded) or `[* <expr>]` (consecutive repetition).
        Operator(Op::Mul) => {
            p.bump();
            if p.peek(0).0 == CloseDelim(Brack) {
                Ok(SeqRep::ConsecStar)
            } else {
                let count = parse_expr(p)?;
                Ok(SeqRep::Consec(count))
            }
        }

        // `[+]` — one or more consecutive repetitions.
        Operator(Op::Add) => {
            p.bump();
            Ok(SeqRep::ConsecPlus)
        }

        // `[= <expr>]` — non-consecutive repetition.
        Operator(Op::Assign) => {
            p.bump();
            let count = parse_expr(p)?;
            Ok(SeqRep::Nonconsec(count))
        }

        // `[-> <expr>]` — goto repetition.
        Operator(Op::LogicImpl) => {
            p.bump();
            let count = parse_expr(p)?;
            Ok(SeqRep::Goto(count))
        }

        _ => {
            p.add_diag(
                DiagBuilder2::error(
                    "Expected sequence repetition [+], [*], [* <expr>], [= <expr>], or [-> <expr>]",
                )
                .span(sp),
            );
            Err(())
        }
    }
}
/// Parse a module instantiation:
/// `<target> [#(<params>)] <name> [<dims>] (<ports>) [, ...] ;`
fn parse_inst(p: &mut dyn AbstractParser) -> ReportedResult<ast::Inst> {
    let mut span = p.peek(0).1;

    // Name of the instantiated module, then optional `#(...)` parameter
    // assignments.
    let target = parse_identifier(p, "module name")?;
    let params = if p.try_eat(Hashtag) {
        parse_parameter_assignments(p)?
    } else {
        Vec::new()
    };

    // One or more hierarchical instances, each with optional unpacked
    // dimensions and a parenthesized port connection list.
    let names = comma_list_nonempty(p, Semicolon, "hierarchical instance", |p| {
        let mut name_span = p.peek(0).1;
        let name = parse_identifier(p, "instance name")?;
        let (dims, _) = parse_optional_dimensions(p)?;
        let conns = flanked(p, Paren, parse_list_of_port_connections)?;
        name_span.expand(p.last_span());
        Ok(ast::InstName {
            span: name_span,
            name,
            dims,
            conns,
        })
    })?;
    p.require_reported(Semicolon)?;

    span.expand(p.last_span());
    Ok(ast::Inst {
        span,
        target,
        params,
        names,
    })
}
/// Parse a variable declaration:
/// `[const] [var] [<lifetime>] <type> <name> [= <expr>] [, ...] ;`
fn parse_var_decl(p: &mut dyn AbstractParser) -> ReportedResult<ast::VarDecl> {
    let mut span = p.peek(0).1;

    // Optional `const` and `var` keywords, then an optional lifetime.
    let konst = p.try_eat(Keyword(Kw::Const));
    let var = p.try_eat(Keyword(Kw::Var));
    let lifetime = as_lifetime(p.peek(0).0);
    if lifetime.is_some() {
        p.bump();
    }

    // Shared continuation: the declared names following the type, up to
    // the terminating semicolon.
    fn names_tail(p: &mut dyn AbstractParser) -> ReportedResult<Vec<VarDeclName>> {
        let names = comma_list_nonempty(
            p,
            Semicolon,
            "variable name",
            parse_variable_decl_assignment,
        )?;
        p.require_reported(Semicolon)?;
        Ok(names)
    }

    // Try an explicit type; an implicit type is only admitted when the
    // `var` keyword was present.
    let mut pp = ParallelParser::new();
    pp.add("explicit type", |p| {
        let ty = parse_explicit_type(p)?;
        Ok((ty, names_tail(p)?))
    });
    if var {
        pp.add("implicit type", |p| {
            let ty = parse_implicit_type(p)?;
            Ok((ty, names_tail(p)?))
        });
    }
    let (ty, names) = pp.finish(p, "explicit or implicit type")?;

    span.expand(p.last_span());
    Ok(ast::VarDecl {
        span,
        konst,
        var,
        lifetime,
        ty,
        names,
    })
}
/// Parse a `parameter` or `localparam` declaration, covering both type
/// parameters (`parameter type T = ...`) and value parameters
/// (`parameter <type> <name> [<dims>] = ...`).
///
/// If `keyword_optional` is true, a missing `parameter`/`localparam`
/// keyword is tolerated and the declaration is treated as non-local.
fn parse_param_decl(
    p: &mut dyn AbstractParser,
    keyword_optional: bool,
) -> ReportedResult<ast::ParamDecl> {
    let mut span = p.peek(0).1;
    // `localparam` => local = true, `parameter` => local = false.
    let local = match p.peek(0) {
        (Keyword(Kw::Localparam), _) => {
            p.bump();
            true
        }
        (Keyword(Kw::Parameter), _) => {
            p.bump();
            false
        }
        // Keyword absent, but the caller allows that.
        (_, _) if keyword_optional => false,
        (tkn, sp) => {
            p.add_diag(
                DiagBuilder2::error(format!(
                    "expected `parameter` or `localparam`, but found {} instead",
                    tkn
                ))
                .span(sp),
            );
            return Err(());
        }
    };
    // End-of-list predicate for the comma lists below: the declaration
    // ends at `;` or `)`, or at a `,` that immediately introduces the
    // next `parameter`/`localparam` declaration.
    let predicate = FuncPredicate {
        match_func: |p| match p.peek(0).0 {
            Semicolon | CloseDelim(Paren) => true,
            Comma => match p.peek(1).0 {
                Keyword(Kw::Parameter) | Keyword(Kw::Localparam) => true,
                _ => false,
            },
            _ => false,
        },
        // On error, skip ahead (respecting delimiter balance) to the
        // closing `)` or `;`.
        recover_func: |p, consume| p.recover_balanced(&[CloseDelim(Paren), Semicolon], consume),
        desc: ") or ;",
    };
    // `parameter type` declares type parameters; anything else declares
    // value parameters.
    let kind = if p.try_eat(Keyword(Kw::Type)) {
        // Each entry: `<name> [= <explicit type>]`.
        let decls = comma_list_nonempty(p, predicate, "parameter name", |p| {
            let mut span = p.peek(0).1;
            let name = parse_identifier(p, "parameter name")?;
            let ty = if p.try_eat(Operator(Op::Assign)) {
                Some(parse_explicit_type(p)?)
            } else {
                None
            };
            p.anticipate(&[Semicolon, Comma, CloseDelim(Paren)])?;
            span.expand(p.last_span());
            Ok(ast::ParamTypeDecl {
                span: span,
                name: name,
                ty: ty,
            })
        })?;
        p.anticipate(&[Semicolon, Comma, CloseDelim(Paren)])?;
        ast::ParamKind::Type(decls)
    } else {
        // Each entry: `[<type>] <name> [<dims>] [= <expr>]`. The type may
        // be explicit or implicit; both readings are tried in parallel.
        let decls = comma_list_nonempty(p, predicate, "parameter name", |p| {
            let mut pp = ParallelParser::new();
            pp.add("explicit type", |p| {
                let ty = parse_explicit_type(p)?;
                tail(p, ty)
            });
            pp.add("implicit type", |p| {
                let ty = parse_implicit_type(p)?;
                tail(p, ty)
            });
            // Shared continuation once the type has been parsed.
            fn tail(p: &mut dyn AbstractParser, ty: Type) -> ReportedResult<ast::ParamValueDecl> {
                let mut span = p.peek(0).1;
                let name = parse_identifier(p, "parameter name")?;
                let (dims, _) = parse_optional_dimensions(p)?;
                let expr = if p.try_eat(Operator(Op::Assign)) {
                    Some(parse_expr(p)?)
                } else {
                    None
                };
                p.anticipate(&[Semicolon, Comma, CloseDelim(Paren)])?;
                span.expand(p.last_span());
                Ok(ast::ParamValueDecl {
                    span: span,
                    ty: ty,
                    name: name,
                    dims: dims,
                    expr: expr,
                })
            }
            pp.finish(p, "explicit or implicit type")
        })?;
        p.anticipate(&[Semicolon, Comma, CloseDelim(Paren)])?;
        ast::ParamKind::Value(decls)
    };
    span.expand(p.last_span());
    Ok(ParamDecl {
        span: span,
        local: local,
        kind: kind,
    })
}
/// Parse a hierarchical name.
///
/// NOTE(review): currently only accepts a single identifier via
/// `parse_identifier`; dotted hierarchical paths are not handled here —
/// confirm whether callers rely on this limitation.
fn parse_hname(p: &mut dyn AbstractParser, msg: &str) -> ReportedResult<ast::Identifier> {
    parse_identifier(p, msg)
}
#[cfg(test)]
mod tests {
    use crate::lexer::*;
    use crate::preproc::*;
    use moore_common::source::*;

    /// Preprocess, lex, and parse `input`, panicking on any parse error.
    ///
    /// Each call registers its input under a distinct virtual file name
    /// (derived from a per-thread counter) so that repeated invocations
    /// within one thread do not collide in the source manager.
    fn parse(input: &str) {
        use std::cell::Cell;
        // Per-thread counter used to generate unique file names.
        thread_local!(static INDEX: Cell<usize> = Cell::new(0));
        let idx = INDEX.with(|i| i.replace(i.get() + 1));
        let sm = get_source_manager();
        let source = sm.add(&format!("test_{}.sv", idx), input);
        let pp = Preprocessor::new(source, &[], &[]);
        let lexer = Lexer::new(pp);
        super::parse(lexer).unwrap();
    }

    #[test]
    fn intf_empty() {
        parse("interface Foo; endinterface");
    }

    #[test]
    fn intf_params() {
        parse("interface Foo #(); endinterface");
        parse("interface Foo #(parameter bar = 32); endinterface");
        parse("interface Foo #(parameter bar = 32, baz = 64); endinterface");
        parse("interface Foo #(parameter bar = 32, parameter baz = 64); endinterface");
    }

    #[test]
    fn intf_header() {}
}