#![allow(unused_variables)]
#![allow(unused_mut)]
#![allow(dead_code)]
use crate::ast;
use crate::ast::*;
use crate::lexer::{Lexer, TokenAndSpan};
use crate::token::*;
use moore_common::{arenas::Alloc, errors::*, name::*, source::*, util::HasSpan};
use std;
use std::collections::VecDeque;
/// Result of a parse step that carries its diagnostic *un-emitted*; the
/// caller decides whether to report it via `add_diag`.
type ParseResult<T> = Result<T, DiagBuilder2>;
/// Result of a parse step whose diagnostic has already been emitted through
/// `AbstractParser::add_diag`; the `Err` case therefore carries no payload.
type ReportedResult<T> = Result<T, ()>;
/// Minimal interface a token-stream parser must provide.
///
/// Implementors supply an AST arena, token lookahead/consumption primitives,
/// and diagnostic reporting; all higher-level helpers (identifier eating,
/// token requirements, balanced-delimiter recovery) are default methods built
/// on those primitives.
trait AbstractParser<'n> {
/// Arena into which AST nodes are allocated.
fn arena(&self) -> &'n ast::Arena<'n>;
/// Look at the token `offset` positions ahead without consuming anything.
fn peek(&mut self, offset: usize) -> TokenAndSpan;
/// Consume the current token.
fn bump(&mut self);
/// Consume the current token during error recovery.
fn skip(&mut self);
/// Number of tokens consumed so far.
fn consumed(&self) -> usize;
/// Span of the most recently consumed token.
fn last_span(&self) -> Span;
/// Emit a diagnostic.
fn add_diag(&mut self, diag: DiagBuilder2);
/// Highest diagnostic severity reported so far.
fn severity(&self) -> Severity;
/// Consume and return the next token if it is a plain or escaped
/// identifier; leave the stream untouched otherwise.
fn try_eat_ident(&mut self) -> Option<(Name, Span)> {
match self.peek(0) {
(Ident(name), span) => {
self.bump();
Some((name, span))
}
(EscIdent(name), span) => {
self.bump();
Some((name, span))
}
_ => None,
}
}
/// Consume an identifier, returning the diagnostic (not yet emitted) on
/// failure. `msg` names what the identifier was expected to be.
fn eat_ident_or(&mut self, msg: &str) -> ParseResult<(Name, Span)> {
match self.peek(0) {
(Ident(name), span) => {
self.bump();
Ok((name, span))
}
(EscIdent(name), span) => {
self.bump();
Ok((name, span))
}
(tkn, span) => {
Err(DiagBuilder2::error(format!("expected {} before `{}`", msg, tkn)).span(span))
}
}
}
/// Consume an identifier, emitting a diagnostic on failure.
fn eat_ident(&mut self, msg: &str) -> ReportedResult<(Name, Span)> {
match self.peek(0) {
(Ident(name), span) => {
self.bump();
Ok((name, span))
}
(EscIdent(name), span) => {
self.bump();
Ok((name, span))
}
(tkn, span) => {
self.add_diag(
DiagBuilder2::error(format!("expected {} before `{}`", msg, tkn)).span(span),
);
Err(())
}
}
}
/// Check whether the next token is a plain or escaped identifier.
fn is_ident(&mut self) -> bool {
match self.peek(0).0 {
Ident(_) | EscIdent(_) => true,
_ => false,
}
}
/// Consume `expect`, returning the diagnostic (not yet emitted) if the
/// next token differs.
fn require(&mut self, expect: Token) -> Result<(), DiagBuilder2> {
match self.peek(0) {
(actual, _) if actual == expect => {
self.bump();
Ok(())
}
(wrong, span) => Err(DiagBuilder2::error(format!(
"expected `{}`, but found `{}` instead",
expect, wrong
))
.span(span)),
}
}
/// Consume `expect`, emitting a diagnostic if the next token differs.
fn require_reported(&mut self, expect: Token) -> ReportedResult<()> {
match self.require(expect) {
Ok(x) => Ok(x),
Err(e) => {
self.add_diag(e);
Err(())
}
}
}
/// Consume `expect` if it is the next token; return whether it was.
fn try_eat(&mut self, expect: Token) -> bool {
match self.peek(0) {
(actual, _) if actual == expect => {
self.bump();
true
}
_ => false,
}
}
/// Skip tokens until one of `terminators` appears at the current nesting
/// depth, keeping track of balanced open/close delimiters. A terminator
/// inside nested delimiters does not stop the scan. Mismatched or
/// unopened closing delimiters abort recovery with a fatal diagnostic.
fn recover_balanced(&mut self, terminators: &[Token], eat_terminator: bool) {
// Stack of currently open delimiters.
let mut stack = Vec::new();
loop {
let (tkn, sp) = self.peek(0);
// Terminators only count when not inside nested delimiters.
if stack.is_empty() {
for t in terminators {
if *t == tkn {
if eat_terminator {
self.skip();
}
return;
}
}
}
match tkn {
OpenDelim(x) => stack.push(x),
CloseDelim(x) => {
if let Some(open) = stack.pop() {
if open != x {
self.add_diag(
DiagBuilder2::fatal(format!(
"found closing `{}` which is not the complement to the \
previous opening `{}`",
CloseDelim(x),
OpenDelim(open)
))
.span(sp),
);
break;
}
} else {
self.add_diag(
DiagBuilder2::fatal(format!(
"found closing `{}` without an earlier opening `{}`",
CloseDelim(x),
OpenDelim(x)
))
.span(sp),
);
break;
}
}
Eof => break,
_ => (),
}
self.skip();
}
}
/// Whether a fatal diagnostic has been reported.
fn is_fatal(&self) -> bool {
self.severity() >= Severity::Fatal
}
/// Whether an error (or worse) has been reported.
fn is_error(&self) -> bool {
self.severity() >= Severity::Error
}
/// Require the next token to be one of `tokens` *without* consuming it;
/// emit a diagnostic otherwise.
fn anticipate(&mut self, tokens: &[Token]) -> ReportedResult<()> {
let (tkn, sp) = self.peek(0);
for t in tokens {
if *t == tkn {
return Ok(());
}
}
self.add_diag(
DiagBuilder2::error(format!(
"expected {:?}, but found {:?} instead",
tokens, tkn
))
.span(sp),
);
Err(())
}
}
/// Concrete parser over a lexer, buffering lookahead tokens in a queue.
struct Parser<'a, 'n> {
// Token source.
input: Lexer<'a>,
// Lookahead buffer; once the lexer is drained its last element is `Eof`.
queue: VecDeque<TokenAndSpan>,
// All diagnostics emitted so far.
diagnostics: Vec<DiagBuilder2>,
// Span of the most recently consumed token.
last_span: Span,
// Highest diagnostic severity seen so far.
severity: Severity,
// Number of tokens consumed so far.
consumed: usize,
// Arena for AST node allocation.
arena: &'n ast::Arena<'n>,
}
impl<'a, 'n> AbstractParser<'n> for Parser<'a, 'n> {
    fn arena(&self) -> &'n ast::Arena<'n> {
        self.arena
    }

    /// Look at the token `offset` positions ahead without consuming anything.
    /// Lookahead past the end of input saturates to the trailing `Eof` token.
    fn peek(&mut self, offset: usize) -> TokenAndSpan {
        self.ensure_queue_filled(offset);
        if offset < self.queue.len() {
            self.queue[offset]
        } else {
            *self
                .queue
                .back()
                .expect("At least an Eof token should be in the queue")
        }
    }

    /// Consume the current token, updating `last_span` and the consumed count.
    fn bump(&mut self) {
        if self.queue.is_empty() {
            self.ensure_queue_filled(1);
        }
        if let Some((_, sp)) = self.queue.pop_front() {
            self.last_span = sp;
            self.consumed += 1;
        }
    }

    fn skip(&mut self) {
        self.bump()
    }

    fn consumed(&self) -> usize {
        self.consumed
    }

    fn last_span(&self) -> Span {
        self.last_span
    }

    /// Print a diagnostic to stderr, record it, and track the maximum
    /// severity seen so far.
    fn add_diag(&mut self, diag: DiagBuilder2) {
        // FIX: was `eprintln!("")`, which formats an empty string literal;
        // `eprintln!()` is the idiomatic way to print a bare newline
        // (clippy lint `println_empty_string`).
        eprintln!();
        eprintln!("{}", diag);
        if diag.get_severity() >= Severity::Warning {
            trace!(
                "Diagnostic triggered here:\n{:?}",
                backtrace::Backtrace::new()
            );
        }
        if diag.get_severity() > self.severity {
            self.severity = diag.get_severity();
        }
        self.diagnostics.push(diag);
    }

    fn severity(&self) -> Severity {
        self.severity
    }
}
impl<'a, 'n> Parser<'a, 'n> {
/// Create a parser over `input`, allocating AST nodes in `arena`.
fn new(input: Lexer<'a>, arena: &'n ast::Arena<'n>) -> Self {
Parser {
input: input,
queue: VecDeque::new(),
diagnostics: Vec::new(),
last_span: INVALID_SPAN,
severity: Severity::Note,
consumed: 0,
arena,
}
}
/// Pull tokens from the lexer until at least `min_tokens + 1` are queued
/// or `Eof` has been reached. Lexer errors are reported as diagnostics
/// and the offending token is dropped.
fn ensure_queue_filled(&mut self, min_tokens: usize) {
// Once Eof is queued there is nothing more to pull.
if let Some(&(Eof, _)) = self.queue.back() {
return;
}
while self.queue.len() <= min_tokens {
match self.input.next_token() {
Ok((Eof, sp)) => self.queue.push_back((Eof, sp)),
Ok(tkn) => self.queue.push_back(tkn),
Err(x) => self.add_diag(x),
}
}
}
}
/// Parse content surrounded by opening and closing `delim` tokens.
///
/// On any failure — of `inner` or of the closing delimiter — the stream is
/// recovered past the closing delimiter (consuming it) before returning.
fn flanked<'n, R, F>(
p: &mut dyn AbstractParser<'n>,
delim: DelimToken,
mut inner: F,
) -> ReportedResult<R>
where
F: FnMut(&mut dyn AbstractParser<'n>) -> ReportedResult<R>,
{
p.require_reported(OpenDelim(delim))?;
match inner(p) {
Ok(r) => match p.require_reported(CloseDelim(delim)) {
Ok(_) => Ok(r),
Err(e) => {
p.recover_balanced(&[CloseDelim(delim)], true);
Err(e)
}
},
Err(e) => {
p.recover_balanced(&[CloseDelim(delim)], true);
Err(e)
}
}
}
/// Parse delimited content if the opening `delim` token is present.
///
/// Returns `Ok(None)` without consuming anything when the next token is not
/// the opening delimiter; otherwise behaves exactly like [`flanked`].
fn try_flanked<'n, R, F>(
    p: &mut dyn AbstractParser<'n>,
    delim: DelimToken,
    inner: F,
) -> ReportedResult<Option<R>>
where
    F: FnMut(&mut dyn AbstractParser<'n>) -> ReportedResult<R>,
{
    match p.peek(0).0 {
        next if next == OpenDelim(delim) => flanked(p, delim, inner).map(Some),
        _ => Ok(None),
    }
}
/// Parse a comma-separated list of items until the `term` predicate matches.
///
/// Accepts (but warns about) a superfluous trailing comma. On item failure
/// or a missing separator the stream is recovered up to the terminator
/// (without consuming it) and the error is propagated.
fn comma_list<'n, R, F, T>(
p: &mut dyn AbstractParser<'n>,
mut term: T,
msg: &str,
mut item: F,
) -> ReportedResult<Vec<R>>
where
F: FnMut(&mut dyn AbstractParser<'n>) -> ReportedResult<R>,
T: Predicate,
{
let mut v = Vec::new();
// Stop on fatal diagnostics, end of input, or the terminator.
while !p.is_fatal() && p.peek(0).0 != Eof && !term.matches(p) {
match item(p) {
Ok(x) => v.push(x),
Err(e) => {
term.recover(p, false);
return Err(e);
}
}
if term.matches(p) {
break;
} else if p.try_eat(Comma) {
// A comma immediately followed by the terminator is a trailing comma.
if term.matches(p) {
let q = p.last_span();
p.add_diag(DiagBuilder2::warning("superfluous trailing comma").span(q));
break;
}
} else {
// Neither a comma nor the terminator follows the item.
let sp = p.peek(0).1;
p.add_diag(
DiagBuilder2::error(format!("expected , or {} after {}", term.describe(), msg))
.span(sp),
);
term.recover(p, false);
return Err(());
}
}
Ok(v)
}
fn comma_list_nonempty<'n, R, F, T>(
p: &mut dyn AbstractParser<'n>,
term: T,
msg: &str,
item: F,
) -> ReportedResult<Vec<R>>
where
F: FnMut(&mut dyn AbstractParser<'n>) -> ReportedResult<R>,
T: Predicate,
{
let q = p.peek(0).1;
let v = comma_list(p, term, msg, item)?;
if v.is_empty() {
p.add_diag(DiagBuilder2::error(format!("expected at least one {}", msg)).span(q));
Err(())
} else {
Ok(v)
}
}
/// Parse items repeatedly until `term` or end of input is reached.
///
/// An item failure recovers up to `term` (without consuming it) and ends the
/// loop; the items parsed so far are still returned.
fn repeat_until<'n, R, F>(
    p: &mut dyn AbstractParser<'n>,
    term: Token,
    mut item: F,
) -> ReportedResult<Vec<R>>
where
    F: FnMut(&mut dyn AbstractParser<'n>) -> ReportedResult<R>,
{
    let mut items = Vec::new();
    loop {
        let next = p.peek(0).0;
        if next == term || next == Eof {
            break;
        }
        match item(p) {
            Ok(x) => items.push(x),
            Err(_) => {
                p.recover_balanced(&[term], false);
                break;
            }
        }
    }
    Ok(items)
}
/// Run `item`, recovering the token stream up to `term` (without consuming
/// it) if the parse fails. The original error is passed through.
fn recovered<'n, R, F>(
    p: &mut dyn AbstractParser<'n>,
    term: Token,
    mut item: F,
) -> ReportedResult<R>
where
    F: FnMut(&mut dyn AbstractParser<'n>) -> ReportedResult<R>,
{
    item(p).map_err(|e| {
        p.recover_balanced(&[term], false);
        e
    })
}
#[allow(dead_code)]
/// Speculatively run `parse` on a branch of the token stream. The consumed
/// tokens are committed to the underlying parser only on success; on failure
/// the stream is left untouched and `None` is returned.
fn r#try<'n, R, F>(p: &mut dyn AbstractParser<'n>, mut parse: F) -> Option<R>
where
    F: FnMut(&mut dyn AbstractParser<'n>) -> ReportedResult<R>,
{
    let mut bp = BranchParser::new(p);
    let result = parse(&mut bp).ok()?;
    bp.commit();
    Some(result)
}
/// Parse an identifier, wrapping the result in an `ast::Identifier`.
/// `msg` names what the identifier was expected to be, for diagnostics.
fn parse_identifier<'n, M: std::fmt::Display>(
    p: &mut dyn AbstractParser<'n>,
    msg: M,
) -> ReportedResult<ast::Identifier> {
    let name = parse_identifier_name(p, msg)?;
    Ok(ast::Identifier {
        span: name.span,
        name: name.value,
    })
}
fn parse_identifier_name<'n, M: std::fmt::Display>(
p: &mut dyn AbstractParser<'n>,
msg: M,
) -> ReportedResult<Spanned<Name>> {
let (tkn, span) = p.peek(0);
match tkn {
Ident(n) | EscIdent(n) => {
p.bump();
Ok(Spanned::new(n, span))
}
x => {
p.add_diag(
DiagBuilder2::error(format!("expected {}, but found `{}` instead", msg, x))
.span(span),
);
Err(())
}
}
}
fn parse_string_literal<'n, M: std::fmt::Display>(
p: &mut dyn AbstractParser<'n>,
msg: M,
) -> ReportedResult<Spanned<Name>> {
let (tkn, span) = p.peek(0);
match tkn {
Literal(Lit::Str(n)) => {
p.bump();
Ok(Spanned::new(n, span))
}
x => {
p.add_diag(
DiagBuilder2::error(format!("expected {}, but found `{}` instead", msg, x))
.span(span),
);
Err(())
}
}
}
/// Consume an identifier as an `ast::Identifier` if one is next; returns
/// `Ok(None)` without consuming anything otherwise.
fn try_identifier<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Option<ast::Identifier>> {
    let name = try_identifier_name(p)?;
    Ok(name.map(|n| ast::Identifier {
        span: n.span,
        name: n.value,
    }))
}
/// Consume a plain or escaped identifier as a spanned name if one is next;
/// returns `Ok(None)` without consuming anything otherwise.
fn try_identifier_name<'n>(
    p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<Option<Spanned<Name>>> {
    match p.peek(0) {
        (Ident(name), span) | (EscIdent(name), span) => {
            p.bump();
            Ok(Some(Spanned::new(name, span)))
        }
        _ => Ok(None),
    }
}
/// A list terminator used by [`comma_list`]: decides whether the list ends
/// here, how to recover from errors, and how to describe itself in messages.
trait Predicate {
/// Whether the current token terminates the list.
fn matches(&mut self, _: &mut dyn AbstractParser<'_>) -> bool;
/// Skip to the terminator, optionally consuming it.
fn recover(&mut self, _: &mut dyn AbstractParser<'_>, consume: bool);
/// Human-readable description for diagnostics.
fn describe(&self) -> String;
}
/// A bare token acts as a terminator predicate by simple equality.
impl Predicate for Token {
    fn matches(&mut self, p: &mut dyn AbstractParser<'_>) -> bool {
        *self == p.peek(0).0
    }
    fn recover(&mut self, p: &mut dyn AbstractParser<'_>, consume: bool) {
        let terminators = [*self];
        p.recover_balanced(&terminators, consume)
    }
    fn describe(&self) -> String {
        String::from(self.as_str())
    }
}
/// A terminator predicate built from closures, for terminators more complex
/// than a single token (e.g. "one of several tokens").
struct FuncPredicate<
M: FnMut(&mut dyn AbstractParser<'_>) -> bool,
R: FnMut(&mut dyn AbstractParser<'_>, bool),
> {
// Returns true when the current token terminates the list.
match_func: M,
// Skips ahead to the terminator; the bool selects whether to consume it.
recover_func: R,
// Description used in diagnostics.
desc: &'static str,
}
/// Delegate the [`Predicate`] operations to the stored closures.
impl<
M: FnMut(&mut dyn AbstractParser<'_>) -> bool,
R: FnMut(&mut dyn AbstractParser<'_>, bool),
> Predicate for FuncPredicate<M, R>
{
fn matches(&mut self, p: &mut dyn AbstractParser<'_>) -> bool {
(self.match_func)(p)
}
fn recover(&mut self, p: &mut dyn AbstractParser<'_>, consume: bool) {
(self.recover_func)(p, consume)
}
fn describe(&self) -> String {
self.desc.into()
}
}
/// Entry point: parse a full source file from `input`, allocating AST nodes
/// in `arena`. Returns `Err(())` if any error-severity diagnostic was
/// emitted during parsing (diagnostics are printed as they occur).
pub fn parse<'n>(input: Lexer, arena: &'n ast::Arena<'n>) -> Result<ast::SourceFile<'n>, ()> {
    let mut parser = Parser::new(input, arena);
    let root = parse_source_text(&mut parser);
    if parser.is_error() {
        return Err(());
    }
    Ok(root)
}
/// Parse the top level of a source file: optional time units followed by a
/// sequence of items. Item failures are tolerated (errors were already
/// reported) so parsing continues until end of input or a fatal diagnostic.
fn parse_source_text<'n>(p: &mut dyn AbstractParser<'n>) -> ast::SourceFile<'n> {
let mut span = p.peek(0).1;
let mut root = ast::SourceFileData {
timeunits: Timeunit {
unit: None,
prec: None,
},
items: Vec::new(),
};
// Optional `timeunit`/`timeprecision` declarations at the top of the file.
match parse_time_units(p) {
Ok(x) => root.timeunits = x,
Err(()) => (),
}
// Parse all items; failed items are simply skipped (already reported).
while !p.is_fatal() && p.peek(0).0 != Eof {
match parse_item(p) {
Ok(item) => root.items.push(item),
Err(()) => (), }
}
span.expand(p.last_span());
ast::SourceFile::new(span, root)
}
/// Parse a sequence of `timeunit`/`timeprecision` declarations. A
/// `timeunit` may carry a precision after a `/`; later declarations
/// overwrite earlier values.
fn parse_time_units<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Timeunit> {
let mut unit = None;
let mut prec = None;
while p.peek(0).0 == Keyword(Kw::Timeunit) || p.peek(0).0 == Keyword(Kw::Timeprecision) {
// On failure, recover up to the semicolon so the loop can continue.
recovered(p, Semicolon, |p| {
if p.try_eat(Keyword(Kw::Timeunit)) {
unit = Some(parse_time_literal(p)?);
// Optional `/ <precision>` suffix.
if p.try_eat(Operator(Op::Div)) {
prec = Some(parse_time_literal(p)?);
}
} else if p.try_eat(Keyword(Kw::Timeprecision)) {
prec = Some(parse_time_literal(p)?);
} else {
// Guarded by the loop condition above.
unreachable!();
}
Ok(())
})?;
p.require_reported(Semicolon)?;
}
Ok(Timeunit { unit, prec })
}
fn parse_time_literal<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Spanned<Lit>> {
let (tkn, sp) = p.peek(0);
match tkn {
Literal(lit @ Time(..)) => {
p.bump();
Ok(Spanned::new(lit, sp))
}
_ => {
p.add_diag(
DiagBuilder2::error(format!("expected time literal, instead got `{}`", tkn))
.span(sp),
);
Err(())
}
}
}
fn as_lifetime(tkn: Token) -> Option<Lifetime> {
match tkn {
Keyword(Kw::Static) => Some(Lifetime::Static),
Keyword(Kw::Automatic) => Some(Lifetime::Automatic),
_ => None,
}
}
/// Parse an interface declaration:
/// `interface [lifetime] name [#(params)] [(ports)]; items endinterface [: name]`.
/// On failure inside the body, recovery skips to `endinterface`.
fn parse_interface_decl<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Interface<'n>> {
let mut span = p.peek(0).1;
p.require_reported(Keyword(Kw::Interface))?;
let result = recovered(p, Keyword(Kw::Endinterface), |p| {
// Optional `static`/`automatic` lifetime; defaults to static.
let lifetime = match as_lifetime(p.peek(0).0) {
Some(l) => {
p.bump();
l
}
None => Lifetime::Static,
};
let (name, name_sp) = p.eat_ident("interface name")?;
// Optional `#(...)` parameter port list.
let param_ports = if p.try_eat(Hashtag) {
parse_parameter_port_list(p)?
} else {
Vec::new()
};
// Optional `(...)` port list.
let ports = if p.try_eat(OpenDelim(Paren)) {
parse_port_list(p)?
} else {
Vec::new()
};
// Tolerate a missing header semicolon but report it.
if !p.try_eat(Semicolon) {
let q = p.peek(0).1.end();
p.add_diag(
DiagBuilder2::error(format!(
"Missing semicolon \";\" after header of interface \"{}\"",
name
))
.span(q),
);
}
// Interface body: items until `endinterface`; stray semicolons skipped.
let mut items = Vec::new();
while !p.is_fatal() && p.peek(0).0 != Keyword(Kw::Endinterface) && p.peek(0).0 != Eof {
if p.try_eat(Semicolon) {
continue;
}
items.push(parse_item(p)?);
}
span.expand(p.last_span());
Ok(Interface::new(
span,
InterfaceData {
lifetime: lifetime,
name: Spanned::new(name, name_sp),
params: param_ports,
ports: ports,
items: items,
},
))
});
p.require_reported(Keyword(Kw::Endinterface))?;
// Optional `: name` label after `endinterface`.
if p.try_eat(Colon) {
p.eat_ident("interface name")?;
}
result
}
/// Parse a `#(...)` parameter port list into individual parameter
/// declarations. Handles `parameter`/`localparam` prefixes (the locality
/// is sticky across list entries), `type` parameters, and value parameters
/// with either an explicit or an implicit type (disambiguated by trying
/// both branches in a `ParallelParser`).
fn parse_parameter_port_list<'n>(
p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<Vec<ParamDecl<'n>>> {
// Whether the most recent `parameter`/`localparam` keyword was local;
// carries over to entries that omit the keyword.
let mut local = false;
flanked(p, Paren, |p| {
comma_list(p, CloseDelim(Paren), "parameter port", |p| {
let mut outer_span = p.peek(0).1;
// Optional `parameter`/`localparam` keyword updating the locality.
match p.peek(0).0 {
Keyword(Kw::Parameter) => {
p.bump();
local = false;
}
Keyword(Kw::Localparam) => {
p.bump();
local = true;
}
_ => (),
};
let kind = if p.try_eat(Keyword(Kw::Type)) {
// Type parameter: `type name [= <type>]`.
let mut span = p.peek(0).1;
let name = parse_identifier_name(p, "parameter name")?;
let ty = if p.try_eat(Operator(Op::Assign)) {
Some(parse_explicit_type(p)?)
} else {
None
};
p.anticipate(&[Comma, CloseDelim(Paren)])?;
span.expand(p.last_span());
ast::ParamKind::Type(vec![ast::ParamTypeDecl::new(
span,
ParamTypeDeclData { name, ty },
)])
} else {
// Value parameter: try an explicit type first, then an implicit
// one, sharing the common name/dims/assignment tail.
let mut pp = ParallelParser::new();
pp.add("explicit type", |p| {
let ty = parse_explicit_type(p)?;
tail(p, ty)
});
pp.add("implicit type", |p| {
let ty = parse_implicit_type(p)?;
tail(p, ty)
});
// Common tail: `name [dims] [= <expr>]`, which must be followed
// by a comma or the closing parenthesis.
fn tail<'n>(
p: &mut dyn AbstractParser<'n>,
ty: Type<'n>,
) -> ReportedResult<ast::ParamValueDecl<'n>> {
let mut span = p.peek(0).1;
let name = parse_identifier_name(p, "parameter name")?;
let (dims, _) = parse_optional_dimensions(p)?;
let expr = if p.try_eat(Operator(Op::Assign)) {
Some(parse_expr(p)?)
} else {
None
};
p.anticipate(&[Comma, CloseDelim(Paren)])?;
span.expand(p.last_span());
Ok(ParamValueDecl::new(
span,
ParamValueDeclData {
ty,
name,
dims,
expr,
},
))
}
ast::ParamKind::Value(vec![pp.finish(p, "explicit or implicit type")?])
};
outer_span.expand(p.last_span());
Ok(ast::ParamDecl::new(
outer_span,
ast::ParamDeclData { local, kind },
))
})
})
}
/// Parse a module declaration:
/// `module [lifetime] name [imports] [#(params)] [(ports)]; items endmodule [: name]`.
/// On failure inside the body, recovery skips to `endmodule`.
fn parse_module_decl<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Module<'n>> {
    let mut span = p.peek(0).1;
    p.require_reported(Keyword(Kw::Module))?;
    let result = recovered(p, Keyword(Kw::Endmodule), |p| {
        // Optional `static`/`automatic` lifetime; defaults to static.
        let lifetime = match as_lifetime(p.peek(0).0) {
            Some(l) => {
                p.bump();
                l
            }
            None => Lifetime::Static,
        };
        let (name, name_sp) = p.eat_ident("module name")?;
        // Package import declarations may precede the parameter/port lists.
        let mut imports = vec![];
        while p.peek(0).0 == Keyword(Kw::Import) {
            imports.push(parse_import_decl(p)?);
        }
        // Optional `#(...)` parameter port list.
        let params = if p.try_eat(Hashtag) {
            parse_parameter_port_list(p)?
        } else {
            Vec::new()
        };
        // Optional `(...)` port list.
        let ports = if p.try_eat(OpenDelim(Paren)) {
            parse_port_list(p)?
        } else {
            Vec::new()
        };
        // Tolerate a missing header semicolon but report it.
        if !p.try_eat(Semicolon) {
            let q = p.peek(0).1.end();
            p.add_diag(
                DiagBuilder2::error(format!("Missing ; after header of module \"{}\"", name))
                    .span(q),
            );
        }
        // Module body: items until `endmodule`; stray semicolons are skipped.
        let mut items = Vec::new();
        while !p.is_fatal() && p.peek(0).0 != Keyword(Kw::Endmodule) && p.peek(0).0 != Eof {
            if p.try_eat(Semicolon) {
                continue;
            }
            items.push(parse_item(p)?);
        }
        span.expand(p.last_span());
        Ok(Module::new(
            span,
            ModuleData {
                lifetime,
                name: Spanned::new(name, name_sp),
                imports,
                params,
                ports,
                items,
            },
        ))
    });
    // FIX: removed the dead `let sp = p.peek(0).1;` binding that was never
    // used (it was only masked by the file-level `allow(unused_variables)`).
    p.require_reported(Keyword(Kw::Endmodule))?;
    // Optional `: name` label after `endmodule`.
    if p.try_eat(Colon) {
        p.eat_ident("module name")?;
    }
    result
}
/// Parse a package declaration:
/// `package [lifetime] name; items endpackage [: name]`.
/// On failure inside the body, recovery skips to `endpackage`.
fn parse_package_decl<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Package<'n>> {
let mut span = p.peek(0).1;
p.require_reported(Keyword(Kw::Package))?;
let result = recovered(p, Keyword(Kw::Endpackage), |p| {
// Optional `static`/`automatic` lifetime; defaults to static.
let lifetime = match as_lifetime(p.peek(0).0) {
Some(x) => {
p.bump();
x
}
None => Lifetime::Static,
};
let (name, name_span) = p.eat_ident("package name")?;
p.require_reported(Semicolon)?;
// NOTE(review): package-scoped time units are not parsed here; an empty
// Timeunit is recorded unconditionally.
let timeunits = Timeunit {
unit: None,
prec: None,
};
// Package body: items until `endpackage`; stray semicolons skipped.
let mut items = Vec::new();
while !p.is_fatal() && p.peek(0).0 != Keyword(Kw::Endpackage) && p.peek(0).0 != Eof {
if p.try_eat(Semicolon) {
continue;
}
items.push(parse_item(p)?);
}
span.expand(p.last_span());
Ok(Package::new(
span,
PackageData {
lifetime: lifetime,
name: Spanned::new(name, name_span),
timeunits: timeunits,
items: items,
},
))
});
p.require_reported(Keyword(Kw::Endpackage))?;
// Optional `: name` label after `endpackage`.
if p.try_eat(Colon) {
p.eat_ident("package name")?;
}
result
}
fn parse_program_decl<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<()> {
p.require_reported(Keyword(Kw::Program))?;
let result = recovered(p, Keyword(Kw::Endprogram), |p| {
let q = p.peek(0).1;
p.add_diag(DiagBuilder2::error("Don't know how to parse program declarations").span(q));
Err(())
});
p.require_reported(Keyword(Kw::Endprogram))?;
result
}
/// Parse a single item, wrapping the parsed data in a span-carrying node.
fn parse_item<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Item<'n>> {
    let mut span = p.peek(0).1;
    parse_item_data(p).map(|data| {
        span.expand(p.last_span());
        Item::new(span, data)
    })
}
/// Dispatch on the leading token(s) to parse one item. Keyword-introduced
/// items are handled directly; everything else is disambiguated by trying
/// net declaration, instantiation, and variable declaration in parallel.
fn parse_item_data<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ItemData<'n>> {
// Skip an optional `label:` prefix (label is discarded).
if p.is_ident() && p.peek(1).0 == Colon {
p.bump();
p.bump();
}
// Lookahead used to distinguish `interface class` / `virtual class` from
// plain interface declarations and virtual types.
let class_follows = p.peek(1).0 == Keyword(Kw::Class);
// Lookahead used to distinguish `import "..."` (DPI) from package imports.
let strlit_follows = match p.peek(1).0 {
Literal(Lit::Str(..)) => true,
_ => false,
};
match p.peek(0).0 {
Keyword(Kw::Module) => return parse_module_decl(p).map(ItemData::ModuleDecl),
Keyword(Kw::Interface) | Keyword(Kw::Virtual) if class_follows => {
return parse_class_decl(p).map(ItemData::ClassDecl)
}
Keyword(Kw::Class) => return parse_class_decl(p).map(ItemData::ClassDecl),
Keyword(Kw::Interface) => return parse_interface_decl(p).map(ItemData::InterfaceDecl),
Keyword(Kw::Package) => return parse_package_decl(p).map(ItemData::PackageDecl),
Keyword(Kw::Program) => return parse_program_decl(p).map(ItemData::ProgramDecl),
Keyword(Kw::Localparam) | Keyword(Kw::Parameter) => {
let decl = parse_param_decl(p, false)?;
p.require_reported(Semicolon)?;
return Ok(ItemData::ParamDecl(decl));
}
Keyword(Kw::Modport) => return parse_modport_decl(p).map(|x| ItemData::ModportDecl(x)),
Keyword(Kw::Typedef) => return parse_typedef(p).map(|x| ItemData::Typedef(x)),
Keyword(Kw::Import) if strlit_follows => return parse_dpi_decl(p).map(ItemData::DpiDecl),
Keyword(Kw::Export) => return parse_dpi_decl(p).map(ItemData::DpiDecl),
Keyword(Kw::Import) => return parse_import_decl(p).map(|x| ItemData::ImportDecl(x)),
// Procedural blocks, each mapped to its `ProcedureKind`.
Keyword(Kw::Initial) => {
return parse_procedure(p, ProcedureKind::Initial).map(|x| ItemData::Procedure(x));
}
Keyword(Kw::Always) => {
return parse_procedure(p, ProcedureKind::Always).map(|x| ItemData::Procedure(x));
}
Keyword(Kw::AlwaysComb) => {
return parse_procedure(p, ProcedureKind::AlwaysComb).map(|x| ItemData::Procedure(x));
}
Keyword(Kw::AlwaysLatch) => {
return parse_procedure(p, ProcedureKind::AlwaysLatch).map(|x| ItemData::Procedure(x));
}
Keyword(Kw::AlwaysFf) => {
return parse_procedure(p, ProcedureKind::AlwaysFf).map(|x| ItemData::Procedure(x));
}
Keyword(Kw::Final) => {
return parse_procedure(p, ProcedureKind::Final).map(|x| ItemData::Procedure(x));
}
Keyword(Kw::Function) | Keyword(Kw::Task) => {
return parse_subroutine_decl(p).map(|x| ItemData::SubroutineDecl(x));
}
Keyword(Kw::Inout) | Keyword(Kw::Input) | Keyword(Kw::Output) | Keyword(Kw::Ref) => {
return parse_port_decl(p).map(|x| ItemData::PortDecl(x));
}
Keyword(Kw::Assign) => {
return parse_continuous_assign(p).map(|x| ItemData::ContAssign(x));
}
Keyword(Kw::Genvar) => {
p.bump();
let decl = comma_list_nonempty(p, Semicolon, "genvar declaration", parse_genvar_decl)?;
p.require_reported(Semicolon)?;
return Ok(ItemData::GenvarDecl(decl));
}
// `generate ... endgenerate` region with nested generate items.
Keyword(Kw::Generate) => {
let mut span = p.peek(0).1;
p.bump();
let items = repeat_until(p, Keyword(Kw::Endgenerate), parse_generate_item)?;
p.require_reported(Keyword(Kw::Endgenerate))?;
span.expand(p.last_span());
return Ok(ItemData::GenerateRegion(span, items));
}
Keyword(Kw::For) => return parse_generate_for(p).map(|x| ItemData::GenerateFor(x)),
Keyword(Kw::If) => return parse_generate_if(p).map(|x| ItemData::GenerateIf(x)),
Keyword(Kw::Case) => return parse_generate_case(p).map(|x| ItemData::GenerateCase(x)),
Keyword(Kw::Assert)
| Keyword(Kw::Assume)
| Keyword(Kw::Cover)
| Keyword(Kw::Expect)
| Keyword(Kw::Restrict) => return parse_assertion(p).map(|x| ItemData::Assertion(x)),
// A bare semicolon yields an empty item.
Semicolon => {
p.bump();
return Ok(ItemData::Dummy);
}
// `default clocking`/`default disable iff` are parsed but discarded.
Keyword(Kw::Default) => {
p.bump();
let mut span = p.last_span();
if p.try_eat(Keyword(Kw::Clocking)) {
let name = p.eat_ident("clocking identifier")?;
p.require_reported(Semicolon)?;
span.expand(p.last_span());
return Ok(ItemData::Dummy);
}
if p.try_eat(Keyword(Kw::Disable)) {
p.require_reported(Keyword(Kw::Iff))?;
let expr = parse_expr(p)?;
p.require_reported(Semicolon)?;
span.expand(p.last_span());
return Ok(ItemData::Dummy);
}
p.add_diag(
DiagBuilder2::error("expected `clocking` or `disable` after `default`").span(span),
);
p.recover_balanced(&[Semicolon], true);
return Err(());
}
SysIdent(..) => return parse_elab_system_task(p).map(|_| ItemData::Dummy),
_ => (),
}
// No keyword matched: disambiguate between a net declaration (greedy),
// an instantiation, and a variable declaration by trying all three.
let mut pp = ParallelParser::new();
pp.add_greedy("net declaration", |p| {
parse_net_decl(p).map(|d| ItemData::NetDecl(d))
});
pp.add("instantiation", |p| {
parse_inst(p).map(|i| ItemData::Inst(i))
});
pp.add("variable declaration", |p| {
parse_var_decl(p).map(|d| ItemData::VarDecl(d))
});
let res = pp.finish(p, "hierarchy item");
// On failure, skip past the terminating semicolon so parsing can resume.
if res.is_err() {
p.recover_balanced(&[Semicolon], true);
}
res
}
/// Skip an elaboration system task (`$...;`), emitting a warning that it is
/// unsupported. The entire statement up to and including the semicolon is
/// consumed.
fn parse_elab_system_task<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<()> {
let mut span = p.peek(0).1;
// NOTE(review): `name` is extracted but currently unused (masked by the
// file-level `allow(unused_variables)`); presumably kept for future use.
let name = match p.peek(0).0 {
SysIdent(name) => name,
_ => unreachable!(),
};
// Discard everything up to and including the semicolon.
p.recover_balanced(&[Semicolon], true);
span.expand(p.last_span());
p.add_diag(DiagBuilder2::warning("unsupported elaboration system task").span(span));
Ok(())
}
/// Parse a `localparam` declaration with a comma-separated list of names and
/// optional assignments. The parsed data is currently discarded; expression
/// failures recover to the next comma or semicolon.
fn parse_localparam_decl<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<()> {
p.require_reported(Keyword(Kw::Localparam))?;
loop {
let (name, name_sp) = match p.eat_ident_or("parameter name") {
Ok(x) => x,
Err(e) => {
p.add_diag(e);
return Err(());
}
};
// Optional `= <expr>` initializer.
if p.try_eat(Operator(Op::Assign)) {
match parse_expr(p) {
Ok(_) => (),
Err(_) => p.recover_balanced(&[Comma, Semicolon], false),
}
}
// Decide whether the list continues, ends, or is malformed.
match p.peek(0) {
(Comma, sp) => {
p.bump();
// A comma directly before the semicolon is a trailing comma.
if p.peek(0).0 == Semicolon {
p.add_diag(DiagBuilder2::warning("superfluous trailing comma").span(sp));
break;
}
}
(Semicolon, _) => break,
(x, sp) => {
p.add_diag(
DiagBuilder2::error(format!("expected , or ; after localparam, found {}", x))
.span(sp),
);
return Err(());
}
}
}
p.require_reported(Semicolon)?;
Ok(())
}
/// Parse a `parameter` declaration (currently recognized and discarded).
///
/// Tries an explicit type first and falls back to an implicit type; both
/// branches share the tail that parses the parameter names and the
/// terminating semicolon.
fn parse_parameter_decl<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<()> {
    p.require_reported(Keyword(Kw::Parameter))?;
    // Shared tail: parameter names followed by `;`. The parsed names are
    // discarded for now.
    fn tail<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<()> {
        parse_parameter_names(p)?;
        p.require_reported(Semicolon)?;
        Ok(())
    }
    let mut pp = ParallelParser::new();
    pp.add("explicit type", |p| {
        let ty = parse_explicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    pp.add("implicit type", |p| {
        let ty = parse_implicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    // FIX: dropped the unused `let (ty, ()) = ...` destructuring and the
    // needless trailing `return Ok(());` — only error propagation matters.
    pp.finish(p, "explicit or implicit type")?;
    Ok(())
}
/// Parse a non-empty, comma-separated list of parameter names with optional
/// dimensions and `= <expr>` assignments, terminated by a semicolon.
///
/// NOTE(review): the parsed name, dimensions, and expression are all bound
/// but discarded (the list yields unit values) — this looks like scaffolding
/// awaiting a proper AST representation.
fn parse_parameter_names<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Vec<()>> {
let v = comma_list_nonempty(p, Semicolon, "parameter name", |p| {
let (name, name_sp) = p.eat_ident("parameter name")?;
let (dims, _) = parse_optional_dimensions(p)?;
let expr = if p.try_eat(Operator(Op::Assign)) {
Some(parse_expr(p)?)
} else {
None
};
Ok(())
})?;
Ok(v)
}
/// Parse a `modport` declaration: a non-empty comma-separated list of
/// modport items terminated by a semicolon.
fn parse_modport_decl<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ast::Modport<'n>> {
let mut span = p.peek(0).1;
p.require_reported(Keyword(Kw::Modport))?;
let names = comma_list_nonempty(p, Semicolon, "modport item", parse_modport_item)?;
p.require_reported(Semicolon)?;
span.expand(p.last_span());
Ok(ast::Modport::new(span, ast::ModportData { names }))
}
/// Parse a single modport item: `name ( <ports decls> )`. The resulting node
/// is allocated in the parser's arena.
fn parse_modport_item<'n>(
p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<&'n ast::ModportName<'n>> {
let mut span = p.peek(0).1;
let name = parse_identifier_name(p, "modport name")?;
// Parenthesized, comma-separated port declarations.
let ports = flanked(p, Paren, |p| {
comma_list(
p,
CloseDelim(Paren),
"modport ports declaration",
parse_modport_ports_decl,
)
})?;
span.expand(p.last_span());
Ok(p.arena().alloc(ast::ModportName::new(
span,
ast::ModportNameData { name, ports },
)))
}
/// Parse one modport ports declaration. Only the simple form with a port
/// direction is implemented: a direction keyword followed by one or more
/// ports, each either a plain `name` or an explicit `.name(expr)`.
/// `import`/`export` and `clocking` forms are rejected with a diagnostic.
fn parse_modport_ports_decl<'n>(
p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<&'n ast::ModportPort<'n>> {
let mut span = p.peek(0).1;
if let Some(dir) = as_port_direction(p.peek(0).0) {
let dir = Spanned::new(dir, p.peek(0).1);
let mut port: Vec<&_> = vec![];
p.bump();
loop {
let mut span = p.peek(0).1;
// `.name(expr)` explicit port, or a plain `name`.
let (name, expr) = if p.try_eat(Period) {
let name = parse_identifier_name(p, "port name")?;
p.require_reported(OpenDelim(Paren))?;
let expr: &_ = p.arena().alloc(parse_expr(p)?);
p.require_reported(CloseDelim(Paren))?;
(name, Some(expr))
} else {
let name = parse_identifier_name(p, "port name")?;
(name, None)
};
span.expand(p.last_span());
port.push(p.arena().alloc(ast::ModportSimplePort::new(
span,
ast::ModportSimplePortData { name, expr },
)));
// A comma followed by a keyword starts the next declaration (e.g.
// another direction), so it is left for the enclosing comma list.
match (p.peek(0).0, p.peek(1).0) {
(Comma, Keyword(_)) => break,
(Comma, _) => {
p.bump();
continue;
}
_ => break,
}
}
span.expand(p.last_span());
return Ok(p.arena().alloc(ast::ModportPort::new(
span,
ast::ModportPortData::Simple { dir, port },
)));
}
// Unimplemented forms are consumed far enough to emit a targeted error.
if p.try_eat(Keyword(Kw::Import)) || p.try_eat(Keyword(Kw::Export)) {
p.add_diag(
DiagBuilder2::error("modport task/function ports not implemented").span(p.last_span()),
);
return Err(());
}
if p.try_eat(Keyword(Kw::Clocking)) {
p.add_diag(DiagBuilder2::error("modport clocking declaration not implemented").span(span));
return Err(());
}
p.add_diag(DiagBuilder2::error("expected modport port declaration").span(span));
Err(())
}
/// Map a port-direction keyword (`input`, `output`, `inout`, `ref`) to its
/// `PortDir`; any other token yields `None`.
fn as_port_direction(tkn: Token) -> Option<PortDir> {
    let dir = match tkn {
        Keyword(kw) => match kw {
            Kw::Input => PortDir::Input,
            Kw::Output => PortDir::Output,
            Kw::Inout => PortDir::Inout,
            Kw::Ref => PortDir::Ref,
            _ => return None,
        },
        _ => return None,
    };
    Some(dir)
}
/// Parse a data type, speculatively trying an explicit type on a branch of
/// the stream first; if that fails, the branch is discarded and an implicit
/// type is parsed from the untouched stream instead.
fn parse_data_type<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Type<'n>> {
{
// Branch parser: tokens are only committed on success.
let mut bp = BranchParser::new(p);
match parse_explicit_type(&mut bp) {
Ok(x) => {
bp.commit();
return Ok(x);
}
Err(_) => (),
}
}
parse_implicit_type(p)
}
/// Parse an explicit type: the core type data, then signing and packed
/// dimensions, then any scoped/specialized suffix (`.`, `::`, `#(...)`).
fn parse_explicit_type<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Type<'n>> {
let mut span = p.peek(0).1;
let data = parse_type_data(p)?;
span.expand(p.last_span());
let ty = parse_type_signing_and_dimensions(p, span, data)?;
parse_type_suffix(p, ty)
}
/// Recursively parse type suffixes onto `ty`:
/// `.name` (member type), `::name` (scoped type), and `#(...)`
/// (parameter specialization). Each suffix wraps the type so far and the
/// function recurses to pick up further suffixes.
fn parse_type_suffix<'n>(p: &mut dyn AbstractParser<'n>, ty: Type<'n>) -> ReportedResult<Type<'n>> {
let tkn = p.peek(0).0;
let sp = ty.span;
match tkn {
// `.name` — member type access.
Period => {
p.bump();
let name = parse_identifier_name(p, "member type name")?;
let subty = parse_type_signing_and_dimensions(
p,
sp,
ScopedType {
ty: Box::new(ty),
member: true,
name: name,
},
)?;
parse_type_suffix(p, subty)
}
// `::name` — scope resolution.
Namespace => {
p.bump();
let name = parse_identifier_name(p, "type name")?;
let subty = parse_type_signing_and_dimensions(
p,
sp,
ScopedType {
ty: Box::new(ty),
member: false,
name: name,
},
)?;
parse_type_suffix(p, subty)
}
// `#(...)` — parameter specialization.
Hashtag => {
p.bump();
let params = parse_parameter_assignments(p)?;
let span = Span::union(sp, p.last_span());
parse_type_suffix(
p,
ast::Type::new(
span,
ast::TypeData {
kind: ast::TypeKind::new(span, ast::SpecializedType(Box::new(ty), params)),
sign: ast::TypeSign::None,
dims: Vec::new(),
},
),
)
}
// No suffix: the type is complete.
_ => Ok(ty),
}
}
/// Parse an implicit type: no core type keyword, only optional signing and
/// dimensions. The span starts as an empty span at the current position.
fn parse_implicit_type<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Type<'n>> {
let span = p.peek(0).1.begin().into();
parse_type_signing_and_dimensions(p, span, ImplicitType)
}
fn parse_type_signing_and_dimensions<'n>(
p: &mut dyn AbstractParser<'n>,
mut span: Span,
kind: TypeKindData<'n>,
) -> ReportedResult<Type<'n>> {
let kind = TypeKind::new(span, kind);
let sign = match p.peek(0).0 {
Keyword(Kw::Signed) => {
p.bump();
TypeSign::Signed
}
Keyword(Kw::Unsigned) => {
p.bump();
TypeSign::Unsigned
}
_ => TypeSign::None,
};
let (dims, _) = parse_optional_dimensions(p)?;
span.expand(p.last_span());
Ok(Type::new(span, TypeData { kind, sign, dims }))
}
/// Parse the core of a type: a built-in type keyword, an enum/struct/union,
/// a named type, a virtual interface, or a `type(...)` reference.
fn parse_type_data<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<TypeKindData<'n>> {
let (tkn, sp) = p.peek(0);
match tkn {
// Built-in type keywords: consume the keyword and map it directly.
Keyword(Kw::Void) => {
p.bump();
Ok(ast::VoidType)
}
Keyword(Kw::String) => {
p.bump();
Ok(ast::StringType)
}
Keyword(Kw::Chandle) => {
p.bump();
Ok(ast::ChandleType)
}
Keyword(Kw::Event) => {
p.bump();
Ok(ast::EventType)
}
Keyword(Kw::Bit) => {
p.bump();
Ok(ast::BitType)
}
Keyword(Kw::Logic) => {
p.bump();
Ok(ast::LogicType)
}
Keyword(Kw::Reg) => {
p.bump();
Ok(ast::RegType)
}
Keyword(Kw::Byte) => {
p.bump();
Ok(ast::ByteType)
}
Keyword(Kw::Shortint) => {
p.bump();
Ok(ast::ShortIntType)
}
Keyword(Kw::Int) => {
p.bump();
Ok(ast::IntType)
}
Keyword(Kw::Longint) => {
p.bump();
Ok(ast::LongIntType)
}
Keyword(Kw::Integer) => {
p.bump();
Ok(ast::IntegerType)
}
Keyword(Kw::Time) => {
p.bump();
Ok(ast::TimeType)
}
Keyword(Kw::Shortreal) => {
p.bump();
Ok(ast::ShortRealType)
}
Keyword(Kw::Real) => {
p.bump();
Ok(ast::RealType)
}
Keyword(Kw::Realtime) => {
p.bump();
Ok(ast::RealtimeType)
}
// Aggregate types delegate to dedicated parsers.
Keyword(Kw::Enum) => parse_enum_type(p),
Keyword(Kw::Struct) | Keyword(Kw::Union) => parse_struct_type(p),
// The identifier `mailbox` is treated as a built-in type. Note this
// arm shadows the general named-type arm below for that identifier.
Ident(n) if &*n.as_str() == "mailbox" => {
p.bump();
Ok(ast::MailboxType)
}
// Any other identifier names a user-defined type.
Ident(n) | EscIdent(n) => {
p.bump();
Ok(ast::NamedType(Spanned::new(n, sp)))
}
// `virtual [interface] name` — virtual interface type.
Keyword(Kw::Virtual) => {
p.bump();
p.try_eat(Keyword(Kw::Interface));
let (name, _) = p.eat_ident("virtual interface name")?;
Ok(ast::VirtIntfType(name))
}
// `type ( <type or expr> )` — type reference.
Keyword(Kw::Type) => {
p.bump();
let arg = flanked(p, Paren, |p| parse_type_or_expr(p, &[CloseDelim(Paren)]))?;
Ok(ast::TypeRef(Box::new(arg)))
}
_ => {
let q = p.peek(0).1;
p.add_diag(DiagBuilder2::error("expected type").span(q));
return Err(());
}
}
}
fn parse_enum_type<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<TypeKindData<'n>> {
let mut span = p.peek(0).1;
p.bump();
let base = if p.peek(0).0 != OpenDelim(Brace) {
Some(Box::new(parse_data_type(p)?))
} else {
None
};
let names = flanked(p, Brace, |p| {
comma_list(p, CloseDelim(Brace), "enum name", parse_enum_name)
})?;
span.expand(p.last_span());
Ok(ast::EnumType(ast::Enum::new(
span,
ast::EnumData {
base_type: base,
variants: names,
},
)))
}
/// Parse one enum variant: `name [ [range] ] [= value]`.
fn parse_enum_name<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<EnumName<'n>> {
    let mut span = p.peek(0).1;
    let name = parse_identifier_name(p, "enum name")?;
    // Optional bracketed range.
    let range = try_flanked(p, Brack, parse_expr)?;
    // Optional `= <expr>` value.
    let value = match p.try_eat(Operator(Op::Assign)) {
        true => Some(parse_expr(p)?),
        false => None,
    };
    span.expand(p.last_span());
    Ok(EnumName::new(span, EnumNameData { name, range, value }))
}
/// Parse a struct or (tagged) union type:
/// `struct|union [tagged] [packed [signing]] { members }`.
/// Without a member list, a forward type declaration is produced instead.
fn parse_struct_type<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<TypeKindData<'n>> {
let mut span = p.peek(0).1;
// Determine the aggregate kind from one or two leading keywords.
let kind = match (p.peek(0).0, p.peek(1).0) {
(Keyword(Kw::Struct), _) => {
p.bump();
StructKind::Struct
}
(Keyword(Kw::Union), Keyword(Kw::Tagged)) => {
p.bump();
p.bump();
StructKind::TaggedUnion
}
(Keyword(Kw::Union), _) => {
p.bump();
StructKind::Union
}
_ => {
p.add_diag(
DiagBuilder2::error("expected `struct`, `union`, or `union tagged`").span(span),
);
return Err(());
}
};
// Optional `packed`, which may itself be followed by a signing keyword.
let (packed, signing) = if p.try_eat(Keyword(Kw::Packed)) {
(true, parse_signing(p))
} else {
(false, TypeSign::None)
};
if p.peek(0).0 == OpenDelim(Brace) {
// Full definition with a brace-enclosed member list.
let members = flanked(p, Brace, |p| {
repeat_until(p, CloseDelim(Brace), parse_struct_member)
})?;
span.expand(p.last_span());
Ok(ast::StructType(ast::Struct::new(
span,
ast::StructData {
kind: kind,
packed: packed,
signing: signing,
members: members,
},
)))
} else {
// No member list: treat as a forward declaration wrapping an empty
// struct of the same kind.
span.expand(p.last_span());
let struct_type = ast::StructType(ast::Struct::new(
span,
ast::StructData {
kind: kind,
packed: packed,
signing: signing,
members: Vec::default(),
},
));
let forward_type = ast::ForwardType {
kind: Box::new(TypeKind::new(span, struct_type)),
};
Ok(forward_type)
}
}
/// Parse a single struct/union member: an optional `rand`/`randc` qualifier,
/// a data type, and a comma-separated list of member names, ending with `;`.
fn parse_struct_member<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<StructMember<'n>> {
    let mut span = p.peek(0).1;
    // Optional randomization qualifier.
    let rand_qualifier = match p.peek(0).0 {
        Keyword(Kw::Rand) => {
            p.bump();
            Some(RandomQualifier::Rand)
        }
        Keyword(Kw::Randc) => {
            p.bump();
            Some(RandomQualifier::Randc)
        }
        _ => None,
    };
    let ty = parse_data_type(p)?;
    // One or more declared names, each with optional dimensions/initializer.
    let names = comma_list_nonempty(p, Semicolon, "member name", parse_variable_decl_assignment)?;
    p.require_reported(Semicolon)?;
    span.expand(p.last_span());
    Ok(ast::StructMember::new(
        span,
        ast::StructMemberData {
            rand_qualifier,
            ty: Box::new(ty),
            names,
        },
    ))
}
/// Consume a `signed` or `unsigned` keyword if present.
///
/// Returns the corresponding sign, or `None` without consuming anything when
/// the next token is neither keyword.
fn try_signing<'n>(p: &mut dyn AbstractParser<'n>) -> Option<TypeSign> {
    let sign = match p.peek(0).0 {
        Keyword(Kw::Signed) => TypeSign::Signed,
        Keyword(Kw::Unsigned) => TypeSign::Unsigned,
        _ => return None,
    };
    p.bump();
    Some(sign)
}
/// Parse an optional `signed`/`unsigned` keyword, yielding `TypeSign::None`
/// when neither is present.
fn parse_signing<'n>(p: &mut dyn AbstractParser<'n>) -> TypeSign {
    match try_signing(p) {
        Some(sign) => sign,
        None => TypeSign::None,
    }
}
/// Parse zero or more `[...]` dimensions.
///
/// Returns the dimensions together with a span covering all of them, or
/// `INVALID_SPAN` when no dimension was present.
fn parse_optional_dimensions<'n>(
    p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<(Vec<TypeDim<'n>>, Span)> {
    let mut dims = Vec::new();
    let mut span = INVALID_SPAN;
    while let Some((dim, sp)) = try_dimension(p)? {
        if dims.is_empty() {
            // The first dimension seeds the span.
            span = sp;
        } else {
            span.expand(sp);
        }
        dims.push(dim);
    }
    Ok((dims, span))
}
/// Try to parse a single `[...]` dimension, returning the dimension and its
/// span, or `None` when the current token is not `[`.
///
/// Recognizes `[]` (unsized), `[*]` (associative with wildcard index),
/// `[$]` / `[$:expr]` (queues), `[type]` (associative), `[expr]`, and
/// `[expr:expr]` (ranges).
fn try_dimension<'n>(
    p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<Option<(TypeDim<'n>, Span)>> {
    // Zero-length span at the current position; expanded after parsing.
    let mut span = Span::from(p.peek(0).1.begin());
    let dim = try_flanked(p, Brack, |p| {
        Ok(match p.peek(0).0 {
            // `[]`
            CloseDelim(Brack) => TypeDim::Unsized,
            // `[*]`
            Operator(Op::Mul) => {
                p.bump();
                TypeDim::Associative(None)
            }
            // `[$]` or `[$:expr]`
            Dollar => {
                p.bump();
                let expr = if p.try_eat(Colon) {
                    Some(parse_expr(p)?)
                } else {
                    None
                };
                TypeDim::Queue(expr)
            }
            // `[type]`, `[expr]`, or `[expr:expr]` — disambiguated
            // speculatively by parse_type_or_expr.
            _ => match parse_type_or_expr(p, &[Colon, CloseDelim(Brack)])? {
                TypeOrExpr::Type(ty) => TypeDim::Associative(Some(ty.clone())),
                TypeOrExpr::Expr(expr) => {
                    if p.try_eat(Colon) {
                        let other = parse_expr(p)?;
                        TypeDim::Range(expr.clone(), other.clone())
                    } else {
                        TypeDim::Expr(expr.clone())
                    }
                }
            },
        })
    })?;
    span.expand(p.last_span());
    Ok(dim.map(|dim| (dim, span)))
}
/// Parse the port connections of an instantiation, up to (but not consuming)
/// the closing `)`.
///
/// Supports `.*` auto-connection, named connections (`.name`, `.name()`,
/// `.name(expr)`), and positional expressions.
fn parse_list_of_port_connections<'n>(
    p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<Vec<PortConn<'n>>> {
    comma_list(p, CloseDelim(Paren), "list of port connections", |p| {
        let mut span = p.peek(0).1;
        let kind = if p.try_eat(Period) {
            if p.try_eat(Operator(Op::Mul)) {
                // `.*` — connect remaining ports automatically.
                ast::PortConnData::Auto
            } else {
                let name = parse_identifier_name(p, "port name")?;
                // With parentheses: `.name(expr)` connects, `.name()` is
                // explicitly unconnected. Without: `.name` auto-connects.
                let mode = try_flanked(p, Paren, |p| {
                    Ok(if p.peek(0).0 != CloseDelim(Paren) {
                        ast::PortConnMode::Connected(parse_expr(p)?)
                    } else {
                        ast::PortConnMode::Unconnected
                    })
                })?
                .unwrap_or(ast::PortConnMode::Auto);
                ast::PortConnData::Named(name, mode)
            }
        } else {
            // Positional connection.
            ast::PortConnData::Positional(parse_expr(p)?)
        };
        span.expand(p.last_span());
        Ok(ast::PortConn::new(span, kind))
    })
}
/// Parse something that may be either a type or an expression, deciding by
/// speculative parsing.
///
/// Both interpretations must be followed by one of `terminators`; the
/// expression branch is registered greedily and wins when both succeed.
fn parse_type_or_expr<'n>(
    p: &mut dyn AbstractParser<'n>,
    terminators: &[Token],
) -> ReportedResult<ast::TypeOrExpr<'n>> {
    // Owned copy so the closures below can borrow it.
    let terminators = Vec::from(terminators);
    let mut pp = ParallelParser::new();
    pp.add_greedy("expression", |p| {
        let expr = parse_expr(p)?;
        p.anticipate(&terminators)?;
        Ok(ast::TypeOrExpr::Expr(p.arena().alloc(expr)))
    });
    pp.add("type", |p| {
        let ty = parse_explicit_type(p)?;
        p.anticipate(&terminators)?;
        Ok(ast::TypeOrExpr::Type(p.arena().alloc(ty)))
    });
    pp.finish(p, "type or expression")
}
/// Parse a full expression, admitting operators of any precedence.
fn parse_expr<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Expr<'n>> {
    parse_expr_prec(p, Precedence::Min)
}
/// Parse an expression at the given precedence level.
///
/// Handles the prefix forms that cannot be expressed as plain operators:
/// `new` expressions (array allocation, constructor call, and class new),
/// cast expressions `<type>'(<expr>)` / `<type>'{...}`, and sign casts
/// `signed'(<expr>)` / `unsigned'(<expr>)`. Everything else is delegated to
/// `parse_expr_first` and `parse_expr_suffix`.
fn parse_expr_prec<'n>(
    p: &mut dyn AbstractParser<'n>,
    precedence: Precedence,
) -> ReportedResult<Expr<'n>> {
    // `new` expressions: `new [dim] (init)`, `new (args)`, or `new <expr>`.
    if p.try_eat(Keyword(Kw::New)) {
        let mut span = p.last_span();
        if let Some(dim_expr) = try_flanked(p, Brack, parse_expr)? {
            // `new [dim] (init)` — array allocation, optional initializer.
            let expr = try_flanked(p, Paren, parse_expr)?;
            span.expand(p.last_span());
            return Ok(Expr::new(
                span,
                ArrayNewExpr(Box::new(dim_expr), expr.map(|x| Box::new(x))),
            ));
        } else {
            if let Some(args) = try_flanked(p, Paren, parse_call_args)? {
                // `new (args)` — constructor call.
                span.expand(p.last_span());
                return Ok(Expr::new(span, ConstructorCallExpr(args)));
            } else {
                // `new` optionally followed by an expression; parse the
                // expression speculatively so a bare `new` still succeeds.
                let mut bp = BranchParser::new(p);
                let expr = match parse_expr(&mut bp) {
                    Ok(x) => {
                        bp.commit();
                        Some(Box::new(x))
                    }
                    Err(_) => None,
                };
                span.expand(p.last_span());
                return Ok(Expr::new(span, ClassNewExpr(expr)));
            }
        }
    }
    // Cast expressions: speculatively parse a type and check whether it is
    // followed by `'(` or `'{`.
    {
        let mut bp = BranchParser::new(p);
        let mut span = bp.peek(0).1;
        let ty = parse_explicit_type(&mut bp);
        match (ty, bp.peek(0).0, bp.peek(1).0) {
            (Ok(ty), Apostrophe, OpenDelim(Paren)) => {
                // `<type>'(<expr>)`.
                bp.commit();
                p.require_reported(Apostrophe)?;
                let expr = flanked(p, Paren, parse_expr)?;
                span.expand(p.last_span());
                let cast = Expr::new(span, CastExpr(ty, Box::new(expr)));
                return parse_expr_suffix(p, cast, precedence);
            }
            (Ok(ty), Apostrophe, OpenDelim(Brace)) => {
                // `<type>'{...}` — the `'{...}` part parses as an expression.
                bp.commit();
                let expr = parse_expr(p)?;
                span.expand(p.last_span());
                let cast = Expr::new(span, CastExpr(ty, Box::new(expr)));
                return parse_expr_suffix(p, cast, precedence);
            }
            _ => (),
        }
    }
    // Sign casts: `signed'(<expr>)` and `unsigned'(<expr>)`.
    if let Some(sign) = try_signing(p) {
        let mut span = p.last_span();
        let sign = Spanned::new(sign, span);
        p.require_reported(Apostrophe)?;
        let expr = flanked(p, Paren, parse_expr)?;
        span.expand(p.last_span());
        let cast = Expr::new(span, CastSignExpr(sign, Box::new(expr)));
        return parse_expr_suffix(p, cast, precedence);
    }
    // Regular expression: a prefix followed by any applicable suffixes.
    // (A dead `let q = p.peek(0).1;` that was never used has been removed.)
    let prefix = parse_expr_first(p, precedence)?;
    parse_expr_suffix(p, prefix, precedence)
}
/// Parse the suffixes of an expression: indexing, calls, member/scope access,
/// postfix increment/decrement, ternary, `inside`, size casts, and binary or
/// assignment operators.
///
/// `prefix` is the already-parsed left-hand expression; `precedence` is the
/// binding strength of the surrounding context. Each arm only fires when the
/// surrounding precedence permits it, and recurses to consume further
/// suffixes.
fn parse_expr_suffix<'n>(
    p: &mut dyn AbstractParser<'n>,
    prefix: Expr<'n>,
    precedence: Precedence,
) -> ReportedResult<Expr<'n>> {
    let (tkn, sp) = p.peek(0);
    match tkn {
        // Indexing: `expr[range]`.
        OpenDelim(Brack) if precedence <= Precedence::Postfix => {
            p.bump();
            let expr = match parse_range_expr(p) {
                Ok(x) => x,
                Err(e) => {
                    // Skip to the matching `]` so parsing can continue.
                    p.recover_balanced(&[CloseDelim(Brack)], true);
                    return Err(e);
                }
            };
            p.require_reported(CloseDelim(Brack))?;
            let expr = Expr::new(
                Span::union(prefix.span, p.last_span()),
                IndexExpr {
                    indexee: Box::new(prefix),
                    index: Box::new(expr),
                },
            );
            return parse_expr_suffix(p, expr, precedence);
        }
        // Call: `expr(args)`; `$`-prefixed names may be builtin system tasks.
        OpenDelim(Paren) if precedence <= Precedence::Postfix => {
            if let ast::SysIdentExpr(name) = prefix.data {
                match try_builtin_system_task(p, name, prefix.span)? {
                    Some(expr) => return parse_expr_suffix(p, expr, precedence),
                    None => (),
                }
            }
            let args = flanked(p, Paren, parse_call_args)?;
            let expr = Expr::new(
                Span::union(prefix.span, p.last_span()),
                CallExpr(Box::new(prefix), args),
            );
            return parse_expr_suffix(p, expr, precedence);
        }
        // Member access: `expr.name`.
        Period if precedence <= Precedence::Scope => {
            p.bump();
            let name = parse_identifier_name(p, "member name")?;
            let expr = Expr::new(
                Span::union(prefix.span, p.last_span()),
                MemberExpr {
                    expr: Box::new(prefix),
                    name,
                },
            );
            return parse_expr_suffix(p, expr, precedence);
        }
        // Scope access: `expr::name`.
        Namespace if precedence <= Precedence::Scope => {
            p.bump();
            let ident = parse_identifier_name(p, "scope name")?;
            let expr = Expr::new(
                Span::union(prefix.span, p.last_span()),
                ScopeExpr(Box::new(prefix), ident),
            );
            return parse_expr_suffix(p, expr, precedence);
        }
        // Postfix increment: `expr++`.
        Operator(Op::Inc) if precedence <= Precedence::Unary => {
            p.bump();
            let expr = Expr::new(
                Span::union(prefix.span, p.last_span()),
                UnaryExpr {
                    op: Op::Inc,
                    expr: Box::new(prefix),
                    postfix: true,
                },
            );
            return parse_expr_suffix(p, expr, precedence);
        }
        // Postfix decrement: `expr--`.
        Operator(Op::Dec) if precedence <= Precedence::Unary => {
            p.bump();
            let expr = Expr::new(
                Span::union(prefix.span, p.last_span()),
                UnaryExpr {
                    op: Op::Dec,
                    expr: Box::new(prefix),
                    postfix: true,
                },
            );
            return parse_expr_suffix(p, expr, precedence);
        }
        // Ternary conditional: `cond ? a : b`. Note the strict `<`: the
        // ternary is right-associative.
        Ternary if precedence < Precedence::Ternary => {
            p.bump();
            let true_expr = parse_expr_prec(p, Precedence::Ternary)?;
            p.require_reported(Colon)?;
            let false_expr = parse_expr_prec(p, Precedence::Ternary)?;
            let expr = Expr::new(
                Span::union(prefix.span, p.last_span()),
                TernaryExpr {
                    cond: Box::new(prefix),
                    true_expr: Box::new(true_expr),
                    false_expr: Box::new(false_expr),
                },
            );
            return parse_expr_suffix(p, expr, precedence);
        }
        // Set membership: `expr inside { a, [lo:hi], ... }`.
        Keyword(Kw::Inside) if precedence <= Precedence::Relational => {
            p.bump();
            let set = flanked(p, Brace, |p| {
                comma_list_nonempty(p, CloseDelim(Brace), "range", |p| {
                    if p.peek(0).0 == OpenDelim(Brack) {
                        // `[lo:hi]` value range.
                        p.require_reported(OpenDelim(Brack))?;
                        let mut sp = p.last_span();
                        let lo = parse_expr(p)?;
                        p.require_reported(Colon)?;
                        let hi = parse_expr(p)?;
                        p.require_reported(CloseDelim(Brack))?;
                        sp.expand(p.last_span());
                        Ok(ValueRange::Range { lo, hi, span: sp })
                    } else {
                        Ok(ValueRange::Single(parse_expr(p)?))
                    }
                })
            })?;
            let expr = Expr::new(
                Span::union(prefix.span, p.last_span()),
                InsideExpr(Box::new(prefix), set),
            );
            return parse_expr_suffix(p, expr, precedence);
        }
        // Size cast: `expr'(inner)`.
        Apostrophe if precedence <= Precedence::Postfix => {
            p.bump();
            let inner = flanked(p, Paren, |p| parse_expr(p))?;
            let expr = Expr::new(
                Span::union(prefix.span, p.last_span()),
                CastSizeExpr(Box::new(prefix), Box::new(inner)),
            );
            return parse_expr_suffix(p, expr, precedence);
        }
        _ => (),
    }
    // Assignment operators (`=`, `+=`, ...).
    if let Some(op) = as_assign_operator(tkn) {
        if precedence <= Precedence::Assignment {
            p.bump();
            let rhs = parse_expr_prec(p, Precedence::Assignment)?;
            let expr = Expr::new(
                Span::union(prefix.span, p.last_span()),
                AssignExpr {
                    op: op,
                    lhs: Box::new(prefix),
                    rhs: Box::new(rhs),
                },
            );
            return parse_expr_suffix(p, expr, precedence);
        }
    }
    // Binary operators, using each operator's own precedence for the RHS.
    if let Some(op) = as_binary_operator(tkn) {
        let prec = op.get_precedence();
        if precedence < prec {
            p.bump();
            let rhs = parse_expr_prec(p, prec)?;
            let expr = Expr::new(
                Span::union(prefix.span, p.last_span()),
                BinaryExpr {
                    op: op,
                    lhs: Box::new(prefix),
                    rhs: Box::new(rhs),
                },
            );
            return parse_expr_suffix(p, expr, precedence);
        }
    }
    // No applicable suffix; the prefix stands on its own.
    Ok(prefix)
}
/// Parse the leading part of an expression: prefix increment/decrement, a
/// unary operator, or a primary expression.
fn parse_expr_first<'n>(
    p: &mut dyn AbstractParser<'n>,
    precedence: Precedence,
) -> ReportedResult<Expr<'n>> {
    let first = p.peek(0).1;
    match p.peek(0) {
        // Prefix increment: `++expr`.
        (Operator(Op::Inc), _) if precedence <= Precedence::Unary => {
            p.bump();
            let expr = parse_expr_prec(p, Precedence::Unary)?;
            return Ok(Expr::new(
                Span::union(first, p.last_span()),
                UnaryExpr {
                    op: Op::Inc,
                    expr: Box::new(expr),
                    postfix: false,
                },
            ));
        }
        // Prefix decrement: `--expr`.
        (Operator(Op::Dec), _) if precedence <= Precedence::Unary => {
            p.bump();
            let expr = parse_expr_prec(p, Precedence::Unary)?;
            return Ok(Expr::new(
                Span::union(first, p.last_span()),
                UnaryExpr {
                    op: Op::Dec,
                    expr: Box::new(expr),
                    postfix: false,
                },
            ));
        }
        // Not implemented; report a diagnostic rather than mis-parsing.
        (Keyword(Kw::Tagged), sp) => {
            p.add_diag(DiagBuilder2::error("Tagged union expressions not implemented").span(sp));
            return Err(());
        }
        _ => (),
    }
    // Other prefix unary operators (+, -, !, ~, and the reduction operators).
    if let Some(op) = as_unary_operator(p.peek(0).0) {
        p.bump();
        let expr = parse_expr_prec(p, Precedence::Unary)?;
        return Ok(Expr::new(
            Span::union(first, p.last_span()),
            UnaryExpr {
                op: op,
                expr: Box::new(expr),
                postfix: false,
            },
        ));
    }
    parse_primary_expr(p)
}
/// Parse a primary expression: a literal, identifier, system identifier,
/// `this`, `$`, `null`, a concatenation `{...}`, a parenthesized expression,
/// or an assignment pattern `'{...}`.
fn parse_primary_expr<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Expr<'n>> {
    let (tkn, sp) = p.peek(0);
    match tkn {
        Literal(lit) => {
            p.bump();
            return Ok(Expr::new(sp, LiteralExpr(lit)));
        }
        Ident(n) | EscIdent(n) => {
            p.bump();
            return Ok(Expr::new(sp, IdentExpr(Spanned::new(n, sp))));
        }
        // System identifiers, e.g. `$clog2`.
        SysIdent(n) => {
            p.bump();
            return Ok(Expr::new(sp, SysIdentExpr(Spanned::new(n, sp))));
        }
        Keyword(Kw::This) => {
            p.bump();
            return Ok(Expr::new(sp, ThisExpr));
        }
        Dollar => {
            p.bump();
            return Ok(Expr::new(sp, DollarExpr));
        }
        Keyword(Kw::Null) => {
            p.bump();
            return Ok(Expr::new(sp, NullExpr));
        }
        // `{}` is the empty queue; otherwise a concatenation expression.
        OpenDelim(Brace) => {
            p.bump();
            if p.try_eat(CloseDelim(Brace)) {
                return Ok(Expr::new(Span::union(sp, p.last_span()), EmptyQueueExpr));
            }
            let data = match parse_concat_expr(p) {
                Ok(x) => x,
                Err(e) => {
                    // Skip to the matching `}` so parsing can continue.
                    p.recover_balanced(&[CloseDelim(Brace)], true);
                    return Err(e);
                }
            };
            p.require_reported(CloseDelim(Brace))?;
            return Ok(Expr::new(Span::union(sp, p.last_span()), data));
        }
        // Parenthesized expression, possibly a min:typ:max triple.
        OpenDelim(Paren) => {
            p.bump();
            let expr = match parse_primary_parenthesis(p) {
                Ok(x) => x,
                Err(e) => {
                    p.recover_balanced(&[CloseDelim(Paren)], true);
                    return Err(e);
                }
            };
            p.require_reported(CloseDelim(Paren))?;
            return Ok(expr);
        }
        // Assignment pattern: `'{ field, ... }`.
        Apostrophe => {
            p.bump();
            let fields = flanked(p, Brace, |p| {
                comma_list_nonempty(p, CloseDelim(Brace), "pattern field", parse_pattern_field)
            })?;
            return Ok(Expr::new(
                Span::union(sp, p.last_span()),
                PatternExpr(fields),
            ));
        }
        tkn => {
            p.add_diag(
                DiagBuilder2::error(format!("expected expression, found `{}` instead", tkn))
                    .span(sp),
            );
            return Err(());
        }
    }
}
/// Parse a single field of an assignment pattern `'{...}`.
///
/// Handles `default: value`, `member: value`, `type: value`, plain
/// expressions, and repeat patterns `expr{...}`.
fn parse_pattern_field<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<PatternField<'n>> {
    let mut span = p.peek(0).1;
    // `default: value` is unambiguous and handled up front.
    if p.try_eat(Keyword(Kw::Default)) {
        p.require_reported(Colon)?;
        let value = Box::new(parse_expr(p)?);
        span.expand(p.last_span());
        return Ok(PatternField::new(span, PatternFieldData::Default(value)));
    }
    // The remaining forms are ambiguous; try them speculatively.
    let mut pp = ParallelParser::new();
    pp.add_greedy("expression pattern", |p| {
        let expr = Box::new(parse_expr(p)?);
        p.require_reported(Colon)?;
        let value = Box::new(parse_expr(p)?);
        Ok(PatternFieldData::Member(expr, value))
    });
    pp.add_greedy("type pattern", |p| {
        let ty = parse_explicit_type(p)?;
        p.require_reported(Colon)?;
        let value = Box::new(parse_expr(p)?);
        Ok(PatternFieldData::Type(ty, value))
    });
    pp.add("expression or repeat pattern", |p| {
        let expr = Box::new(parse_expr(p)?);
        // A `{...}` after the expression makes it a repeat pattern.
        let data = if let Some(inner_exprs) = try_flanked(p, Brace, |p| {
            comma_list(p, CloseDelim(Brace), "expression", parse_expr)
        })? {
            PatternFieldData::Repeat(expr, inner_exprs)
        } else {
            PatternFieldData::Expr(expr)
        };
        p.anticipate(&[Comma, CloseDelim(Brace)])?;
        Ok(data)
    });
    let data = pp.finish(p, "expression pattern")?;
    span.expand(p.last_span());
    Ok(PatternField::new(span, data))
}
/// Direction of a streaming concatenation.
pub enum StreamDir {
    /// Produced by the `>>` streaming operator.
    In,
    /// Produced by the `<<` streaming operator.
    Out,
}
/// Parse the interior of a `{...}` expression: a streaming concatenation
/// (`{<< ...}` / `{>> ...}`), a multiple concatenation (`{n{...}}`), or a
/// plain concatenation.
///
/// The opening `{` has already been consumed; the closing `}` is left for the
/// caller.
fn parse_concat_expr<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ExprData<'n>> {
    // A leading `<<` or `>>` marks a streaming concatenation.
    let stream = match p.peek(0).0 {
        Operator(Op::LogicShL) => Some(StreamDir::Out),
        Operator(Op::LogicShR) => Some(StreamDir::In),
        _ => None,
    };
    if let Some(dir) = stream {
        p.bump();
        // Optional slice size, given as an expression or a type.
        let slice_size = if p.peek(0).0 != OpenDelim(Brace) {
            let mut pp = ParallelParser::new();
            pp.add_greedy("slice size expression", |p| {
                let s = parse_expr(p).map(|e| StreamConcatSlice::Expr(Box::new(e)))?;
                p.anticipate(&[OpenDelim(Brace)])?;
                Ok(s)
            });
            pp.add_greedy("slice size type", |p| {
                let s = parse_explicit_type(p).map(|t| StreamConcatSlice::Type(t))?;
                p.anticipate(&[OpenDelim(Brace)])?;
                Ok(s)
            });
            Some(pp.finish(p, "slice size expression or type")?)
        } else {
            None
        };
        // The stream operands, each with an optional `with [range]` clause.
        let exprs = flanked(p, Brace, |p| {
            comma_list_nonempty(p, CloseDelim(Brace), "stream expression", |p| {
                let expr = Box::new(parse_expr(p)?);
                let range = if p.try_eat(Keyword(Kw::With)) {
                    Some(Box::new(flanked(p, Brack, parse_range_expr)?))
                } else {
                    None
                };
                Ok(StreamExpr {
                    expr: expr,
                    range: range,
                })
            })
        })?;
        return Ok(StreamConcatExpr {
            slice: slice_size,
            exprs: exprs,
        });
    }
    // First element, parsed at concatenation precedence so `,` and an inner
    // `{` remain visible to this function.
    let first_expr = parse_expr_prec(p, Precedence::Concatenation)?;
    // `{n{...}}` — the first expression is the repeat count.
    if p.try_eat(OpenDelim(Brace)) {
        let exprs = match parse_expr_list(p) {
            Ok(x) => x,
            Err(e) => {
                p.recover_balanced(&[CloseDelim(Brace)], true);
                return Err(e);
            }
        };
        p.require_reported(CloseDelim(Brace))?;
        return Ok(ConcatExpr {
            repeat: Some(Box::new(first_expr)),
            exprs: exprs,
        });
    }
    // Plain concatenation: collect the remaining comma-separated elements.
    let mut exprs = Vec::new();
    exprs.push(first_expr);
    while p.try_eat(Comma) {
        if p.peek(0).0 == CloseDelim(Brace) {
            let q = p.peek(0).1;
            p.add_diag(DiagBuilder2::warning("superfluous trailing comma").span(q));
            break;
        }
        exprs.push(parse_expr_prec(p, Precedence::Min)?);
    }
    Ok(ConcatExpr {
        repeat: None,
        exprs: exprs,
    })
}
/// Parse a comma-separated list of expressions terminated by `}`, without
/// consuming the closing brace. Warns about a superfluous trailing comma.
fn parse_expr_list<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Vec<Expr<'n>>> {
    let mut v = Vec::new();
    loop {
        v.push(parse_expr_prec(p, Precedence::Min)?);
        match p.peek(0) {
            (Comma, sp) => {
                p.bump();
                // `expr, }` — tolerated with a warning.
                if p.peek(0).0 == CloseDelim(Brace) {
                    p.add_diag(DiagBuilder2::warning("superfluous trailing comma").span(sp));
                    break;
                }
            }
            (CloseDelim(Brace), _) => break,
            (_, sp) => {
                p.add_diag(DiagBuilder2::error("expected , or } after expression").span(sp));
                return Err(());
            }
        }
    }
    Ok(v)
}
/// Parse the interior of a parenthesized primary expression.
///
/// This is either a single expression or a `min:typ:max` triple; the closing
/// `)` is left for the caller.
fn parse_primary_parenthesis<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Expr<'n>> {
    let min = parse_expr_prec(p, Precedence::Min)?;
    // No colon means this was a plain parenthesized expression.
    if !p.try_eat(Colon) {
        return Ok(min);
    }
    let typ = parse_expr_prec(p, Precedence::Min)?;
    p.require_reported(Colon)?;
    let max = parse_expr_prec(p, Precedence::Min)?;
    Ok(Expr::new(
        Span::union(min.span, max.span),
        MinTypMaxExpr {
            min: Box::new(min),
            typ: Box::new(typ),
            max: Box::new(max),
        },
    ))
}
/// Parse an expression that may be a range, e.g. `a`, `a:b`, `a+:b`, `a-:b`.
///
/// When no range separator follows the first expression, that expression is
/// returned unchanged.
fn parse_range_expr<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Expr<'n>> {
    let mut span = p.peek(0).1;
    let lhs = parse_expr(p)?;
    let mode = match p.peek(0).0 {
        Colon => RangeMode::Absolute,
        AddColon => RangeMode::RelativeUp,
        SubColon => RangeMode::RelativeDown,
        // Not a range after all; return the bare expression.
        _ => return Ok(lhs),
    };
    // Consume the separator, then parse the right-hand bound.
    p.bump();
    let rhs = parse_expr(p)?;
    span.expand(p.last_span());
    Ok(Expr::new(
        span,
        RangeExpr {
            mode,
            lhs: Box::new(lhs),
            rhs: Box::new(rhs),
        },
    ))
}
/// Map a token to the unary operator it represents, if any.
fn as_unary_operator(tkn: Token) -> Option<Op> {
    // Only operator tokens can be unary operators.
    let op = match tkn {
        Operator(op) => op,
        _ => return None,
    };
    match op {
        Op::Add
        | Op::Sub
        | Op::LogicNot
        | Op::BitNot
        | Op::BitAnd
        | Op::BitNand
        | Op::BitOr
        | Op::BitNor
        | Op::BitXor
        | Op::BitNxor
        | Op::BitXnor => Some(op),
        _ => None,
    }
}
/// Map a token to the binary operator it represents, if any.
fn as_binary_operator(tkn: Token) -> Option<Op> {
    // Only operator tokens can be binary operators.
    let op = match tkn {
        Operator(op) => op,
        _ => return None,
    };
    match op {
        Op::Add
        | Op::Sub
        | Op::Mul
        | Op::Div
        | Op::Mod
        | Op::LogicEq
        | Op::LogicNeq
        | Op::CaseEq
        | Op::CaseNeq
        | Op::WildcardEq
        | Op::WildcardNeq
        | Op::LogicAnd
        | Op::LogicOr
        | Op::Pow
        | Op::Lt
        | Op::Leq
        | Op::Gt
        | Op::Geq
        | Op::BitAnd
        | Op::BitNand
        | Op::BitOr
        | Op::BitNor
        | Op::BitXor
        | Op::BitXnor
        | Op::BitNxor
        | Op::LogicShL
        | Op::LogicShR
        | Op::ArithShL
        | Op::ArithShR
        | Op::LogicImpl
        | Op::LogicEquiv => Some(op),
        _ => None,
    }
}
fn as_assign_operator(tkn: Token) -> Option<AssignOp> {
match tkn {
Operator(Op::Assign) => Some(AssignOp::Identity),
Operator(Op::AssignAdd) => Some(AssignOp::Add),
Operator(Op::AssignSub) => Some(AssignOp::Sub),
Operator(Op::AssignMul) => Some(AssignOp::Mul),
Operator(Op::AssignDiv) => Some(AssignOp::Div),
Operator(Op::AssignMod) => Some(AssignOp::Mod),
Operator(Op::AssignBitAnd) => Some(AssignOp::BitAnd),
Operator(Op::AssignBitOr) => Some(AssignOp::BitOr),
Operator(Op::AssignBitXor) => Some(AssignOp::BitXor),
Operator(Op::AssignLogicShL) => Some(AssignOp::LogicShL),
Operator(Op::AssignLogicShR) => Some(AssignOp::LogicShR),
Operator(Op::AssignArithShL) => Some(AssignOp::ArithShL),
Operator(Op::AssignArithShR) => Some(AssignOp::ArithShR),
_ => None,
}
}
/// Parse a port list up to and including the closing `)`.
///
/// The opening `(` has already been consumed by the caller. Recovers from a
/// malformed port by skipping to the next `,` or `)` so later ports still
/// get diagnostics.
fn parse_port_list<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Vec<Port<'n>>> {
    let mut v = Vec::new();
    // Empty list: `()`.
    if p.try_eat(CloseDelim(Paren)) {
        return Ok(v);
    }
    loop {
        match parse_port(p) {
            Ok(x) => v.push(x),
            // Skip the broken port and resume at the separator.
            Err(()) => p.recover_balanced(&[Comma, CloseDelim(Paren)], false),
        }
        match p.peek(0) {
            (Comma, sp) => {
                p.bump();
                // `port, )` — tolerated with a warning.
                if p.peek(0).0 == CloseDelim(Paren) {
                    p.add_diag(DiagBuilder2::warning("superfluous trailing comma").span(sp));
                    break;
                }
            }
            (CloseDelim(Paren), _) => break,
            (_, sp) => {
                p.add_diag(DiagBuilder2::error("expected , or ) after port").span(sp));
                p.recover_balanced(&[CloseDelim(Paren)], false);
                break;
            }
        }
    }
    p.require_reported(CloseDelim(Paren))?;
    Ok(v)
}
/// Parse a single port declaration.
///
/// The four flavors are tried speculatively in order; `parse_implicit_port`
/// (a bare expression) acts as the catch-all.
fn parse_port<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ast::Port<'n>> {
    let mut pp = ParallelParser::new();
    pp.add_greedy("interface port", parse_interface_port);
    pp.add_greedy("explicit port", parse_explicit_port);
    pp.add_greedy("named port", parse_named_port);
    pp.add_greedy("implicit port", parse_implicit_port);
    pp.finish(p, "port")
}
/// Parse an interface port: `interface [.modport] name [dims] [= expr]`.
fn parse_interface_port<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ast::Port<'n>> {
    let mut span = p.peek(0).1;
    p.require_reported(Keyword(Kw::Interface))?;
    // Optional `.modport` selection.
    let modport = if p.try_eat(Period) {
        Some(parse_identifier_name(p, "modport name")?)
    } else {
        None
    };
    let name = parse_identifier_name(p, "port name")?;
    let (dims, _) = parse_optional_dimensions(p)?;
    // Optional default value.
    let expr = if p.try_eat(Operator(Op::Assign)) {
        Some(parse_expr(p)?)
    } else {
        None
    };
    // Must be followed by `,` or `)`; rejects this interpretation during
    // speculative parsing otherwise.
    p.anticipate(&[CloseDelim(Paren), Comma])?;
    span.expand(p.last_span());
    Ok(ast::Port::new(
        span,
        ast::PortData::Intf {
            modport,
            name,
            dims,
            expr,
        },
    ))
}
/// Parse an explicitly-named port: `[dir] .name(expr)` or `[dir] .name()`.
fn parse_explicit_port<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ast::Port<'n>> {
    let mut span = p.peek(0).1;
    // Optional direction keyword.
    let dir = as_port_direction(p.peek(0).0);
    if dir.is_some() {
        p.bump();
    }
    p.require_reported(Period)?;
    let name = parse_identifier_name(p, "port name")?;
    // `(expr)` connects the port; `()` leaves it unconnected.
    let expr = flanked(p, Paren, |p| {
        if p.peek(0).0 == CloseDelim(Paren) {
            Ok(None)
        } else {
            Ok(Some(parse_expr(p)?))
        }
    })?;
    p.anticipate(&[CloseDelim(Paren), Comma])?;
    span.expand(p.last_span());
    Ok(ast::Port::new(
        span,
        ast::PortData::Explicit { dir, name, expr },
    ))
}
/// Parse a named port: optional direction, optional net/`var` kind, implicit
/// or explicit type, name, dimensions, and optional default value.
fn parse_named_port<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ast::Port<'n>> {
    let mut span = p.peek(0).1;
    // Optional direction keyword.
    let dir = as_port_direction(p.peek(0).0);
    if dir.is_some() {
        p.bump();
    }
    // Optional net type keyword or `var`.
    let kind = {
        let tkn = p.peek(0).0;
        if let Some(ty) = as_net_type(tkn) {
            p.bump();
            Some(ast::VarKind::Net {
                ty,
                kind: ast::NetKind::None,
            })
        } else if tkn == Keyword(Kw::Var) {
            p.bump();
            Some(ast::VarKind::Var)
        } else {
            None
        }
    };
    // The type may be explicit or implicit; try both speculatively, each
    // followed by the common name/dimensions/default tail.
    let mut pp = ParallelParser::new();
    pp.add("explicit type", |p| {
        let ty = parse_explicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    pp.add("implicit type", |p| {
        let ty = parse_implicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    let (ty, (name, dims, expr)) = pp.finish(p, "explicit or implicit type")?;
    // Shared tail: port name, optional dimensions, optional `= expr` default.
    fn tail<'n>(
        p: &mut dyn AbstractParser<'n>,
    ) -> ReportedResult<(Spanned<Name>, Vec<ast::TypeDim<'n>>, Option<ast::Expr<'n>>)> {
        let name = parse_identifier_name(p, "port name")?;
        let (dims, _) = parse_optional_dimensions(p)?;
        let expr = if p.try_eat(Operator(Op::Assign)) {
            Some(parse_expr(p)?)
        } else {
            None
        };
        p.anticipate(&[CloseDelim(Paren), Comma])?;
        Ok((name, dims, expr))
    }
    span.expand(p.last_span());
    Ok(ast::Port::new(
        span,
        ast::PortData::Named {
            dir,
            kind,
            ty,
            name,
            dims,
            expr,
        },
    ))
}
/// Parse an implicit port: a bare expression used as a port.
fn parse_implicit_port<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ast::Port<'n>> {
    let mut span = p.peek(0).1;
    let port_expr = parse_expr(p)?;
    span.expand(p.last_span());
    let port = ast::Port::new(span, ast::PortData::Implicit(port_expr));
    Ok(port)
}
/// Parse a parenthesized, comma-separated list of parameter assignments.
fn parse_parameter_assignments<'n>(
    p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<Vec<ast::ParamAssignment<'n>>> {
    flanked(p, Paren, |p| {
        let assignments = comma_list(
            p,
            CloseDelim(Paren),
            "parameter assignment",
            parse_parameter_assignment,
        )?;
        Ok(assignments)
    })
}
/// Parse a single parameter assignment.
///
/// Named assignments look like `.name(value)`; positional ones are a bare
/// type or expression.
fn parse_parameter_assignment<'n>(
    p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<ast::ParamAssignment<'n>> {
    let mut span = p.peek(0).1;
    let terms = [Comma, CloseDelim(Paren)];
    let mut name = None;
    let expr;
    if p.try_eat(Period) {
        // Named form: `.name(value)`.
        name = Some(parse_identifier(p, "parameter name")?);
        expr = flanked(p, Paren, |p| parse_type_or_expr(p, &terms))?;
    } else {
        // Positional form.
        expr = parse_type_or_expr(p, &terms)?;
    }
    span.expand(p.last_span());
    Ok(ast::ParamAssignment { span, name, expr })
}
/// Parse the body of a procedure of the given kind.
///
/// The caller has identified the introducing keyword, which is consumed
/// here; the procedure body is a single statement.
fn parse_procedure<'n>(
    p: &mut dyn AbstractParser<'n>,
    kind: ProcedureKind,
) -> ReportedResult<Procedure<'n>> {
    // Consume the procedure keyword itself.
    p.bump();
    let mut span = p.last_span();
    let body = parse_stmt(p)?;
    span.expand(p.last_span());
    let data = ProcedureData { kind, stmt: body };
    Ok(Procedure::new(span, data))
}
/// Parse a full subroutine declaration: prototype, items, and the closing
/// `endfunction`/`endtask` with its optional `: name` label.
fn parse_subroutine_decl<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<SubroutineDecl<'n>> {
    let mut span = p.peek(0).1;
    let prototype = parse_subroutine_prototype(p)?;
    // Functions end in `endfunction`, tasks in `endtask`.
    let term = if prototype.kind == SubroutineKind::Func {
        Keyword(Kw::Endfunction)
    } else {
        Keyword(Kw::Endtask)
    };
    let items = repeat_until(p, term, parse_subroutine_item)?;
    p.require_reported(term)?;
    // Optional trailing label after the end keyword.
    if p.try_eat(Colon) {
        p.eat_ident("function/task name")?;
    }
    span.expand(p.last_span());
    Ok(SubroutineDecl::new(
        span,
        SubroutineDeclData { prototype, items },
    ))
}
/// Parse the `function` or `task` keyword that opens a subroutine.
fn parse_subroutine_kind<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<SubroutineKind> {
    let (tkn, span) = p.peek(0);
    let kind = match tkn {
        Keyword(Kw::Function) => SubroutineKind::Func,
        Keyword(Kw::Task) => SubroutineKind::Task,
        _ => {
            p.add_diag(DiagBuilder2::error("expected `function` or `task`").span(span));
            return Err(());
        }
    };
    p.bump();
    Ok(kind)
}
/// Parse a subroutine prototype: kind keyword, optional lifetime, optional
/// return type (functions only), name, argument list, and trailing `;`.
fn parse_subroutine_prototype<'n>(
    p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<SubroutinePrototype<'n>> {
    let mut span = p.peek(0).1;
    let kind = parse_subroutine_kind(p)?;
    // Optional lifetime keyword.
    let lifetime = as_lifetime(p.peek(0).0);
    if lifetime.is_some() {
        p.bump();
    }
    let (retty, (name, args)) = if kind == SubroutineKind::Func {
        if p.peek(0).0 == Keyword(Kw::New) {
            // Constructors (`new`) carry no return type.
            (None, parse_subroutine_prototype_tail(p)?)
        } else {
            // The return type may be implicit or explicit; try both
            // speculatively.
            let mut pp = ParallelParser::new();
            pp.add("implicit function return type", |p| {
                let ty = parse_implicit_type(p)?;
                Ok((Some(ty), parse_subroutine_prototype_tail(p)?))
            });
            pp.add("explicit function return type", |p| {
                let ty = parse_explicit_type(p)?;
                Ok((Some(ty), parse_subroutine_prototype_tail(p)?))
            });
            pp.finish(p, "implicit or explicit function return type")?
        }
    } else {
        // Tasks have no return type.
        (None, parse_subroutine_prototype_tail(p)?)
    };
    span.expand(p.last_span());
    Ok(SubroutinePrototype::new(
        span,
        SubroutinePrototypeData {
            kind,
            lifetime,
            name,
            args,
            retty,
        },
    ))
}
/// Parse the tail of a subroutine prototype: the name (or `new` for
/// constructors), an optional parenthesized port list, and the trailing `;`.
fn parse_subroutine_prototype_tail<'n>(
    p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<(Spanned<Name>, Vec<SubroutinePort<'n>>)> {
    // `new` is a valid subroutine name for class constructors.
    let name = if p.try_eat(Keyword(Kw::New)) {
        Spanned::new(get_name_table().intern("new", true), p.last_span())
    } else {
        parse_identifier_name(p, "function or task name")?
    };
    // The port list is optional; a missing list yields no ports.
    let args = try_flanked(p, Paren, |p| {
        comma_list(p, CloseDelim(Paren), "subroutine port", |p| {
            let mut span = p.peek(0).1;
            let dir = try_subroutine_port_dir(p);
            let var = p.try_eat(Keyword(Kw::Var));
            // The port type may be explicit or implicit; try both, each
            // followed by the shared name/dimensions/default tail.
            let mut pp = ParallelParser::new();
            pp.add("explicit type", |p| {
                let ty = parse_explicit_type(p)?;
                Ok((ty, tail(p)?))
            });
            pp.add("implicit type", |p| {
                let ty = parse_implicit_type(p)?;
                Ok((ty, tail(p)?))
            });
            let (ty, name) = pp.finish(p, "explicit or implicit type")?;
            // Shared tail: optional port name with dimensions and default.
            fn tail<'n>(
                p: &mut dyn AbstractParser<'n>,
            ) -> ReportedResult<Option<SubroutinePortName<'n>>> {
                let data = if let Some(name) = try_identifier_name(p)? {
                    let (dims, _) = parse_optional_dimensions(p)?;
                    let expr = if p.try_eat(Operator(Op::Assign)) {
                        Some(parse_expr(p)?)
                    } else {
                        None
                    };
                    Some(SubroutinePortName {
                        name: name,
                        dims: dims,
                        expr: expr,
                    })
                } else {
                    None
                };
                // The port must be followed by `,` or `)`.
                match p.peek(0) {
                    (Comma, _) | (CloseDelim(Paren), _) => Ok(data),
                    (_, sp) => {
                        p.add_diag(
                            DiagBuilder2::error("expected , or ) after subroutine port").span(sp),
                        );
                        Err(())
                    }
                }
            }
            span.expand(p.last_span());
            Ok(SubroutinePort::new(
                span,
                SubroutinePortData { dir, var, ty, name },
            ))
        })
    })?
    .unwrap_or(Vec::new());
    p.require_reported(Semicolon)?;
    Ok((name, args))
}
/// Consume a subroutine port direction (`input`, `output`, `inout`, `ref`,
/// or `const ref`) if present, returning the direction or `None`.
fn try_subroutine_port_dir<'n>(p: &mut dyn AbstractParser<'n>) -> Option<SubroutinePortDir> {
    // `const ref` is the only two-token direction; handle it first. A bare
    // `const` that is not followed by `ref` consumes nothing.
    if let (Keyword(Kw::Const), Keyword(Kw::Ref)) = (p.peek(0).0, p.peek(1).0) {
        p.bump();
        p.bump();
        return Some(SubroutinePortDir::ConstRef);
    }
    let dir = match p.peek(0).0 {
        Keyword(Kw::Input) => SubroutinePortDir::Input,
        Keyword(Kw::Output) => SubroutinePortDir::Output,
        Keyword(Kw::Inout) => SubroutinePortDir::Inout,
        Keyword(Kw::Ref) => SubroutinePortDir::Ref,
        _ => return None,
    };
    p.bump();
    Some(dir)
}
/// Parse a single item in a subroutine body: either a port declaration
/// (introduced by a direction keyword) or a statement.
fn parse_subroutine_item<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<SubroutineItem<'n>> {
    let mut span = p.peek(0).1;
    // A leading direction keyword marks a port declaration.
    if let Some(dir) = try_subroutine_port_dir(p) {
        let var = p.try_eat(Keyword(Kw::Var));
        // The declared type may be explicit or implicit; try both.
        let mut pp = ParallelParser::new();
        pp.add("explicit type", |p| {
            let ty = parse_explicit_type(p)?;
            let names = comma_list_nonempty(
                p,
                Semicolon,
                "port declaration",
                parse_variable_decl_assignment,
            )?;
            p.require_reported(Semicolon)?;
            Ok((ty, names))
        });
        pp.add("implicit type", |p| {
            let ty = parse_implicit_type(p)?;
            let names = comma_list_nonempty(
                p,
                Semicolon,
                "port declaration",
                parse_variable_decl_assignment,
            )?;
            p.require_reported(Semicolon)?;
            Ok((ty, names))
        });
        let (ty, names) = pp.finish(p, "explicit or implicit type")?;
        span.expand(p.last_span());
        return Ok(SubroutineItem::PortDecl(SubroutinePortDecl {
            span: span,
            dir: dir,
            var: var,
            ty: ty,
            names: names,
        }));
    }
    // Anything else is a regular statement.
    Ok(SubroutineItem::Stmt(parse_stmt(p)?))
}
/// Parse a single statement, including an optional `label:` prefix.
///
/// A lone `;` yields the null statement.
fn parse_stmt<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Stmt<'n>> {
    let mut span = p.peek(0).1;
    if p.try_eat(Semicolon) {
        return Ok(Stmt::new_null(span));
    }
    // An identifier directly followed by `:` labels the statement.
    let mut label = None;
    if p.is_ident() && p.peek(1).0 == Colon {
        let (name, _) = p.eat_ident("statement label")?;
        // Consume the `:`.
        p.bump();
        label = Some(name);
    }
    let kind = parse_stmt_kind(p, &mut label)?;
    span.expand(p.last_span());
    Ok(Stmt::new(span, StmtData { label, kind }))
}
/// Parse the kind of a single statement, after any label has been handled.
///
/// `label` is the statement label parsed by the caller, if any; it is passed
/// through to `parse_block` so block labels can be checked against it.
fn parse_stmt_kind<'n>(
    p: &mut dyn AbstractParser<'n>,
    label: &mut Option<Name>,
) -> ReportedResult<StmtKind<'n>> {
    let (tkn, sp) = p.peek(0);
    // Timing-control prefixes: delay, event, and cycle-delay controls each
    // wrap the statement that follows them.
    if let Some(dc) = try_delay_control(p)? {
        let stmt = Box::new(parse_stmt(p)?);
        return Ok(TimedStmt(TimingControl::Delay(dc), stmt));
    }
    if let Some(ec) = try_event_control(p)? {
        let stmt = Box::new(parse_stmt(p)?);
        return Ok(TimedStmt(TimingControl::Event(ec), stmt));
    }
    if let Some(cd) = try_cycle_delay(p)? {
        let stmt = Box::new(parse_stmt(p)?);
        return Ok(TimedStmt(TimingControl::Cycle(cd), stmt));
    }
    Ok(match tkn {
        // `begin ... end` sequential block.
        OpenDelim(Bgend) => {
            p.bump();
            let (stmts, _) = parse_block(p, label, &[CloseDelim(Bgend)])?;
            SequentialBlock(stmts)
        }
        // `fork ... join/join_any/join_none` parallel block.
        Keyword(Kw::Fork) => {
            p.bump();
            let (stmts, terminator) = parse_block(
                p,
                label,
                &[
                    Keyword(Kw::Join),
                    Keyword(Kw::JoinAny),
                    Keyword(Kw::JoinNone),
                ],
            )?;
            let join = match terminator {
                Keyword(Kw::Join) => JoinKind::All,
                Keyword(Kw::JoinAny) => JoinKind::Any,
                Keyword(Kw::JoinNone) => JoinKind::None,
                // parse_block only returns one of the tokens listed above.
                x => panic!("Invalid parallel block terminator {:?}", x),
            };
            ParallelBlock(stmts, join)
        }
        // `unique`/`unique0`/`priority` qualified if/case statements.
        Keyword(Kw::Unique) => {
            p.bump();
            parse_if_or_case(p, Some(UniquePriority::Unique))?
        }
        Keyword(Kw::Unique0) => {
            p.bump();
            parse_if_or_case(p, Some(UniquePriority::Unique0))?
        }
        Keyword(Kw::Priority) => {
            p.bump();
            parse_if_or_case(p, Some(UniquePriority::Priority))?
        }
        Keyword(Kw::If) | Keyword(Kw::Case) | Keyword(Kw::Casex) | Keyword(Kw::Casez) => {
            parse_if_or_case(p, None)?
        }
        // Loop statements.
        Keyword(Kw::Forever) => {
            p.bump();
            let stmt = Box::new(parse_stmt(p)?);
            ForeverStmt(stmt)
        }
        Keyword(Kw::Repeat) => {
            p.bump();
            let expr = flanked(p, Paren, parse_expr)?;
            let stmt = Box::new(parse_stmt(p)?);
            RepeatStmt(expr, stmt)
        }
        Keyword(Kw::While) => {
            p.bump();
            let expr = flanked(p, Paren, parse_expr)?;
            let stmt = Box::new(parse_stmt(p)?);
            WhileStmt(expr, stmt)
        }
        Keyword(Kw::Do) => {
            p.bump();
            let stmt = Box::new(parse_stmt(p)?);
            let q = p.last_span();
            // `do` must be followed by a `while (...)` clause.
            if !p.try_eat(Keyword(Kw::While)) {
                p.add_diag(DiagBuilder2::error("Do loop requires a while clause").span(q));
                return Err(());
            }
            let expr = flanked(p, Paren, parse_expr)?;
            DoStmt(stmt, expr)
        }
        Keyword(Kw::For) => {
            p.bump();
            // `for (<init>; <cond>; <step>)` — the init statement consumes
            // its own terminating `;`.
            let (init, cond, step) = flanked(p, Paren, |p| {
                let init = Box::new(parse_stmt(p)?);
                let cond = parse_expr(p)?;
                p.require_reported(Semicolon)?;
                let step = parse_expr(p)?;
                Ok((init, cond, step))
            })?;
            let stmt = Box::new(parse_stmt(p)?);
            ForStmt(init, cond, step, stmt)
        }
        Keyword(Kw::Foreach) => {
            p.bump();
            // `foreach (array[i, j, ...])` — empty slots between commas are
            // allowed and skipped, but still count toward the index position.
            let (expr, vars) = flanked(p, Paren, |p| {
                let expr = parse_expr_prec(p, Precedence::Scope)?;
                let vars = flanked(p, Brack, |p| {
                    Ok(comma_list(p, CloseDelim(Brack), "loop variables", |p| {
                        Ok(if p.peek(0).0 != Comma {
                            Some(parse_identifier_name(p, "loop variable name")?)
                        } else {
                            None
                        })
                    })?
                    .into_iter()
                    .enumerate()
                    .flat_map(|(i, name)| {
                        name.map(|name| {
                            ast::ForeachIndex::new(
                                name.span,
                                ast::ForeachIndexData { index: i, name },
                            )
                        })
                    })
                    .collect())
                })?;
                Ok((expr, vars))
            })?;
            let stmt = Box::new(parse_stmt(p)?);
            ForeachStmt(expr, vars, stmt)
        }
        Keyword(Kw::Genvar) => {
            p.bump();
            let names = comma_list_nonempty(p, Semicolon, "genvar declaration", parse_genvar_decl)?;
            p.require_reported(Semicolon)?;
            GenvarDeclStmt(names)
        }
        // `return;` or `return <expr>;`.
        Keyword(Kw::Return) => {
            p.bump();
            ReturnStmt(if p.try_eat(Semicolon) {
                None
            } else {
                let expr = parse_expr(p)?;
                p.require_reported(Semicolon)?;
                Some(expr)
            })
        }
        Keyword(Kw::Break) => {
            p.bump();
            p.require_reported(Semicolon)?;
            BreakStmt
        }
        Keyword(Kw::Continue) => {
            p.bump();
            p.require_reported(Semicolon)?;
            ContinueStmt
        }
        Keyword(Kw::Import) => ImportStmt(parse_import_decl(p)?),
        // Assertion statements share one entry point.
        Keyword(Kw::Assert)
        | Keyword(Kw::Assume)
        | Keyword(Kw::Cover)
        | Keyword(Kw::Expect)
        | Keyword(Kw::Restrict) => AssertionStmt(Box::new(parse_assertion(p)?)),
        Keyword(Kw::Wait) => {
            p.bump();
            match p.peek(0) {
                // `wait (expr) stmt`.
                (OpenDelim(Paren), _) => {
                    let expr = flanked(p, Paren, parse_expr)?;
                    let stmt = Box::new(parse_stmt(p)?);
                    WaitExprStmt(expr, stmt)
                }
                // `wait fork;`.
                (Keyword(Kw::Fork), _) => {
                    p.bump();
                    p.require_reported(Semicolon)?;
                    WaitForkStmt
                }
                (tkn, sp) => {
                    p.add_diag(
                        DiagBuilder2::error(format!(
                            "expected (<expr>) or fork after wait, found {} instead",
                            tkn
                        ))
                        .span(sp),
                    );
                    return Err(());
                }
            }
        }
        // Not implemented; report a diagnostic rather than guessing.
        Keyword(Kw::WaitOrder) => {
            p.add_diag(
                DiagBuilder2::error("Don't know how to parse wait_order statements").span(sp),
            );
            return Err(());
        }
        // `disable fork;` or `disable <name>;`.
        Keyword(Kw::Disable) => {
            p.bump();
            if p.try_eat(Keyword(Kw::Fork)) {
                p.require_reported(Semicolon)?;
                DisableForkStmt
            } else {
                let (name, _) = p.eat_ident("task or block name")?;
                p.require_reported(Semicolon)?;
                DisableStmt(name)
            }
        }
        // Anything else: try variable declaration, assign statement, and
        // expression statement speculatively; on failure skip to the next
        // `;` to recover.
        _ => {
            let result = {
                let mut pp = ParallelParser::new();
                pp.add("variable declaration", |p| {
                    parse_var_decl(p).map(|d| ast::VarDeclStmt(d))
                });
                pp.add("assign statement", |p| parse_assign_stmt(p));
                pp.add("expression statement", |p| parse_expr_stmt(p));
                pp.finish(p, "statement")
            };
            match result {
                Ok(x) => x,
                Err(_) => {
                    p.recover_balanced(&[Semicolon], true);
                    return Err(());
                }
            }
        }
    })
}
/// Parse a sequence of statements terminated by one of `terminators`.
///
/// Handles an optional leading `: label` and an optional trailing
/// `: label`, merging them into `label`: a repeated identical label yields
/// a warning, conflicting labels an error. Returns the parsed statements
/// together with the terminator token that actually ended the block.
fn parse_block<'n>(
    p: &mut dyn AbstractParser<'n>,
    label: &mut Option<Name>,
    terminators: &[Token],
) -> ReportedResult<(Vec<Stmt<'n>>, Token)> {
    let span = p.last_span();
    // Optional leading block label, e.g. `begin : foo`.
    if p.try_eat(Colon) {
        let (name, name_span) = p.eat_ident("block label")?;
        if let Some(existing) = *label {
            if name == existing {
                p.add_diag(
                    DiagBuilder2::warning(format!("Block {} labelled twice", name)).span(name_span),
                );
            } else {
                p.add_diag(
                    DiagBuilder2::error(format!(
                        "Block has been given two conflicting labels, {} and {}",
                        existing, name
                    ))
                    .span(name_span),
                );
            }
        } else {
            *label = Some(name);
        }
    }
    let mut v = Vec::new();
    let terminator;
    // Parse statements until one of the terminator tokens shows up.
    'outer: loop {
        let tkn = p.peek(0).0;
        for term in terminators {
            if tkn == *term {
                terminator = *term;
                p.bump();
                break 'outer;
            }
        }
        match parse_stmt(p) {
            Ok(x) => v.push(x),
            Err(()) => {
                // On a statement parse error, skip ahead to the nearest
                // terminator and end the block there.
                p.recover_balanced(terminators, false);
                terminator = p.peek(0).0;
                p.bump();
                break;
            }
        }
    }
    // Optional trailing block label; must agree with the leading one.
    if p.try_eat(Colon) {
        let (name, name_span) = p.eat_ident("block label")?;
        if let Some(before) = *label {
            if before != name {
                p.add_diag(
                    DiagBuilder2::error(format!(
                        "Block label {} at end of block does not match label {} at beginning of \
                         block",
                        name, before
                    ))
                    .span(name_span),
                );
            }
        } else {
            p.add_diag(
                DiagBuilder2::error(format!(
                    "Block label {} provided at the end of the block, but not at the beginning",
                    name
                ))
                .span(name_span),
            );
        }
    }
    Ok((v, terminator))
}
fn parse_continuous_assign<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ContAssign<'n>> {
let mut span = p.peek(0).1;
p.require_reported(Keyword(Kw::Assign))?;
let strength = try_flanked(p, Paren, |p| {
let span = p.peek(0).1;
match try_drive_strength(p)? {
Some(x) => Ok(x),
None => {
p.add_diag(DiagBuilder2::error("expected drive strength").span(span));
Err(())
}
}
})?;
let delay_control = try_delay_control(p)?;
let assignments = comma_list_nonempty(p, Semicolon, "continuous assignment", parse_assignment)?;
p.require_reported(Semicolon)?;
span.expand(p.last_span());
Ok(ast::ContAssign::new(
span,
ast::ContAssignData {
strength,
delay: None,
delay_control,
assignments,
},
))
}
fn parse_if_or_case<'n>(
p: &mut dyn AbstractParser<'n>,
up: Option<UniquePriority>,
) -> ReportedResult<StmtKind<'n>> {
let (tkn, span) = p.peek(0);
match tkn {
Keyword(Kw::Case) => {
p.bump();
parse_case(p, up, CaseKind::Normal)
}
Keyword(Kw::Casez) => {
p.bump();
parse_case(p, up, CaseKind::DontCareZ)
}
Keyword(Kw::Casex) => {
p.bump();
parse_case(p, up, CaseKind::DontCareXZ)
}
Keyword(Kw::If) => {
p.bump();
parse_if(p, up)
}
x => {
p.add_diag(
DiagBuilder2::error(format!("expected case or if statement, got {:?}", x))
.span(span),
);
Err(())
}
}
}
fn parse_case<'n>(
p: &mut dyn AbstractParser<'n>,
up: Option<UniquePriority>,
kind: CaseKind,
) -> ReportedResult<StmtKind<'n>> {
let q = p.last_span();
p.require_reported(OpenDelim(Paren))?;
let expr = match parse_expr(p) {
Ok(x) => x,
Err(()) => {
p.recover_balanced(&[CloseDelim(Paren)], true);
return Err(());
}
};
p.require_reported(CloseDelim(Paren))?;
let mode = match p.peek(0).0 {
Keyword(Kw::Inside) => {
p.bump();
CaseMode::Inside
}
Keyword(Kw::Matches) => {
p.bump();
CaseMode::Pattern
}
_ => CaseMode::Normal,
};
let mut items = Vec::new();
while p.peek(0).0 != Keyword(Kw::Endcase) && p.peek(0).0 != Eof {
let mut span = p.peek(0).1;
if p.peek(0).0 == Keyword(Kw::Default) {
p.bump();
p.try_eat(Colon);
let stmt = Box::new(parse_stmt(p)?);
items.push(CaseItem::Default(stmt));
}
else {
let mut exprs = Vec::new();
loop {
if p.peek(0).0 == OpenDelim(Brack) {
p.require_reported(OpenDelim(Brack))?;
parse_expr(p)?;
p.require_reported(Colon)?;
parse_expr(p)?;
p.require_reported(CloseDelim(Brack))?;
} else {
match parse_expr(p) {
Ok(x) => exprs.push(x),
Err(()) => {
p.recover_balanced(&[Colon], false);
break;
}
}
}
match p.peek(0) {
(Comma, sp) => {
p.bump();
if p.try_eat(Colon) {
p.add_diag(
DiagBuilder2::warning("superfluous trailing comma").span(sp),
);
break;
}
}
(Colon, _) => break,
(_, sp) => {
p.add_diag(
DiagBuilder2::error("expected , or : after case expression").span(sp),
);
break;
}
}
}
p.require_reported(Colon)?;
let stmt = Box::new(parse_stmt(p)?);
items.push(CaseItem::Expr(exprs, stmt));
}
}
p.require_reported(Keyword(Kw::Endcase))?;
Ok(CaseStmt {
up: up,
kind: kind,
expr: expr,
mode: mode,
items: items,
})
}
/// Parse `( cond ) stmt [else stmt]`; the `if` keyword was already
/// consumed by the caller.
fn parse_if<'n>(
    p: &mut dyn AbstractParser<'n>,
    up: Option<UniquePriority>,
) -> ReportedResult<StmtKind<'n>> {
    p.require_reported(OpenDelim(Paren))?;
    // Condition; on failure skip past the closing paren before bailing.
    let cond = match parse_expr(p) {
        Err(()) => {
            p.recover_balanced(&[CloseDelim(Paren)], true);
            return Err(());
        }
        Ok(c) => c,
    };
    p.require_reported(CloseDelim(Paren))?;
    let main_stmt = Box::new(parse_stmt(p)?);
    // Optional `else` branch.
    let mut else_stmt = None;
    if p.peek(0).0 == Keyword(Kw::Else) {
        p.bump();
        else_stmt = Some(Box::new(parse_stmt(p)?));
    }
    Ok(IfStmt {
        up,
        cond,
        main_stmt,
        else_stmt,
    })
}
/// Parse an optional `#` delay control; absence is not an error.
fn try_delay_control<'n>(
    p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<Option<DelayControl<'n>>> {
    if !p.try_eat(Hashtag) {
        return Ok(None);
    }
    let mut span = p.last_span();
    let (tkn, sp) = p.peek(0);
    let expr;
    match tkn {
        // `#(mintypmax_expr)`
        OpenDelim(Paren) => {
            p.bump();
            expr = parse_expr_prec(p, Precedence::MinTypMax)?;
            p.require_reported(CloseDelim(Paren))?;
        }
        // `#10`, `#1ns`, or `#ident`
        Literal(Number(..)) | Literal(Time(..)) | Ident(..) => {
            expr = parse_expr_first(p, Precedence::Max)?;
        }
        _ => {
            p.add_diag(DiagBuilder2::error("expected delay value or expression after #").span(sp));
            return Err(());
        }
    }
    span.expand(p.last_span());
    Ok(Some(DelayControl { span, expr }))
}
/// Parse an optional `@` event control; absence is not an error.
fn try_event_control<'n>(
    p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<Option<EventControl<'n>>> {
    if !p.try_eat(At) {
        return Ok(None);
    }
    let mut span = p.last_span();
    // `@*` — implicit sensitivity list.
    if p.peek(0).0 == Operator(Op::Mul) {
        p.bump();
        span.expand(p.last_span());
        return Ok(Some(EventControl {
            span,
            data: EventControlData::Implicit,
        }));
    }
    // `@(*)` — also an implicit sensitivity list.
    let paren_star = p.peek(0).0 == OpenDelim(Paren)
        && p.peek(1).0 == Operator(Op::Mul)
        && p.peek(2).0 == CloseDelim(Paren);
    if paren_star {
        for _ in 0..3 {
            p.bump();
        }
        span.expand(p.last_span());
        return Ok(Some(EventControl {
            span,
            data: EventControlData::Implicit,
        }));
    }
    // Otherwise an explicit event expression, e.g. `@(posedge clk)`.
    let expr = parse_event_expr(p, EventPrecedence::Max)?;
    span.expand(p.last_span());
    Ok(Some(EventControl {
        span,
        data: EventControlData::Expr(expr),
    }))
}
fn try_cycle_delay<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Option<CycleDelay>> {
if !p.try_eat(DoubleHashtag) {
return Ok(None);
}
let q = p.last_span();
p.add_diag(DiagBuilder2::error("Don't know how to parse cycle delay").span(q));
Err(())
}
/// Parse one `lhs = rhs` assignment: the left-hand side is restricted to
/// postfix precedence, the right-hand side to assignment precedence.
fn parse_assignment<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<(Expr<'n>, Expr<'n>)> {
    let target = parse_expr_prec(p, Precedence::Postfix)?;
    p.require_reported(Operator(Op::Assign))?;
    let value = parse_expr_prec(p, Precedence::Assignment)?;
    Ok((target, value))
}
fn parse_assign_stmt<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<StmtKind<'n>> {
let expr = parse_expr_prec(p, Precedence::Postfix)?;
let (tkn, sp) = p.peek(0);
if let Some(op) = as_assign_operator(tkn) {
p.bump();
let rhs = parse_expr(p)?;
p.require_reported(Semicolon)?;
return Ok(BlockingAssignStmt {
lhs: expr,
rhs: rhs,
op: op,
});
}
if tkn == Operator(Op::Leq) {
p.bump();
let delay_control = try_delay_control(p)?;
let event_control = None;
let rhs = parse_expr(p)?;
p.require_reported(Semicolon)?;
return Ok(NonblockingAssignStmt {
lhs: expr,
rhs: rhs,
delay: delay_control,
event: event_control,
});
}
p.add_diag(DiagBuilder2::error("expected blocking or non-blocking assign statement").span(sp));
Err(())
}
/// Parse a bare expression statement terminated by `;`, at unary
/// precedence.
fn parse_expr_stmt<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<StmtKind<'n>> {
    let inner = parse_expr_prec(p, Precedence::Unary)?;
    p.require_reported(Semicolon)?;
    Ok(ExprStmt(inner))
}
/// Parse an event expression, e.g. `posedge clk` or `(a or b)`.
///
/// `precedence` limits which suffix operators (`iff`, `or`/`,`) the
/// trailing `parse_event_expr_suffix` call may consume.
fn parse_event_expr<'n>(
    p: &mut dyn AbstractParser<'n>,
    precedence: EventPrecedence,
) -> ReportedResult<EventExpr<'n>> {
    let mut span = p.peek(0).1;
    // Parenthesized subexpression: parse the interior at the loosest
    // level, then apply suffix operators at the caller's level.
    if p.try_eat(OpenDelim(Paren)) {
        return match parse_event_expr(p, EventPrecedence::Min) {
            Ok(x) => {
                p.require_reported(CloseDelim(Paren))?;
                parse_event_expr_suffix(p, x, precedence)
            }
            Err(()) => {
                p.recover_balanced(&[CloseDelim(Paren)], true);
                Err(())
            }
        };
    }
    // Optional edge qualifier (`edge`, `posedge`, `negedge`).
    let edge = as_edge_ident(p.peek(0).0);
    if edge != EdgeIdent::Implicit {
        p.bump();
    }
    let value = parse_expr(p)?;
    span.expand(p.last_span());
    let expr = EventExpr::Edge {
        span: span,
        edge: edge,
        value: value,
    };
    parse_event_expr_suffix(p, expr, precedence)
}
/// Consume trailing `iff cond` and `or`/`,` operators after an event
/// expression, subject to `precedence` (`iff` binds tighter than `or`,
/// hence the strict `<` versus `<=` comparisons).
fn parse_event_expr_suffix<'n>(
    p: &mut dyn AbstractParser<'n>,
    expr: EventExpr<'n>,
    precedence: EventPrecedence,
) -> ReportedResult<EventExpr<'n>> {
    match p.peek(0).0 {
        // `expr iff cond`
        Keyword(Kw::Iff) if precedence < EventPrecedence::Iff => {
            p.bump();
            let cond = parse_expr(p)?;
            Ok(EventExpr::Iff {
                span: Span::union(expr.span(), cond.span),
                expr: Box::new(expr),
                cond: cond,
            })
        }
        // `expr or rhs` / `expr, rhs` — chains right-recursively.
        Keyword(Kw::Or) | Comma if precedence <= EventPrecedence::Or => {
            p.bump();
            let rhs = parse_event_expr(p, EventPrecedence::Or)?;
            Ok(EventExpr::Or {
                span: Span::union(expr.span(), rhs.span()),
                lhs: Box::new(expr),
                rhs: Box::new(rhs),
            })
        }
        _ => Ok(expr),
    }
}
/// Binding levels for event expression suffix operators, ordered from
/// loosest (`Min`, accepts everything) to tightest (`Max`, accepts no
/// suffix operators). `Ord` is derived so levels compare directly.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum EventPrecedence {
    Min,
    Or,
    Iff,
    Max,
}
/// Map an edge keyword onto its AST counterpart; any other token denotes
/// an implicit (any-change) sensitivity.
fn as_edge_ident(tkn: Token) -> EdgeIdent {
    if let Keyword(kw) = tkn {
        match kw {
            Kw::Edge => return EdgeIdent::Edge,
            Kw::Posedge => return EdgeIdent::Posedge,
            Kw::Negedge => return EdgeIdent::Negedge,
            _ => (),
        }
    }
    EdgeIdent::Implicit
}
/// Parse the comma-separated argument list of a call, up to but NOT
/// including the closing parenthesis — the surrounding `flanked`/
/// `try_flanked` call consumes the parens.
///
/// Supports positional arguments, named `.name(expr)` / `.name()`
/// arguments, and empty (skipped) positional arguments.
fn parse_call_args<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Vec<CallArg<'n>>> {
    let mut v = Vec::new();
    if p.peek(0).0 == CloseDelim(Paren) {
        return Ok(v);
    }
    loop {
        match p.peek(0) {
            // An immediate `,` stands for an empty positional argument;
            // the comma itself is consumed by the separator match below.
            (Comma, sp) => v.push(CallArg {
                span: sp,
                name_span: sp,
                name: None,
                expr: None,
            }),
            // Named argument `.name(expr)` or `.name()`.
            (Period, mut sp) => {
                p.bump();
                let (name, mut name_sp) = p.eat_ident("argument name")?;
                name_sp.expand(sp);
                let expr = flanked(p, Paren, |p| {
                    Ok(if p.peek(0).0 == CloseDelim(Paren) {
                        None
                    } else {
                        Some(parse_expr(p)?)
                    })
                })?;
                sp.expand(p.last_span());
                v.push(CallArg {
                    span: sp,
                    name_span: name_sp,
                    name: Some(name),
                    expr: expr,
                });
            }
            // Positional argument.
            (_, mut sp) => {
                let expr = parse_expr(p)?;
                sp.expand(p.last_span());
                v.push(CallArg {
                    span: sp,
                    name_span: sp,
                    name: None,
                    expr: Some(expr),
                });
            }
        }
        match p.peek(0) {
            (Comma, sp) => {
                p.bump();
                // Tolerate a trailing comma before `)`, but warn about it.
                // BUG FIX: this used to `try_eat` the closing paren, which
                // stole the token the surrounding `flanked` call expects
                // to consume itself.
                if p.peek(0).0 == CloseDelim(Paren) {
                    p.add_diag(DiagBuilder2::warning("superfluous trailing comma").span(sp));
                    break;
                }
            }
            (CloseDelim(Paren), _) => break,
            (_, sp) => {
                p.add_diag(DiagBuilder2::error("expected , or ) after call argument").span(sp));
                return Err(());
            }
        }
    }
    Ok(v)
}
/// Parse one declarator of a variable declaration:
/// `name [dims] [= init]`.
fn parse_variable_decl_assignment<'n>(
    p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<VarDeclName<'n>> {
    let mut full_span = p.peek(0).1;
    let (name, name_span) = p.eat_ident("variable name")?;
    let (dims, _) = parse_optional_dimensions(p)?;
    // Optional initializer.
    let mut init = None;
    if p.try_eat(Operator(Op::Assign)) {
        init = Some(parse_expr(p)?);
    }
    full_span.expand(p.last_span());
    Ok(VarDeclName::new(
        full_span,
        VarDeclNameData {
            name,
            name_span,
            dims,
            init,
        },
    ))
}
/// Parse one declarator of a genvar declaration: `name [= init]`.
fn parse_genvar_decl<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<GenvarDecl<'n>> {
    let mut full_span = p.peek(0).1;
    let name = parse_identifier_name(p, "genvar name")?;
    // Optional initializer.
    let mut init = None;
    if p.try_eat(Operator(Op::Assign)) {
        init = Some(parse_expr(p)?);
    }
    full_span.expand(p.last_span());
    Ok(GenvarDecl::new(full_span, GenvarDeclData { name, init }))
}
/// Parse one item at generate scope: `for`/`if`/`case` generate
/// constructs get dedicated parsers, everything else is a regular item.
fn parse_generate_item<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Item<'n>> {
    let mut full_span = p.peek(0).1;
    let data;
    match p.peek(0).0 {
        Keyword(Kw::For) => data = ItemData::GenerateFor(parse_generate_for(p)?),
        Keyword(Kw::If) => data = ItemData::GenerateIf(parse_generate_if(p)?),
        Keyword(Kw::Case) => data = ItemData::GenerateCase(parse_generate_case(p)?),
        _ => return parse_item(p),
    }
    full_span.expand(p.last_span());
    Ok(Item::new(full_span, data))
}
/// Parse a for-generate construct: `for (init; cond; step) block`.
fn parse_generate_for<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<GenerateFor<'n>> {
    let mut full_span = p.peek(0).1;
    p.require_reported(Keyword(Kw::For))?;
    let header = flanked(p, Paren, |p| {
        // The init statement consumes its own trailing semicolon.
        let init = parse_stmt(p)?;
        let cond = parse_expr(p)?;
        p.require_reported(Semicolon)?;
        let step = parse_expr(p)?;
        Ok((init, cond, step))
    })?;
    let (init, cond, step) = header;
    let block = parse_generate_block(p)?;
    full_span.expand(p.last_span());
    Ok(GenerateFor::new(
        full_span,
        GenerateForData {
            init,
            cond,
            step,
            block,
        },
    ))
}
/// Parse an if-generate construct: `if (cond) block [else block]`.
fn parse_generate_if<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<GenerateIf<'n>> {
    let mut full_span = p.peek(0).1;
    p.require_reported(Keyword(Kw::If))?;
    let cond = flanked(p, Paren, parse_expr)?;
    let main_block = parse_generate_block(p)?;
    // Optional `else` block.
    let mut else_block = None;
    if p.try_eat(Keyword(Kw::Else)) {
        else_block = Some(parse_generate_block(p)?);
    }
    full_span.expand(p.last_span());
    Ok(GenerateIf::new(
        full_span,
        GenerateIfData {
            cond,
            main_block,
            else_block,
        },
    ))
}
fn parse_generate_case<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<GenerateCase<'n>> {
let mut span = p.peek(0).1;
p.require_reported(Keyword(Kw::Case))?;
p.recover_balanced(&[Keyword(Kw::Endcase)], true);
if p.try_eat(Colon) {
parse_identifier_name(p, "generate block label")?;
}
span.expand(p.last_span());
p.add_diag(DiagBuilder2::error("case-generate statements not supported").span(span));
Ok(GenerateCase::new(span, GenerateCaseData {}))
}
/// Parse a generate block: either a single generate item, or a
/// `begin … end` region. Labels may appear before `begin`, after
/// `begin :`, and after `end :`; all spellings are cross-checked.
fn parse_generate_block<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<GenerateBlock<'n>> {
    let mut span = p.peek(0).1;
    // Optional `label :` before the `begin` keyword.
    let mut label = if p.is_ident() && p.peek(1).0 == Colon {
        let n = parse_identifier_name(p, "generate block label")?;
        p.require_reported(Colon)?;
        Some(n)
    } else {
        None
    };
    if !p.try_eat(OpenDelim(Bgend)) {
        // Without `begin` only a single item is allowed, and a leading
        // label makes no sense.
        if label.is_some() {
            let (t, q) = p.peek(0);
            p.add_diag(
                DiagBuilder2::error(format!(
                    "expected `begin` keyword after generate block label, found {} instead",
                    t
                ))
                .span(q),
            );
            return Err(());
        }
        let item = parse_generate_item(p)?;
        span.expand(p.last_span());
        return Ok(GenerateBlock::new(
            span,
            GenerateBlockData {
                label: label,
                items: vec![item],
            },
        ));
    }
    // Optional `begin : label`; must agree with a leading label if both
    // are given.
    if p.try_eat(Colon) {
        let n = parse_identifier_name(p, "generate block label")?;
        if let Some(existing) = label {
            if existing.value == n.value {
                p.add_diag(
                    DiagBuilder2::warning(format!("Generate block {} labelled twice", n))
                        .span(n.span),
                );
            } else {
                p.add_diag(
                    DiagBuilder2::error(format!(
                        "Generate block given conflicting labels {} and {}",
                        existing, n
                    ))
                    .span(n.span),
                );
                return Err(());
            }
        } else {
            label = Some(n);
        }
    }
    let items = repeat_until(p, CloseDelim(Bgend), parse_generate_item)?;
    p.require_reported(CloseDelim(Bgend))?;
    // Optional `end : label`; must match the label given at the start.
    if p.try_eat(Colon) {
        let n = parse_identifier_name(p, "generate block label")?;
        if let Some(existing) = label {
            if existing.value != n.value {
                p.add_diag(
                    DiagBuilder2::error(format!(
                        "Label {} given after generate block does not match label {} given before \
                         the block",
                        n, existing
                    ))
                    .span(n.span),
                );
                return Err(());
            }
        } else {
            p.add_diag(
                DiagBuilder2::warning(format!(
                    "Generate block has trailing label {}, but is missing leading label",
                    n
                ))
                .span(n.span),
            );
        }
    }
    span.expand(p.last_span());
    Ok(GenerateBlock::new(
        span,
        GenerateBlockData {
            label: label,
            items: items,
        },
    ))
}
/// Parse a class declaration, from the optional `virtual`/`interface`
/// qualifiers through `endclass [: name]`.
///
/// The body is parsed inside `recovered` so that on an error the parser
/// can resynchronize on `endclass`, which is then required
/// unconditionally before the body result is inspected.
fn parse_class_decl<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ClassDecl<'n>> {
    let mut span = p.peek(0).1;
    let result = recovered(p, Keyword(Kw::Endclass), |p| {
        let virt = p.try_eat(Keyword(Kw::Virtual));
        let intf = p.try_eat(Keyword(Kw::Interface));
        p.require_reported(Keyword(Kw::Class))?;
        // Optional lifetime specifier; defaults to static.
        let lifetime = match as_lifetime(p.peek(0).0) {
            Some(l) => {
                p.bump();
                l
            }
            None => Lifetime::Static,
        };
        let name = parse_identifier_name(p, "class name")?;
        // Optional parameter port list `#(…)`.
        let params = if p.try_eat(Hashtag) {
            parse_parameter_port_list(p)?
        } else {
            Vec::new()
        };
        // Optional `extends superclass [(args)]`.
        let extends = if p.try_eat(Keyword(Kw::Extends)) {
            let superclass = parse_data_type(p)?;
            let args = try_flanked(p, Paren, parse_call_args)?.unwrap_or(Vec::new());
            Some((superclass, args))
        } else {
            None
        };
        // Optional `implements intf {, intf}`.
        let impls = if p.try_eat(Keyword(Kw::Implements)) {
            comma_list_nonempty(p, Semicolon, "interface class", |p| {
                parse_identifier_name(p, "class name")
            })?
        } else {
            vec![]
        };
        p.require_reported(Semicolon)?;
        // Interface classes parse their subroutines as prototypes.
        let items = repeat_until(p, Keyword(Kw::Endclass), |p| parse_class_item(p, intf))?;
        Ok((virt, lifetime, name, params, extends, impls, items))
    });
    p.require_reported(Keyword(Kw::Endclass))?;
    let (virt, lifetime, name, params, extends, impls, items) = result?;
    // Optional `endclass : name`; must repeat the class name.
    if p.try_eat(Colon) {
        let n = parse_identifier_name(p, "class name")?;
        if n.value != name.value {
            p.add_diag(
                DiagBuilder2::error(format!(
                    "Class name {} disagrees with name {} given before",
                    n, name
                ))
                .span(n.span),
            );
            return Err(());
        }
    }
    span.expand(p.last_span());
    Ok(ClassDecl::new(
        span,
        ClassDeclData {
            virt,
            lifetime,
            name,
            params,
            extends,
            impls,
            items,
        },
    ))
}
/// Parse a single item inside a class body. `intf` marks an interface
/// class, in which case subroutines are parsed as prototypes instead of
/// full declarations.
fn parse_class_item<'n>(
    p: &mut dyn AbstractParser<'n>,
    intf: bool,
) -> ReportedResult<ClassItem<'n>> {
    let mut span = p.peek(0).1;
    // A stray `;` is a null item.
    if p.try_eat(Semicolon) {
        return Ok(ClassItem {
            span,
            qualifiers: vec![],
            data: ClassItemData::Null,
        });
    }
    // Items that are unambiguous from their first keyword.
    match p.peek(0).0 {
        Keyword(Kw::Localparam) | Keyword(Kw::Parameter) => {
            let decl = parse_param_decl(p, false)?;
            span.expand(p.last_span());
            p.require_reported(Semicolon)?;
            return Ok(ClassItem {
                span,
                qualifiers: vec![],
                data: ClassItemData::ParamDecl(decl),
            });
        }
        Keyword(Kw::Extern) => {
            p.bump();
            let proto = parse_subroutine_prototype(p)?;
            span.expand(p.last_span());
            return Ok(ClassItem {
                span,
                qualifiers: vec![],
                data: ClassItemData::ExternSubroutine(proto),
            });
        }
        Keyword(Kw::Typedef) => {
            let def = parse_typedef(p)?;
            span.expand(p.last_span());
            return Ok(ClassItem {
                span,
                qualifiers: vec![],
                data: ClassItemData::Typedef(def),
            });
        }
        _ => (),
    }
    // Remaining items may carry qualifiers and are disambiguated by
    // speculative parsing.
    let qualifiers = parse_class_item_qualifiers(p)?;
    let data = {
        let mut pp = ParallelParser::new();
        pp.add("class property", |p| {
            let ty = parse_data_type(p)?;
            let names = comma_list_nonempty(
                p,
                Semicolon,
                "data declaration",
                parse_variable_decl_assignment,
            )?;
            p.require_reported(Semicolon)?;
            // NOTE(review): the parsed type and declarators are dropped —
            // `ClassItemData::Property` carries no payload. Presumably a
            // placeholder; confirm before relying on the resulting AST.
            Ok(ClassItemData::Property)
        });
        if intf {
            pp.add("class function or task prototype", |p| {
                parse_subroutine_prototype(p).map(ClassItemData::ExternSubroutine)
            });
        } else {
            pp.add("class function or task", |p| {
                parse_subroutine_decl(p).map(ClassItemData::SubroutineDecl)
            });
        }
        pp.add("class constraint", |p| {
            parse_constraint(p).map(ClassItemData::Constraint)
        });
        pp.finish(p, "class item")?
    };
    span.expand(p.last_span());
    Ok(ClassItem {
        span: span,
        qualifiers: qualifiers,
        data: data,
    })
}
/// Collect any run of qualifier keywords (`static`, `protected`, `rand`,
/// …) preceding a class item, together with their spans.
fn parse_class_item_qualifiers<'n>(
    p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<Vec<(ClassItemQualifier, Span)>> {
    let mut quals = Vec::new();
    loop {
        let (tkn, sp) = p.peek(0);
        let qual = match tkn {
            Keyword(Kw::Static) => ClassItemQualifier::Static,
            Keyword(Kw::Protected) => ClassItemQualifier::Protected,
            Keyword(Kw::Local) => ClassItemQualifier::Local,
            Keyword(Kw::Rand) => ClassItemQualifier::Rand,
            Keyword(Kw::Randc) => ClassItemQualifier::Randc,
            Keyword(Kw::Pure) => ClassItemQualifier::Pure,
            Keyword(Kw::Virtual) => ClassItemQualifier::Virtual,
            Keyword(Kw::Const) => ClassItemQualifier::Const,
            // First non-qualifier token ends the run.
            _ => return Ok(quals),
        };
        quals.push((qual, sp));
        p.bump();
    }
}
/// Unimplemented stub for parsing a class method; always fails.
/// NOTE(review): appears unused (file-wide `allow(dead_code)`) and the
/// `println!` is leftover debug output — confirm and clean up if this is
/// ever wired into the parser.
fn parse_class_method<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ClassItem<'n>> {
    println!("Parsing class method");
    Err(())
}
/// Unimplemented stub for parsing a class property; consumes an optional
/// `rand` qualifier, then always fails.
/// NOTE(review): appears unused (file-wide `allow(dead_code)`) and the
/// `println!` is leftover debug output — confirm and clean up if this is
/// ever wired into the parser.
fn parse_class_property<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ClassItem<'n>> {
    println!("Parsing class property");
    p.try_eat(Keyword(Kw::Rand));
    Err(())
}
/// Parse a class constraint: `[extern|pure] [static] constraint name
/// (; | { constraint_item* })`.
fn parse_constraint<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Constraint<'n>> {
    let mut span = p.peek(0).1;
    // Leading `extern`/`pure` mark out-of-class constraint prototypes.
    let mut kind = match p.peek(0).0 {
        Keyword(Kw::Extern) => {
            p.bump();
            ConstraintKind::ExternProto
        }
        Keyword(Kw::Pure) => {
            p.bump();
            ConstraintKind::PureProto
        }
        _ => ConstraintKind::Decl,
    };
    let kind_span = span;
    let statik = p.try_eat(Keyword(Kw::Static));
    p.require_reported(Keyword(Kw::Constraint))?;
    let (name, name_span) = p.eat_ident("constraint name")?;
    let items = if p.try_eat(Semicolon) {
        // A declaration terminated by `;` is an in-class prototype.
        // BUG FIX: this reclassification used to be bound to a shadowed
        // inner `kind` that was immediately dropped, so `Proto` never
        // reached the returned AST.
        if kind == ConstraintKind::Decl {
            kind = ConstraintKind::Proto;
        }
        Vec::new()
    } else {
        // A body is only valid for plain declarations.
        if kind == ConstraintKind::ExternProto || kind == ConstraintKind::PureProto {
            p.add_diag(
                DiagBuilder2::error("Only constraint prototypes can be extern or pure")
                    .span(kind_span),
            );
            return Err(());
        }
        flanked(p, Brace, |p| {
            repeat_until(p, CloseDelim(Brace), parse_constraint_item)
        })?
    };
    span.expand(p.last_span());
    Ok(Constraint {
        span: span,
        kind: kind,
        statik: statik,
        name: name,
        name_span: name_span,
        items: items,
    })
}
/// Parse one constraint item and wrap it with its source span.
fn parse_constraint_item<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ConstraintItem<'n>> {
    let mut item_span = p.peek(0).1;
    let data = parse_constraint_item_data(p)?;
    item_span.expand(p.last_span());
    Ok(ConstraintItem {
        span: item_span,
        data,
    })
}
/// Parse the payload of a constraint item. `if` and `foreach` constraint
/// items are recognized but not implemented yet; everything else is an
/// expression constraint terminated by `;`.
fn parse_constraint_item_data<'n>(
    p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<ConstraintItemData<'n>> {
    if p.try_eat(Keyword(Kw::If)) {
        let q = p.last_span();
        p.add_diag(DiagBuilder2::error("Don't know how to parse `if` constraint items").span(q));
        return Err(());
    }
    if p.try_eat(Keyword(Kw::Foreach)) {
        let q = p.last_span();
        p.add_diag(
            DiagBuilder2::error("Don't know how to parse `foreach` constraint items").span(q),
        );
        return Err(());
    }
    let expr = parse_expr(p)?;
    p.require_reported(Semicolon)?;
    Ok(ConstraintItemData::Expr(expr))
}
/// Tries several grammar productions speculatively on the same token
/// stream and keeps the one that succeeds (see `finish`).
///
/// Each branch is a named parse function plus a "greedy" flag; a greedy
/// branch commits as soon as it succeeds instead of competing with the
/// remaining branches.
struct ParallelParser<'a, 'n, R: Clone> {
    // (branch name, parse function, greedy flag)
    branches: Vec<(
        String,
        Box<dyn FnMut(&mut dyn AbstractParser<'n>) -> ReportedResult<R> + 'a>,
        bool,
    )>,
}
impl<'a, 'n, R: Clone> ParallelParser<'a, 'n, R> {
    /// Create a parser with no branches registered.
    pub fn new() -> Self {
        ParallelParser {
            branches: Vec::new(),
        }
    }
    /// Register a branch; on success it competes with the other branches.
    pub fn add<F>(&mut self, name: &str, func: F)
    where
        F: FnMut(&mut dyn AbstractParser<'n>) -> ReportedResult<R> + 'a,
    {
        self.branches.push((name.to_owned(), Box::new(func), false));
    }
    /// Register a greedy branch; on success it commits immediately and
    /// short-circuits all remaining branches.
    pub fn add_greedy<F>(&mut self, name: &str, func: F)
    where
        F: FnMut(&mut dyn AbstractParser<'n>) -> ReportedResult<R> + 'a,
    {
        self.branches.push((name.to_owned(), Box::new(func), true));
    }
    /// Run every branch on a speculative `BranchParser` and resolve:
    /// exactly one success replays its tokens and diagnostics; multiple
    /// successes report ambiguity; no success reports the failure(s)
    /// that made the most progress.
    pub fn finish(self, p: &mut dyn AbstractParser<'n>, msg: &str) -> ReportedResult<R> {
        let (tkn, q) = p.peek(0);
        let mut results = Vec::new();
        let mut matched = Vec::new();
        for (name, mut func, greedy) in self.branches {
            let mut bp = BranchParser::new(p);
            match func(&mut bp) {
                Ok(x) => {
                    if greedy {
                        bp.commit();
                        return Ok(x);
                    } else {
                        let sp = bp.last_span();
                        results.push((name, bp.consumed, bp.diagnostics, x, Span::union(q, sp)));
                    }
                }
                // Failed branches are scored by net progress: tokens
                // consumed minus tokens skipped during recovery.
                Err(_) => matched.push((
                    name,
                    bp.consumed() - bp.skipped(),
                    bp.consumed(),
                    bp.diagnostics,
                )),
            }
        }
        if results.len() > 1 {
            // Ambiguity: build a human-readable list of the candidates.
            let mut names = String::new();
            names.push_str(&results[0].0);
            if results.len() == 2 {
                names.push_str(" or ");
                names.push_str(&results[1].0);
            } else {
                for &(ref name, _, _, _, _) in &results[..results.len() - 1] {
                    names.push_str(", ");
                    names.push_str(&name);
                }
                names.push_str(", or ");
                names.push_str(&results[results.len() - 1].0);
            }
            p.add_diag(DiagBuilder2::fatal(format!("ambiguous code, could be {}", names)).span(q));
            for &(ref name, _, _, _, span) in &results {
                p.add_diag(DiagBuilder2::note(format!("{} would be this part", name)).span(span));
            }
            Err(())
        } else if let Some(&(_, consumed, ref diagnostics, ref res, _)) = results.last() {
            // Single success: replay its buffered diagnostics and consume
            // its tokens on the real parser.
            for d in diagnostics {
                p.add_diag(d.clone());
            }
            for _ in 0..consumed {
                p.bump();
            }
            Ok((*res).clone())
        } else {
            // All branches failed: keep only the highest-scoring failures
            // and report their diagnostics.
            // NOTE(review): indexes `matched[0]` — this would panic if
            // `finish` were called with no branches registered; all
            // visible callers register at least one.
            matched.sort_by(|a, b| (b.1).cmp(&a.1));
            let highest_score = matched[0].1;
            let highest_consumed = matched[0].2;
            let errors = matched
                .into_iter()
                .take_while(|e| e.1 == highest_score)
                .collect::<Vec<_>>();
            let num_errors = errors.len();
            if num_errors != 1 {
                p.add_diag(
                    DiagBuilder2::error(format!("expected {}, found `{}` instead", msg, tkn))
                        .span(q),
                );
                for (name, _, _, ds) in errors {
                    p.add_diag(DiagBuilder2::note(format!("parsing as {}:", name)));
                    for d in ds {
                        p.add_diag(d);
                    }
                }
            } else {
                for d in errors.into_iter().next().unwrap().3 {
                    p.add_diag(d);
                }
            }
            // Consume the tokens of the furthest-advancing branch so the
            // caller does not re-parse the same broken input.
            for _ in 0..highest_consumed {
                p.bump();
            }
            Err(())
        }
    }
}
/// Speculative wrapper around an `AbstractParser`, used by
/// `ParallelParser`: tokens are only counted rather than consumed, and
/// diagnostics are buffered until `commit` replays them.
struct BranchParser<'tp, 'n> {
    parser: &'tp mut dyn AbstractParser<'n>,
    // Tokens consumed so far, relative to the underlying parser.
    consumed: usize,
    // Subset of `consumed` that was skipped during error recovery.
    skipped: usize,
    // Diagnostics buffered until `commit`.
    diagnostics: Vec<DiagBuilder2>,
    last_span: Span,
    // Worst severity seen among the buffered diagnostics.
    severity: Severity,
}
impl<'tp, 'n> BranchParser<'tp, 'n> {
    /// Create a speculative parser on top of `parser`; nothing is
    /// consumed from the underlying parser until `commit` is called.
    pub fn new(parser: &'tp mut dyn AbstractParser<'n>) -> Self {
        let last = parser.last_span();
        BranchParser {
            parser: parser,
            consumed: 0,
            skipped: 0,
            diagnostics: Vec::new(),
            last_span: last,
            severity: Severity::Note,
        }
    }
    /// Number of tokens consumed via `skip` (error recovery).
    pub fn skipped(&self) -> usize {
        self.skipped
    }
    /// Replay the speculative progress onto the underlying parser:
    /// consume the counted tokens and forward the buffered diagnostics.
    pub fn commit(self) {
        for _ in 0..self.consumed {
            self.parser.bump();
        }
        for d in self.diagnostics {
            self.parser.add_diag(d);
        }
    }
}
// `AbstractParser` facade that buffers consumption and diagnostics:
// `peek` looks past the tokens consumed so far, `bump` merely counts,
// and diagnostics are queued instead of reported.
impl<'tp, 'n> AbstractParser<'n> for BranchParser<'tp, 'n> {
    fn arena(&self) -> &'n ast::Arena<'n> {
        self.parser.arena()
    }
    // Look ahead relative to the tokens this branch already consumed.
    fn peek(&mut self, offset: usize) -> TokenAndSpan {
        self.parser.peek(self.consumed + offset)
    }
    // Consuming a token only advances the local counter; the underlying
    // parser stays untouched until `commit`.
    fn bump(&mut self) {
        self.last_span = self.parser.peek(self.consumed).1;
        self.consumed += 1;
    }
    fn skip(&mut self) {
        self.bump();
        self.skipped += 1;
    }
    fn consumed(&self) -> usize {
        self.consumed
    }
    fn last_span(&self) -> Span {
        self.last_span
    }
    // Buffer the diagnostic and track the worst severity seen so far.
    fn add_diag(&mut self, diag: DiagBuilder2) {
        if diag.severity > self.severity {
            self.severity = diag.severity;
        }
        self.diagnostics.push(diag);
    }
    fn severity(&self) -> Severity {
        self.severity
    }
}
/// Parse a typedef: either a forward declaration
/// (`typedef [enum] name;`) or a full `typedef type name [dims];`.
///
/// The forward form is tried speculatively on a `BranchParser` and only
/// committed when both the name and the semicolon match.
fn parse_typedef<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Typedef<'n>> {
    let mut span = p.peek(0).1;
    p.require_reported(Keyword(Kw::Typedef))?;
    {
        let mut bp = BranchParser::new(p);
        if bp.peek(0).0 == Keyword(Kw::Enum) {
            bp.bump();
        }
        let name = parse_identifier_name(&mut bp, "type name");
        let semi = bp.require_reported(Semicolon);
        // CLEANUP: match the semicolon result as `Ok(())` instead of
        // binding an unused `semi` value.
        if let (Ok(name), Ok(())) = (name, semi) {
            bp.commit();
            span.expand(p.last_span());
            // Forward declarations carry a placeholder implicit type.
            let ty = Type::new(
                span,
                TypeData {
                    kind: TypeKind::new(
                        span,
                        ast::ForwardType {
                            kind: Box::new(TypeKind::new(span, ImplicitType)),
                        },
                    ),
                    sign: TypeSign::None,
                    dims: Vec::default(),
                },
            );
            return Ok(Typedef::new(
                span,
                TypedefData {
                    name,
                    ty,
                    dims: Vec::default(),
                },
            ));
        }
    }
    // Regular typedef: explicit type, name, optional dimensions.
    let ty = parse_explicit_type(p)?;
    let name = parse_identifier_name(p, "type name")?;
    let (dims, _) = parse_optional_dimensions(p)?;
    p.require_reported(Semicolon)?;
    span.expand(p.last_span());
    Ok(Typedef::new(span, TypedefData { name, ty, dims }))
}
/// Parse a port declaration: direction, optional net/`var` kind, an
/// explicit or implicit data type (disambiguated speculatively), and one
/// or more declarators terminated by `;`.
fn parse_port_decl<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<PortDecl<'n>> {
    let mut span = p.peek(0).1;
    // Mandatory port direction.
    let dir = match as_port_direction(p.peek(0).0) {
        Some(x) => {
            p.bump();
            x
        }
        None => {
            p.add_diag(
                DiagBuilder2::error("expected port direction (inout, input, output, or ref)")
                    .span(span),
            );
            return Err(());
        }
    };
    // Optional net type keyword (`wire`, `tri`, …) or `var`.
    let kind = if let Some(ty) = as_net_type(p.peek(0).0) {
        p.bump();
        Some(VarKind::Net {
            ty,
            kind: NetKind::None,
        })
    } else if p.try_eat(Keyword(Kw::Var)) {
        Some(VarKind::Var)
    } else {
        None
    };
    // The type is ambiguous (`input foo bar;` vs `input bar;`), so try
    // both readings speculatively.
    let mut pp = ParallelParser::new();
    pp.add("explicit type", |p| {
        let ty = parse_explicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    pp.add("implicit type", |p| {
        let ty = parse_implicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    let (ty, names) = pp.finish(p, "explicit or implicit type")?;
    // Shared continuation for both branches: declarator list plus `;`.
    fn tail<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Vec<VarDeclName<'n>>> {
        let names = comma_list_nonempty(
            p,
            Semicolon,
            "port declaration",
            parse_variable_decl_assignment,
        )?;
        p.require_reported(Semicolon)?;
        Ok(names)
    }
    span.expand(p.last_span());
    Ok(ast::PortDecl::new(
        span,
        ast::PortDeclData {
            dir,
            kind,
            ty,
            names,
        },
    ))
}
/// Translate a net-type keyword into its AST representation, or `None`
/// for any other token.
fn as_net_type(tkn: Token) -> Option<NetType> {
    let ty = match tkn {
        Keyword(Kw::Supply0) => NetType::Supply0,
        Keyword(Kw::Supply1) => NetType::Supply1,
        Keyword(Kw::Tri) => NetType::Tri,
        Keyword(Kw::Triand) => NetType::TriAnd,
        Keyword(Kw::Trior) => NetType::TriOr,
        Keyword(Kw::Trireg) => NetType::TriReg,
        Keyword(Kw::Tri0) => NetType::Tri0,
        Keyword(Kw::Tri1) => NetType::Tri1,
        Keyword(Kw::Uwire) => NetType::Uwire,
        Keyword(Kw::Wire) => NetType::Wire,
        Keyword(Kw::Wand) => NetType::WireAnd,
        Keyword(Kw::Wor) => NetType::WireOr,
        _ => return None,
    };
    Some(ty)
}
/// Parse a net declaration: net type, optional `(strength)` and
/// `vectored`/`scalared` modifier, an explicit or implicit data type
/// (disambiguated speculatively), an optional delay, and declarators.
fn parse_net_decl<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<NetDecl<'n>> {
    let mut span = p.peek(0).1;
    // Mandatory net type keyword (`wire`, `tri`, …).
    let net_type = match as_net_type(p.peek(0).0) {
        Some(x) => {
            p.bump();
            x
        }
        None => {
            let q = p.peek(0).1;
            p.add_diag(DiagBuilder2::error("expected net type").span(q));
            return Err(());
        }
    };
    // Optional parenthesized drive or charge strength.
    let strength = try_flanked(p, Paren, parse_net_strength)?;
    // Optional `vectored`/`scalared` modifier.
    let kind = match p.peek(0).0 {
        Keyword(Kw::Vectored) => {
            p.bump();
            NetKind::Vectored
        }
        Keyword(Kw::Scalared) => {
            p.bump();
            NetKind::Scalared
        }
        _ => NetKind::None,
    };
    // The data type is ambiguous (`wire foo bar;` vs `wire bar;`), so
    // try both readings speculatively.
    let mut pp = ParallelParser::new();
    pp.add("explicit type", |p| {
        let ty = parse_explicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    pp.add("implicit type", |p| {
        let ty = parse_implicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    let (ty, (delay, names)) = pp.finish(p, "explicit or implicit type")?;
    // Shared continuation for both branches: optional delay, declarator
    // list, closing `;`.
    fn tail<'n>(
        p: &mut dyn AbstractParser<'n>,
    ) -> ReportedResult<(Option<DelayControl<'n>>, Vec<VarDeclName<'n>>)> {
        let delay = try_delay_control(p)?;
        let names = comma_list_nonempty(
            p,
            Semicolon,
            "net declaration",
            parse_variable_decl_assignment,
        )?;
        p.require_reported(Semicolon)?;
        Ok((delay, names))
    }
    span.expand(p.last_span());
    Ok(ast::NetDecl::new(
        span,
        ast::NetDeclData {
            net_type,
            strength,
            kind,
            ty,
            delay,
            names,
        },
    ))
}
fn try_drive_strength<'n>(
p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<Option<(DriveStrength, DriveStrength)>> {
if let Some(a) = as_drive_strength(p.peek(0).0) {
p.bump();
p.require_reported(Comma)?;
if let Some(b) = as_drive_strength(p.peek(0).0) {
p.bump();
Ok(Some((a, b)))
} else {
let q = p.peek(0).1;
p.add_diag(DiagBuilder2::error("expected second drive strength").span(q));
Err(())
}
} else {
Ok(None)
}
}
fn parse_net_strength<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<NetStrength> {
if let Some((a, b)) = try_drive_strength(p)? {
Ok(NetStrength::Drive(a, b))
} else if let Some(s) = as_charge_strength(p.peek(0).0) {
p.bump();
Ok(NetStrength::Charge(s))
} else {
let q = p.peek(0).1;
p.add_diag(DiagBuilder2::error("expected drive or charge strength").span(q));
Err(())
}
}
/// Translate a drive strength keyword into its AST representation, or
/// `None` for any other token.
fn as_drive_strength(tkn: Token) -> Option<DriveStrength> {
    let strength = match tkn {
        Keyword(Kw::Supply0) => DriveStrength::Supply0,
        Keyword(Kw::Strong0) => DriveStrength::Strong0,
        Keyword(Kw::Pull0) => DriveStrength::Pull0,
        Keyword(Kw::Weak0) => DriveStrength::Weak0,
        Keyword(Kw::Highz0) => DriveStrength::HighZ0,
        Keyword(Kw::Supply1) => DriveStrength::Supply1,
        Keyword(Kw::Strong1) => DriveStrength::Strong1,
        Keyword(Kw::Pull1) => DriveStrength::Pull1,
        Keyword(Kw::Weak1) => DriveStrength::Weak1,
        Keyword(Kw::Highz1) => DriveStrength::HighZ1,
        _ => return None,
    };
    Some(strength)
}
fn as_charge_strength(tkn: Token) -> Option<ChargeStrength> {
match tkn {
Keyword(Kw::Small) => Some(ChargeStrength::Small),
Keyword(Kw::Medium) => Some(ChargeStrength::Medium),
Keyword(Kw::Large) => Some(ChargeStrength::Large),
_ => None,
}
}
/// Parse a DPI declaration, which starts with either `import` or `export`.
///
/// An import parses as
/// `import <spec-string> [context|pure] [<ident> =] <subroutine-prototype>`;
/// an export parses as
/// `export <spec-string> [<ident> =] <task|function-kind> <name> ;`.
fn parse_dpi_decl<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ast::DpiDecl<'n>> {
    // Remember the start of the declaration; expanded to the full extent below.
    let mut span = p.peek(0).1;
    let data = if p.try_eat(Keyword(Kw::Import)) {
        // The specifier string, e.g. `"DPI-C"`.
        let spec = parse_string_literal(p, "DPI specifier string")?;
        // Optional `context` or `pure` property keyword.
        let property = if p.try_eat(Keyword(Kw::Context)) {
            Some(Spanned::new(ast::DpiProperty::Context, p.last_span()))
        } else if p.try_eat(Keyword(Kw::Pure)) {
            Some(Spanned::new(ast::DpiProperty::Pure, p.last_span()))
        } else {
            None
        };
        // Optional `<c-identifier> =` linkage name before the prototype.
        let cident = if let Some(ident) = try_identifier_name(p)? {
            p.require_reported(Operator(Op::Assign))?;
            Some(ident)
        } else {
            None
        };
        let prototype = parse_subroutine_prototype(p)?;
        ast::DpiDeclData::Import {
            spec,
            property,
            cident,
            prototype,
        }
    } else if p.try_eat(Keyword(Kw::Export)) {
        // The specifier string, e.g. `"DPI-C"`.
        let spec = parse_string_literal(p, "DPI specifier string")?;
        // Optional `<c-identifier> =` linkage name before the exported item.
        let cident = if let Some(ident) = try_identifier_name(p)? {
            p.require_reported(Operator(Op::Assign))?;
            Some(ident)
        } else {
            None
        };
        // Subroutine kind followed by the exported subroutine's name.
        let kind = parse_subroutine_kind(p)?;
        let name = parse_identifier_name(p, "DPI task/function name")?;
        p.require_reported(Semicolon)?;
        ast::DpiDeclData::Export {
            spec,
            cident,
            kind,
            name,
        }
    } else {
        let span = p.peek(0).1;
        p.add_diag(
            DiagBuilder2::error("expected `import` or `export` at the start of a DPI declaration")
                .span(span),
        );
        return Err(());
    };
    span.expand(p.last_span());
    Ok(ast::DpiDecl::new(span, data))
}
/// Parse an `import` declaration, e.g. `import foo::*, bar::baz;`.
///
/// Each comma-separated item is `<pkg>::*` (wildcard, `name: None`) or
/// `<pkg>::<ident>` (a single named import).
fn parse_import_decl<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ImportDecl<'n>> {
    let mut span = p.peek(0).1;
    p.require_reported(Keyword(Kw::Import))?;
    let items = comma_list_nonempty(p, Semicolon, "import item", |p| {
        let mut span = p.peek(0).1;
        let pkg = parse_identifier_name(p, "package name")?;
        p.require_reported(Namespace)?;
        // After `::` only `*` or an identifier is valid.
        let (tkn, sp) = p.peek(0);
        match tkn {
            Operator(Op::Mul) => {
                p.bump();
                span.expand(p.last_span());
                Ok(ImportItem::new(span, ImportItemData { pkg, name: None }))
            }
            Ident(n) | EscIdent(n) => {
                p.bump();
                span.expand(p.last_span());
                Ok(ImportItem::new(
                    span,
                    ImportItemData {
                        pkg,
                        name: Some(Spanned::new(n, sp)),
                    },
                ))
            }
            _ => {
                p.add_diag(
                    DiagBuilder2::error(
                        "expected identifier or `*` after `::` in import declaration",
                    )
                    .span(sp),
                );
                Err(())
            }
        }
    })?;
    p.require_reported(Semicolon)?;
    span.expand(p.last_span());
    Ok(ImportDecl::new(span, ImportDeclData { items }))
}
/// Parse an immediate, deferred, or concurrent assertion.
///
/// The next few tokens are inspected up front to classify the assertion:
/// a following `property`/`sequence` keyword selects the concurrent forms,
/// a following `#0` selects a deferred-observed assertion, and a following
/// `final` selects a deferred-final assertion. The label field of the
/// returned `Assertion` is always `None` here; labeling is presumably
/// handled by the caller — TODO confirm.
fn parse_assertion<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Assertion<'n>> {
    let mut span = p.peek(0).1;
    // Interned name for the literal `0`, used to detect the `#0` in
    // deferred-observed assertions (`assert #0 (...)`).
    let null = get_name_table().intern("0", false);
    let is_property = p.peek(1).0 == Keyword(Kw::Property);
    let is_sequence = p.peek(1).0 == Keyword(Kw::Sequence);
    let is_deferred_observed = p.peek(1).0 == Hashtag && p.peek(2).0 == Literal(Number(null, None));
    let is_deferred_final = p.peek(1).0 == Keyword(Kw::Final);
    let is_deferred = is_deferred_observed || is_deferred_final;
    let deferred_mode = match is_deferred_final {
        true => AssertionDeferred::Final,
        false => AssertionDeferred::Observed,
    };
    let data = match p.peek(0).0 {
        // `assert property (...)` followed by an action block.
        Keyword(Kw::Assert) if is_property => {
            p.bump();
            p.bump();
            let prop = flanked(p, Paren, parse_property_spec)?;
            let action = parse_assertion_action_block(p)?;
            AssertionData::Concurrent(ConcurrentAssertion::AssertProperty(prop, action))
        }
        // `assume property (...)` followed by an action block.
        Keyword(Kw::Assume) if is_property => {
            p.bump();
            p.bump();
            let prop = flanked(p, Paren, parse_property_spec)?;
            let action = parse_assertion_action_block(p)?;
            AssertionData::Concurrent(ConcurrentAssertion::AssumeProperty(prop, action))
        }
        // `cover property (...)` followed by a single statement.
        Keyword(Kw::Cover) if is_property => {
            p.bump();
            p.bump();
            let prop = flanked(p, Paren, parse_property_spec)?;
            let stmt = parse_stmt(p)?;
            AssertionData::Concurrent(ConcurrentAssertion::CoverProperty(prop, stmt))
        }
        // `cover sequence (...)` is recognized but not implemented.
        Keyword(Kw::Cover) if is_sequence => {
            p.bump();
            p.bump();
            p.add_diag(DiagBuilder2::error("Don't know how to parse cover sequences").span(span));
            return Err(());
        }
        // `expect (...)` followed by an action block.
        Keyword(Kw::Expect) => {
            p.bump();
            let prop = flanked(p, Paren, parse_property_spec)?;
            let action = parse_assertion_action_block(p)?;
            AssertionData::Concurrent(ConcurrentAssertion::ExpectProperty(prop, action))
        }
        // `restrict property (...)` has no action block.
        Keyword(Kw::Restrict) if is_property => {
            p.bump();
            p.bump();
            let prop = flanked(p, Paren, parse_property_spec)?;
            AssertionData::Concurrent(ConcurrentAssertion::RestrictProperty(prop))
        }
        // Immediate or deferred `assert (...)`.
        Keyword(Kw::Assert) => {
            p.bump();
            // Skip `#` `0` (observed) or `final` (one token) if deferred.
            if is_deferred {
                p.bump();
                if is_deferred_observed {
                    p.bump();
                }
            }
            let expr = flanked(p, Paren, parse_expr)?;
            let action = parse_assertion_action_block(p)?;
            let a = BlockingAssertion::Assert(expr, action);
            if is_deferred {
                AssertionData::Deferred(deferred_mode, a)
            } else {
                AssertionData::Immediate(a)
            }
        }
        // Immediate or deferred `assume (...)`.
        Keyword(Kw::Assume) => {
            p.bump();
            // Skip `#` `0` (observed) or `final` (one token) if deferred.
            if is_deferred {
                p.bump();
                if is_deferred_observed {
                    p.bump();
                }
            }
            let expr = flanked(p, Paren, parse_expr)?;
            let action = parse_assertion_action_block(p)?;
            let a = BlockingAssertion::Assume(expr, action);
            if is_deferred {
                AssertionData::Deferred(deferred_mode, a)
            } else {
                AssertionData::Immediate(a)
            }
        }
        // Immediate or deferred `cover (...)` with a statement body.
        Keyword(Kw::Cover) => {
            p.bump();
            // Skip `#` `0` (observed) or `final` (one token) if deferred.
            if is_deferred {
                p.bump();
                if is_deferred_observed {
                    p.bump();
                }
            }
            let expr = flanked(p, Paren, parse_expr)?;
            let stmt = parse_stmt(p)?;
            let a = BlockingAssertion::Cover(expr, stmt);
            if is_deferred {
                AssertionData::Deferred(deferred_mode, a)
            } else {
                AssertionData::Immediate(a)
            }
        }
        _ => {
            p.add_diag(
                DiagBuilder2::error("expected assert, assume, cover, expect, or restrict")
                    .span(span),
            );
            return Err(());
        }
    };
    span.expand(p.last_span());
    Ok(Assertion {
        span: span,
        label: None,
        data: data,
    })
}
/// Parse an assertion action block: a positive statement, an `else`-only
/// negative statement, or a positive statement followed by `else` and a
/// negative statement.
fn parse_assertion_action_block<'n>(
    p: &mut dyn AbstractParser<'n>,
) -> ReportedResult<AssertionActionBlock<'n>> {
    // A leading `else` means there is only a negative branch.
    if p.try_eat(Keyword(Kw::Else)) {
        return Ok(AssertionActionBlock::Negative(parse_stmt(p)?));
    }
    // Otherwise a positive statement, optionally followed by `else <stmt>`.
    let positive = parse_stmt(p)?;
    match p.try_eat(Keyword(Kw::Else)) {
        true => Ok(AssertionActionBlock::Both(positive, parse_stmt(p)?)),
        false => Ok(AssertionActionBlock::Positive(positive)),
    }
}
/// Parse a property specification as used in concurrent assertions.
///
/// Property specifications are not implemented yet: the tokens up to the
/// closing parenthesis of the enclosing `(...)` are skipped and the
/// `PropSpec` placeholder is returned unconditionally.
fn parse_property_spec<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<PropSpec> {
    p.recover_balanced(&[CloseDelim(Paren)], false);
    Ok(PropSpec)
}
/// Precedence levels for property and sequence expression operators.
///
/// The derived `Ord` follows declaration order, so later variants compare
/// greater (bind tighter); `Min` and `Max` act as sentinels when recursing
/// into sub-expressions.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
#[allow(dead_code)]
enum PropSeqPrecedence {
    Min,
    AlEvIfAccRejSyn,
    ImplFollow,
    Until,
    Iff,
    Or,
    And,
    NotNexttime,
    Intersect,
    Within,
    Throughout,
    CycleDelay,
    Brack,
    Max,
}
/// Parse a property expression at minimum precedence.
fn parse_propexpr<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<PropExpr<'n>> {
    parse_propexpr_prec(p, PropSeqPrecedence::Min)
}
/// Parse a property expression at the given precedence.
///
/// Tries a sequence-expression parse and a primary property-expression
/// parse in parallel and keeps whichever branch succeeds, then folds any
/// binary operator suffix onto the result via `parse_propexpr_suffix`.
fn parse_propexpr_prec<'n>(
    p: &mut dyn AbstractParser<'n>,
    precedence: PropSeqPrecedence,
) -> ReportedResult<PropExpr<'n>> {
    let mut span = p.peek(0).1;
    let mut pp = ParallelParser::new();
    pp.add_greedy("sequence expression", move |p| {
        parse_propexpr_seq(p, precedence)
    });
    pp.add_greedy("property expression", move |p| {
        parse_propexpr_nonseq(p, precedence)
    });
    let data = pp.finish(p, "sequence or primary property expression")?;
    span.expand(p.last_span());
    let expr = PropExpr {
        span: span,
        data: data,
    };
    parse_propexpr_suffix(p, expr, precedence)
}
fn parse_propexpr_nonseq<'n>(
p: &mut dyn AbstractParser<'n>,
precedence: PropSeqPrecedence,
) -> ReportedResult<PropExprData<'n>> {
match p.peek(0).0 {
OpenDelim(Paren) => return flanked(p, Paren, parse_propexpr).map(|pe| pe.data),
Keyword(Kw::Not) => {
p.bump();
let expr = parse_propexpr_prec(p, PropSeqPrecedence::NotNexttime)?;
return Ok(PropExprData::Not(Box::new(expr)));
}
At => {
p.bump();
let ev = parse_event_expr(p, EventPrecedence::Min)?;
let expr = parse_propexpr(p)?;
return Ok(PropExprData::Clocked(ev, Box::new(expr)));
}
_ => {
let q = p.peek(0).1;
p.add_diag(DiagBuilder2::error("expected primary property expression").span(q));
return Err(());
}
}
}
/// Parse a property expression that starts with a sequence expression,
/// optionally wrapped in `strong(...)` or `weak(...)`, and optionally
/// followed by an implication/followed-by operator (`|->`, `|=>`, `#-#`,
/// `#=#` per the `Op::Seq*` tokens) and a property on the right-hand side.
fn parse_propexpr_seq<'n>(
    p: &mut dyn AbstractParser<'n>,
    precedence: PropSeqPrecedence,
) -> ReportedResult<PropExprData<'n>> {
    // `strong`/`weak` require the sequence in parentheses; otherwise parse
    // the sequence directly at the current precedence.
    let (seqop, seqexpr) = match p.peek(0).0 {
        Keyword(Kw::Strong) => {
            p.bump();
            (PropSeqOp::Strong, flanked(p, Paren, parse_seqexpr)?)
        }
        Keyword(Kw::Weak) => {
            p.bump();
            (PropSeqOp::Weak, flanked(p, Paren, parse_seqexpr)?)
        }
        _ => (PropSeqOp::None, parse_seqexpr_prec(p, precedence)?),
    };
    // Implication and followed-by operators only bind at ImplFollow or
    // looser precedence.
    if precedence <= PropSeqPrecedence::ImplFollow {
        if let Some(op) = match p.peek(0).0 {
            Operator(Op::SeqImplOl) => Some(PropSeqBinOp::ImplOverlap),
            Operator(Op::SeqImplNol) => Some(PropSeqBinOp::ImplNonoverlap),
            Operator(Op::SeqFollowOl) => Some(PropSeqBinOp::FollowOverlap),
            Operator(Op::SeqFollowNol) => Some(PropSeqBinOp::FollowNonoverlap),
            _ => None,
        } {
            p.bump();
            let expr = parse_propexpr_prec(p, PropSeqPrecedence::ImplFollow)?;
            return Ok(PropExprData::SeqBinOp(op, seqop, seqexpr, Box::new(expr)));
        }
    }
    Ok(PropExprData::SeqOp(seqop, seqexpr))
}
/// Fold a binary property operator suffix onto an already-parsed prefix.
///
/// If the next token is a property binary operator whose precedence allows
/// it to bind here (strictly higher, or equal for right-associative
/// operators), the operator is consumed and the right-hand side is parsed
/// at that operator's precedence; otherwise the prefix is returned as-is.
fn parse_propexpr_suffix<'n>(
    p: &mut dyn AbstractParser<'n>,
    prefix: PropExpr<'n>,
    precedence: PropSeqPrecedence,
) -> ReportedResult<PropExpr<'n>> {
    // (operator, its precedence, right-associative?)
    if let Some((op, prec, rassoc)) = match p.peek(0).0 {
        Keyword(Kw::Or) => Some((PropBinOp::Or, PropSeqPrecedence::Or, false)),
        Keyword(Kw::And) => Some((PropBinOp::And, PropSeqPrecedence::And, false)),
        Keyword(Kw::Until) => Some((PropBinOp::Until, PropSeqPrecedence::Until, true)),
        Keyword(Kw::SUntil) => Some((PropBinOp::SUntil, PropSeqPrecedence::Until, true)),
        Keyword(Kw::UntilWith) => Some((PropBinOp::UntilWith, PropSeqPrecedence::Until, true)),
        Keyword(Kw::SUntilWith) => Some((PropBinOp::SUntilWith, PropSeqPrecedence::Until, true)),
        Keyword(Kw::Implies) => Some((PropBinOp::Impl, PropSeqPrecedence::Until, true)),
        Keyword(Kw::Iff) => Some((PropBinOp::Iff, PropSeqPrecedence::Iff, true)),
        _ => None,
    } {
        if precedence < prec || (rassoc && precedence == prec) {
            p.bump();
            let rhs = parse_propexpr_prec(p, prec)?;
            return Ok(PropExpr {
                span: Span::union(prefix.span, rhs.span),
                data: PropExprData::BinOp(op, Box::new(prefix), Box::new(rhs)),
            });
        }
    }
    Ok(prefix)
}
/// Parse a sequence expression at minimum precedence.
fn parse_seqexpr<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<SeqExpr<'n>> {
    parse_seqexpr_prec(p, PropSeqPrecedence::Min)
}
/// Parse a sequence expression at the given precedence.
///
/// Tries two parses in parallel (`parse_seqexpr_expr` and
/// `parse_seqexpr_nonexpr`) and keeps the successful branch, then applies
/// `parse_seqexpr_suffix` to the result.
fn parse_seqexpr_prec<'n>(
    p: &mut dyn AbstractParser<'n>,
    precedence: PropSeqPrecedence,
) -> ReportedResult<SeqExpr<'n>> {
    let mut span = p.peek(0).1;
    let mut pp = ParallelParser::new();
    pp.add_greedy("expression", move |p| parse_seqexpr_expr(p, precedence));
    pp.add_greedy("sequence", move |p| parse_seqexpr_nonexpr(p, precedence));
    let data = pp.finish(p, "sequence or primary property expression")?;
    span.expand(p.last_span());
    let expr = SeqExpr {
        span: span,
        data: data,
    };
    parse_seqexpr_suffix(p, expr, precedence)
}
/// Parse a sequence expression that does not start with a plain expression.
///
/// Not implemented: this always emits a diagnostic and fails, so the
/// parallel parse in `parse_seqexpr_prec` falls through to the
/// expression-led branch.
///
/// NOTE(review): the `_expr`/`_nonexpr` names appear swapped relative to
/// the "expression"/"sequence" labels used in `parse_seqexpr_prec` —
/// confirm the intended naming before renaming, since both callers would
/// need to change together.
fn parse_seqexpr_expr<'n>(
    p: &mut dyn AbstractParser<'n>,
    precedence: PropSeqPrecedence,
) -> ReportedResult<SeqExprData<'n>> {
    let q = p.peek(0).1;
    p.add_diag(
        DiagBuilder2::error(
            "Don't know how to parse sequence expressions that don't start with an expression",
        )
        .span(q),
    );
    Err(())
}
/// Parse a sequence expression that starts with a plain expression.
///
/// Handles `<expr> throughout <seq>` when the precedence allows it, and an
/// optional bracketed repetition suffix (`[*]`, `[+]`, `[* <expr>]`, ...).
fn parse_seqexpr_nonexpr<'n>(
    p: &mut dyn AbstractParser<'n>,
    precedence: PropSeqPrecedence,
) -> ReportedResult<SeqExprData<'n>> {
    let expr = parse_expr(p)?;
    if precedence <= PropSeqPrecedence::Throughout && p.try_eat(Keyword(Kw::Throughout)) {
        let rhs = parse_seqexpr_prec(p, PropSeqPrecedence::Throughout)?;
        return Ok(SeqExprData::Throughout(expr, Box::new(rhs)));
    }
    // Optional `[...]` repetition after the expression.
    let rep = try_flanked(p, Brack, parse_seqrep)?;
    Ok(SeqExprData::Expr(expr, rep))
}
/// Fold sequence operator suffixes onto an already-parsed prefix.
///
/// NOTE(review): no suffix operators are handled yet — the prefix is
/// returned unchanged and `precedence` is unused. Presumably intended to
/// mirror `parse_propexpr_suffix` for sequence binary operators.
fn parse_seqexpr_suffix<'n>(
    p: &mut dyn AbstractParser<'n>,
    prefix: SeqExpr<'n>,
    precedence: PropSeqPrecedence,
) -> ReportedResult<SeqExpr<'n>> {
    Ok(prefix)
}
/// Parse the interior of a bracketed sequence repetition: `[*]`, `[+]`,
/// `[* <expr>]`, `[= <expr>]`, or `[-> <expr>]`.
fn parse_seqrep<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<SeqRep<'n>> {
    let (tkn, sp) = p.peek(0);
    match tkn {
        // `[+]` — consecutive repetition, one or more.
        Operator(Op::Add) => {
            p.bump();
            Ok(SeqRep::ConsecPlus)
        }
        // `[*]` or `[* <expr>]` — a bare `*` has the closing bracket next.
        Operator(Op::Mul) => {
            p.bump();
            if p.peek(0).0 == CloseDelim(Brack) {
                Ok(SeqRep::ConsecStar)
            } else {
                Ok(SeqRep::Consec(parse_expr(p)?))
            }
        }
        // `[= <expr>]` — nonconsecutive repetition.
        Operator(Op::Assign) => {
            p.bump();
            Ok(SeqRep::Nonconsec(parse_expr(p)?))
        }
        // `[-> <expr>]` — goto repetition.
        Operator(Op::LogicImpl) => {
            p.bump();
            Ok(SeqRep::Goto(parse_expr(p)?))
        }
        _ => {
            p.add_diag(
                DiagBuilder2::error(
                    "expected sequence repetition [+], [*], [* <expr>], [= <expr>], or [-> <expr>]",
                )
                .span(sp),
            );
            Err(())
        }
    }
}
/// Parse a module instantiation, e.g. `foo #(.W(8)) inst_a (...), inst_b (...);`.
///
/// The target module name is followed by optional `#(...)` parameter
/// assignments and a nonempty comma-separated list of named instances,
/// each with optional dimensions and a parenthesized port connection list.
fn parse_inst<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ast::Inst<'n>> {
    let mut span = p.peek(0).1;
    let target = parse_identifier_name(p, "module name")?;
    // Optional `#(...)` parameter assignments after the module name.
    let params = if p.try_eat(Hashtag) {
        parse_parameter_assignments(p)?
    } else {
        Vec::new()
    };
    let names = comma_list_nonempty(p, Semicolon, "hierarchical instance", |p| {
        let mut span = p.peek(0).1;
        let name = parse_identifier_name(p, "instance name")?;
        let (dims, _) = parse_optional_dimensions(p)?;
        let conns = flanked(p, Paren, parse_list_of_port_connections)?;
        span.expand(p.last_span());
        Ok(ast::InstName::new(
            span,
            ast::InstNameData { name, dims, conns },
        ))
    })?;
    p.require_reported(Semicolon)?;
    span.expand(p.last_span());
    Ok(ast::Inst::new(
        span,
        ast::InstData {
            target,
            params,
            names,
        },
    ))
}
/// Parse a variable declaration:
/// `[const] [var] [lifetime] <type> <name> [= <expr>], ... ;`.
///
/// The type is parsed speculatively: an explicit type is always attempted,
/// and an implicit type only when the `var` keyword was present (without
/// `var`, a bare name list would be ambiguous with other constructs).
fn parse_var_decl<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<ast::VarDecl<'n>> {
    let mut span = p.peek(0).1;
    let konst = p.try_eat(Keyword(Kw::Const));
    let var = p.try_eat(Keyword(Kw::Var));
    // Optional lifetime keyword (see `as_lifetime`).
    let lifetime = as_lifetime(p.peek(0).0);
    if lifetime.is_some() {
        p.bump();
    }
    let mut pp = ParallelParser::new();
    pp.add("explicit type", |p| {
        let ty = parse_explicit_type(p)?;
        Ok((ty, tail(p)?))
    });
    // Implicit types are only allowed after `var`.
    if var {
        pp.add("implicit type", |p| {
            let ty = parse_implicit_type(p)?;
            Ok((ty, tail(p)?))
        });
    }
    let (ty, names) = pp.finish(p, "explicit or implicit type")?;
    // Shared tail of both branches: the name list and the closing `;`.
    fn tail<'n>(p: &mut dyn AbstractParser<'n>) -> ReportedResult<Vec<VarDeclName<'n>>> {
        let names = comma_list_nonempty(
            p,
            Semicolon,
            "variable name",
            parse_variable_decl_assignment,
        )?;
        p.require_reported(Semicolon)?;
        Ok(names)
    }
    span.expand(p.last_span());
    Ok(ast::VarDecl::new(
        span,
        ast::VarDeclData {
            konst: konst,
            var: var,
            lifetime: lifetime,
            ty: ty,
            names: names,
        },
    ))
}
/// Parse a `parameter`/`localparam` declaration.
///
/// With `keyword_optional` set (e.g. inside a parameter port list), the
/// leading keyword may be omitted and the declaration is treated as
/// non-local. A `type` keyword selects type parameters; otherwise value
/// parameters are parsed with a speculative explicit/implicit type.
fn parse_param_decl<'n>(
    p: &mut dyn AbstractParser<'n>,
    keyword_optional: bool,
) -> ReportedResult<ast::ParamDecl<'n>> {
    let mut span = p.peek(0).1;
    let local = match p.peek(0) {
        (Keyword(Kw::Localparam), _) => {
            p.bump();
            true
        }
        (Keyword(Kw::Parameter), _) => {
            p.bump();
            false
        }
        (_, _) if keyword_optional => false,
        (tkn, sp) => {
            p.add_diag(
                DiagBuilder2::error(format!(
                    "expected `parameter` or `localparam`, but found {} instead",
                    tkn
                ))
                .span(sp),
            );
            return Err(());
        }
    };
    // Terminator predicate for the comma list: a declaration ends at `;` or
    // `)`, or at a `,` that is immediately followed by another
    // parameter/localparam keyword (the next declaration).
    let predicate = FuncPredicate {
        match_func: |p| match p.peek(0).0 {
            Semicolon | CloseDelim(Paren) => true,
            Comma => match p.peek(1).0 {
                Keyword(Kw::Parameter) | Keyword(Kw::Localparam) => true,
                _ => false,
            },
            _ => false,
        },
        recover_func: |p, consume| p.recover_balanced(&[CloseDelim(Paren), Semicolon], consume),
        desc: ") or ;",
    };
    let kind = if p.try_eat(Keyword(Kw::Type)) {
        // Type parameters: `<name> [= <explicit-type>]`, comma-separated.
        let decls = comma_list_nonempty(p, predicate, "parameter name", |p| {
            let mut span = p.peek(0).1;
            let name = parse_identifier_name(p, "parameter name")?;
            let ty = if p.try_eat(Operator(Op::Assign)) {
                Some(parse_explicit_type(p)?)
            } else {
                None
            };
            p.anticipate(&[Semicolon, Comma, CloseDelim(Paren)])?;
            span.expand(p.last_span());
            Ok(ast::ParamTypeDecl::new(
                span,
                ast::ParamTypeDeclData { name, ty },
            ))
        })?;
        p.anticipate(&[Semicolon, Comma, CloseDelim(Paren)])?;
        ast::ParamKind::Type(decls)
    } else {
        // Value parameters: speculative explicit/implicit type, then
        // `<name> [dims] [= <expr>]`.
        let decls = comma_list_nonempty(p, predicate, "parameter name", |p| {
            let mut pp = ParallelParser::new();
            pp.add("explicit type", |p| {
                let ty = parse_explicit_type(p)?;
                tail(p, ty)
            });
            pp.add("implicit type", |p| {
                let ty = parse_implicit_type(p)?;
                tail(p, ty)
            });
            // Shared tail of both branches: name, dimensions, default value.
            fn tail<'n>(
                p: &mut dyn AbstractParser<'n>,
                ty: Type<'n>,
            ) -> ReportedResult<ast::ParamValueDecl<'n>> {
                let mut span = p.peek(0).1;
                let name = parse_identifier_name(p, "parameter name")?;
                let (dims, _) = parse_optional_dimensions(p)?;
                let expr = if p.try_eat(Operator(Op::Assign)) {
                    Some(parse_expr(p)?)
                } else {
                    None
                };
                p.anticipate(&[Semicolon, Comma, CloseDelim(Paren)])?;
                span.expand(p.last_span());
                Ok(ast::ParamValueDecl::new(
                    span,
                    ast::ParamValueDeclData {
                        ty,
                        name,
                        dims,
                        expr,
                    },
                ))
            }
            pp.finish(p, "explicit or implicit type")
        })?;
        p.anticipate(&[Semicolon, Comma, CloseDelim(Paren)])?;
        ast::ParamKind::Value(decls)
    };
    span.expand(p.last_span());
    Ok(ast::ParamDecl::new(
        span,
        ast::ParamDeclData { local, kind },
    ))
}
/// Parse a hierarchical name.
///
/// NOTE(review): currently only a plain identifier is parsed; dotted
/// hierarchy (`a.b.c`) is not handled — confirm whether callers expect it.
fn parse_hname<'n>(p: &mut dyn AbstractParser<'n>, msg: &str) -> ReportedResult<ast::Identifier> {
    parse_identifier(p, msg)
}
/// Try to parse the argument list of a recognized builtin system task.
///
/// Only `bits` is recognized: its parenthesized type-or-expression argument
/// is parsed and wrapped in a `BitsExpr`. Any other name yields `Ok(None)`
/// without consuming input.
fn try_builtin_system_task<'n>(
    p: &mut dyn AbstractParser<'n>,
    name: Spanned<Name>,
    mut span: Span,
) -> ReportedResult<Option<Expr<'n>>> {
    if name.value.as_str().as_ref() != "bits" {
        return Ok(None);
    }
    // `bits ( <type-or-expr> )`
    let arg = flanked(p, Paren, |p| parse_type_or_expr(p, &[CloseDelim(Paren)]))?;
    span.expand(p.last_span());
    Ok(Some(ast::Expr::new(span, ast::BitsExpr { name, arg })))
}