use super::tokens::{Symbols, TokenStream, Tokenizer};
use crate::ast::DesignFile;
use crate::data::*;
use crate::standard::VHDLStandard;
use crate::syntax::design_unit::parse_design_file;
use crate::{Token, TokenId};
use std::io;
use std::path::Path;
use std::sync::Arc;
use vhdl_lang::TokenAccess;
/// Front-end parser for VHDL source files.
///
/// Holds the shared symbol table (pre-populated from the selected standard)
/// and the standard revision to parse against. Cheap to clone the symbol
/// table handle across threads via the `Arc`.
pub struct VHDLParser {
// Shared symbol/identifier interner; `Arc` so tokenizers can share it.
pub symbols: Arc<Symbols>,
// VHDL standard revision (e.g. 1993/2008) this parser targets.
pub standard: VHDLStandard,
}
/// Per-parse state threaded through the recursive-descent parsing functions.
///
/// Borrows the token stream and a diagnostic sink for the duration of one
/// `parse_design_file` invocation.
pub(crate) struct ParsingContext<'a> {
// Tokens produced for the source being parsed.
pub stream: &'a TokenStream<'a>,
// Sink for non-fatal parse diagnostics.
pub diagnostics: &'a mut dyn DiagnosticHandler,
// Standard revision, consulted for version-dependent grammar rules.
pub standard: VHDLStandard,
}
// Token lookup on the context simply delegates to the underlying stream, so
// parsing functions can take `&ParsingContext` wherever `TokenAccess` is
// expected.
impl TokenAccess for ParsingContext<'_> {
fn get_token(&self, id: TokenId) -> &Token {
self.stream.get_token(id)
}
fn get_token_slice(&self, start_id: TokenId, end_id: TokenId) -> &[Token] {
self.stream.get_token_slice(start_id, end_id)
}
}
/// Result of parsing a file from disk: the `Source` that was read together
/// with its parsed `DesignFile`, or the I/O error that prevented reading it.
pub type ParserResult = Result<(Source, DesignFile), io::Error>;
impl VHDLParser {
    /// Creates a parser for the given VHDL standard revision.
    pub fn new(vhdl_standard: VHDLStandard) -> VHDLParser {
        VHDLParser {
            // Symbol table seeded from the standard (reserved words etc.).
            symbols: Arc::new(Symbols::from_standard(vhdl_standard)),
            standard: vhdl_standard,
        }
    }

    /// Interns `name` in the shared symbol table and returns its `Symbol`.
    pub fn symbol(&self, name: &Latin1String) -> Symbol {
        self.symbols.symtab().insert(name)
    }

    /// Parses `source` into a `DesignFile`.
    ///
    /// Non-fatal problems are reported through `diagnostics`. A fatal parse
    /// error is also pushed to `diagnostics` and an empty `DesignFile` is
    /// returned rather than propagated.
    pub fn parse_design_source(
        &self,
        source: &Source,
        diagnostics: &mut dyn DiagnosticHandler,
    ) -> DesignFile {
        let contents = source.contents();
        let tokenizer = Tokenizer::new(&self.symbols, source, ContentReader::new(&contents));
        // Function-call position reborrows `diagnostics`, so it is still
        // usable on the next line.
        let stream = TokenStream::new(tokenizer, diagnostics);
        let mut ctx = ParsingContext {
            stream: &stream,
            // Struct-literal shorthand MOVES the `&mut` reference into `ctx`
            // (no implicit reborrow here, unlike a function call).
            diagnostics,
            standard: self.standard,
        };
        match parse_design_file(&mut ctx) {
            Ok(design_file) => design_file,
            Err(diagnostic) => {
                // The handler now lives in `ctx`; reaching it via the moved-out
                // `diagnostics` binding would be a use-after-move.
                ctx.diagnostics.push(diagnostic);
                DesignFile::default()
            }
        }
    }

    /// Reads `file_name` as Latin-1 and parses it.
    ///
    /// # Errors
    /// Returns `io::Error` when the file cannot be read; parse problems are
    /// reported through `diagnostics` instead of failing the call.
    pub fn parse_design_file(
        &self,
        file_name: &Path,
        diagnostics: &mut dyn DiagnosticHandler,
    ) -> ParserResult {
        let source = Source::from_latin1_file(file_name)?;
        let design_file = self.parse_design_source(&source, diagnostics);
        Ok((source, design_file))
    }
}