pub(crate) mod ast;
pub(crate) mod evaluator;
pub(crate) mod expression;
pub(crate) mod lexer;
pub mod parser_error;
pub(crate) mod token;
pub(crate) mod validator;
use crate::{
chord::{
Chord,
interval::Interval,
note::{Note, NoteLiteral, RootModifier},
},
parsing::{evaluator::Evaluator, expression::*},
};
use ast::Ast;
use expression::Exp;
use lexer::Lexer;
use parser_error::{ParserError, ParserErrors};
use std::{iter::Peekable, slice::Iter};
use token::{Token, TokenType};
/// What the parser is currently "inside of"; decides how a bare extension
/// number is interpreted when it reaches `Parser::add_interval`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Context {
    /// Default state: extensions are plain extensions (and a lone `5` is a power chord).
    None,
    /// A `sus` token was just read; the next extension names the sus interval.
    Sus,
    /// Inside a parenthesized `add`/`omit` group (set only when a paren is open).
    Group(GroupContext),
}
/// State of an `add`/`omit` group inside parentheses.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct GroupContext {
    // Which keyword opened the group (add vs. omit).
    kind: GroupKind,
    // Becomes true once a comma is seen (see `Context::on_comma`); while active,
    // later extensions in the group keep the add/omit meaning.
    active: bool,
}
/// The keyword that opened a parenthesized group.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum GroupKind {
    /// `omit(...)` — following extensions remove intervals.
    Omit,
    /// `add(...)` — following extensions add intervals.
    Add,
}
impl Context {
    /// Builds a group context for `kind` that is not yet activated;
    /// activation happens on the first comma (see [`Context::on_comma`]).
    fn start_group(kind: GroupKind) -> Self {
        let group = GroupContext {
            kind,
            active: false,
        };
        Context::Group(group)
    }

    /// Transition applied when a comma token is read: a group becomes
    /// active, while any other context resets to `None`.
    fn on_comma(self) -> Self {
        if let Context::Group(group) = self {
            Context::Group(GroupContext {
                active: true,
                ..group
            })
        } else {
            Context::None
        }
    }
}
/// Parser that turns a chord-symbol string into a `Chord` via lexing,
/// token pre-processing, AST construction, and evaluation.
pub struct Parser {
    // Produces the raw token stream from the input string.
    lexer: Lexer,
    // Errors accumulated during the walk; reported together at the end of `parse`.
    errors: Vec<ParserError>,
    // Count of currently open '(' minus consumed ')'.
    // NOTE(review): "parent" presumably means "paren(thesis)".
    open_parent_count: i16,
    // Current interpretation context for extension tokens.
    context: Context,
}
impl Parser {
    /// Creates a fresh parser with no pending errors and a neutral context.
    pub fn new() -> Parser {
        Parser {
            lexer: Lexer::new(),
            errors: Vec::new(),
            open_parent_count: 0,
            context: Context::None,
        }
    }

    /// Parses a chord symbol into a `Chord`.
    ///
    /// Pipeline: reset per-parse state, lex `input`, pre-process the token
    /// stream (folding "maj"/"dim" with an adjacent "7" into single
    /// synthetic tokens), walk the tokens into an `Ast`, then evaluate.
    /// If any errors were recorded during the walk, they are all returned
    /// together instead of evaluating.
    pub fn parse(&mut self, input: &str) -> Result<Chord, ParserErrors> {
        self.init();
        let mut ast = Ast::default();
        let mut tokens = Vec::with_capacity(input.len());
        let binding = self.lexer.scan_tokens(input, &mut tokens);
        let tokens = Self::pre_process(&binding);
        let mut tokens = tokens.iter().peekable();
        self.read_tokens(&mut tokens, &mut ast);
        if !self.errors.is_empty() {
            return Err(ParserErrors::new(self.errors.clone()));
        }
        Evaluator::evaluate(&ast, input.into())
    }

    /// Clears per-parse state so the same parser can be reused.
    fn init(&mut self) {
        self.errors.clear();
        self.open_parent_count = 0;
        self.context = Context::None;
    }

    /// Reads the mandatory root note, then dispatches every remaining token.
    fn read_tokens(&mut self, tokens: &mut Peekable<Iter<Token>>, ast: &mut Ast) {
        self.read_root(tokens, ast);
        let mut next = tokens.next();
        while next.is_some() {
            self.process_token(next.unwrap(), tokens, ast);
            next = tokens.next();
        }
    }

    /// The first token(s) must form a note; otherwise `MissingRootNote`.
    fn read_root(&mut self, tokens: &mut Peekable<Iter<Token>>, ast: &mut Ast) {
        match self.expect_note(tokens) {
            Some(note) => ast.root = note,
            None => self.errors.push(ParserError::MissingRootNote),
        }
    }

    /// Central dispatch: maps one token to AST expressions, possibly
    /// consuming lookahead tokens from the iterator.
    fn process_token(&mut self, token: &Token, tokens: &mut Peekable<Iter<Token>>, ast: &mut Ast) {
        match &token.token_type {
            TokenType::Note(_) => self.note(token),
            TokenType::Sharp => self.modifier(tokens, RootModifier::Sharp, token, ast),
            TokenType::Flat => self.modifier(tokens, RootModifier::Flat, token, ast),
            TokenType::Aug => self.aug(tokens, ast),
            TokenType::Dim => ast.expressions.push(Exp::Dim),
            TokenType::Dim7 => ast.expressions.push(Exp::Dim7),
            TokenType::HalfDim => ast.expressions.push(Exp::HalfDim),
            TokenType::Extension(ext) => self.extension(ext, token, ast),
            TokenType::Add => self.add(token, tokens, ast),
            TokenType::Omit => self.omit(token, tokens, ast),
            TokenType::Alt => ast.expressions.push(Exp::Alt),
            TokenType::Sus => self.sus(tokens, ast),
            TokenType::Minor => ast.expressions.push(Exp::Minor),
            TokenType::Hyphen => self.hyphen(tokens, token.pos, ast),
            TokenType::Maj => ast.expressions.push(Exp::Maj),
            TokenType::Maj7 => ast.expressions.push(Exp::Maj7),
            TokenType::Slash => self.slash(tokens, token, ast),
            TokenType::LParent => self.lparen(tokens, token.pos, ast),
            TokenType::RParent => self.rparen(token.pos),
            TokenType::Comma => self.comma(),
            TokenType::Bass => ast.expressions.push(Exp::Bass),
            TokenType::Illegal => self.errors.push(ParserError::IllegalToken(token.pos)),
            TokenType::Eof => (),
        }
    }

    /// Handles '/' — either the "6/9" idiom (a 9 right after a major-sixth
    /// extension becomes an added ninth) or a slash-bass note. Anything
    /// else, or trailing tokens after the slash part, is an error.
    fn slash(&mut self, tokens: &mut Peekable<Iter<Token>>, token: &Token, ast: &mut Ast) {
        if let Some(Token {
            token_type: TokenType::Extension(a),
            pos,
            ..
        }) = tokens.next_if(|t| self.is_extension(t))
        {
            match a {
                // "6/9": slash-9 immediately following a MajorSixth extension
                // adds the ninth rather than naming a bass note.
                // NOTE(review): the binding `x` is unused; `_ if *a == 9` would
                // express the same guard.
                x if *a == 9
                    && matches!(
                        ast.expressions.last(),
                        Some(Exp::Extension(ExtensionExp {
                            interval: Interval::MajorSixth,
                            ..
                        }))
                    ) =>
                {
                    ast.expressions
                        .push(Exp::Add(AddExp::new(Interval::Ninth, *pos)))
                }
                _ => {
                    self.errors
                        .push(ParserError::IllegalSlashNotation(token.pos));
                    return;
                }
            }
        } else if let Some(b) = self.expect_note(tokens) {
            ast.expressions.push(Exp::SlashBass(SlashBassExp::new(b)));
        } else {
            self.errors
                .push(ParserError::IllegalSlashNotation(token.pos));
            return;
        }
        // Nothing may follow the slash part except end of input.
        if !self.expect_peek(TokenType::Eof, tokens) {
            self.errors
                .push(ParserError::IllegalSlashNotation(token.pos));
        }
    }

    /// '-' followed by "5" is a diminished fifth ("-5"); a bare hyphen
    /// means minor.
    fn hyphen(&mut self, tokens: &mut Peekable<Iter<Token>>, pos: usize, ast: &mut Ast) {
        if tokens
            .next_if(|t| matches!(t.token_type, TokenType::Extension(e) if e == 5))
            .is_some()
        {
            ast.expressions.push(Exp::Extension(ExtensionExp {
                interval: Interval::DiminishedFifth,
                pos,
            }));
        } else {
            ast.expressions.push(Exp::Minor);
        }
    }

    /// "aug" — a redundant trailing "5" (as in "aug5") is consumed and ignored.
    fn aug(&mut self, tokens: &mut Peekable<Iter<Token>>, ast: &mut Ast) {
        let _ = tokens.next_if(|t| matches!(t.token_type, TokenType::Extension(e) if e == 5));
        ast.expressions.push(Exp::Aug);
    }

    /// A top-level ')' — valid only when exactly one paren is open
    /// (nested/extra closers are reported).
    fn rparen(&mut self, pos: usize) {
        if self.open_parent_count != 1 {
            self.errors
                .push(ParserError::UnexpectedClosingParenthesis(pos));
        }
        self.context = Context::None;
        // NOTE(review): this decrements even when the count is already 0,
        // letting it go negative; i16 so no overflow panic, but confirm intended.
        self.open_parent_count -= 1;
    }

    /// '(' — consumes and dispatches tokens until the matching ')' or EOF.
    /// A nested '(' is reported, then still handed to `process_token`
    /// (which recurses into this method to consume the nested group).
    fn lparen(&mut self, tokens: &mut Peekable<Iter<Token>>, pos: usize, ast: &mut Ast) {
        self.open_parent_count += 1;
        self.context = Context::None;
        while let Some(token) = tokens.next() {
            match token.token_type {
                TokenType::RParent => {
                    self.open_parent_count -= 1;
                    break;
                }
                TokenType::LParent => {
                    self.errors.push(ParserError::NestedParenthesis(pos));
                }
                TokenType::Eof => {
                    self.errors
                        .push(ParserError::MissingClosingParenthesis(pos));
                    break;
                }
                _ => (),
            }
            self.process_token(token, tokens, ast);
        }
    }

    /// ',' — activates a group context or resets any other context.
    fn comma(&mut self) {
        self.context = self.context.on_comma();
    }

    /// "omit" — only an immediately following "5" or "3" is a valid target.
    /// Inside parentheses it also opens an Omit group so later
    /// comma-separated extensions keep the omit meaning.
    fn omit(&mut self, token: &Token, tokens: &mut Peekable<Iter<Token>>, ast: &mut Ast) {
        if self.open_parent_count > 0 {
            self.context = Context::start_group(GroupKind::Omit);
        }
        if self.consume_extension_if(tokens, 5, || {
            ast.expressions.push(Exp::Omit(OmitExp::new(
                Interval::PerfectFifth,
                token.pos + token.len,
            )));
        }) {
            return;
        }
        if self.consume_extension_if(tokens, 3, || {
            ast.expressions.push(Exp::Omit(OmitExp::new(
                Interval::MajorThird,
                token.pos + token.len,
            )));
        }) {
            return;
        }
        self.errors.push(ParserError::IllegalOrMissingOmitTarget((
            token.pos, token.len,
        )));
    }

    /// "add" — accepts an optional sharp/flat plus an extension, or "maj7".
    /// Inside parentheses it also opens an Add group (see `add_interval`).
    fn add(&mut self, token: &Token, tokens: &mut Peekable<Iter<Token>>, ast: &mut Ast) {
        if self.open_parent_count > 0 {
            self.context = Context::start_group(GroupKind::Add);
        }
        let modifier = self.match_modifier(tokens);
        if let Some(Token {
            token_type: TokenType::Extension(ext),
            pos,
            ..
        }) = tokens.next_if(|t| self.is_extension(t))
        {
            match from_modifier_extension(modifier, *ext) {
                Some(interval) => ast.expressions.push(Exp::Add(AddExp::new(interval, *pos))),
                None => self.errors.push(ParserError::InvalidExtension(token.pos)),
            }
            return;
        }
        // "add maj7" — the folded Maj7 token is also a valid add target.
        if tokens
            .next_if(|t| matches!(t.token_type, TokenType::Maj7))
            .is_some()
        {
            ast.expressions.push(Exp::Add(AddExp::new(
                Interval::MajorSeventh,
                token.pos + token.len,
            )));
            return;
        }
        self.errors
            .push(ParserError::MissingAddTarget((token.pos, token.len)));
    }

    /// A standalone sharp/flat — must be immediately followed by an
    /// extension number, which together name one interval.
    fn modifier(
        &mut self,
        tokens: &mut Peekable<Iter<Token>>,
        modifier: RootModifier,
        token: &Token,
        ast: &mut Ast,
    ) {
        let extension = match tokens.next_if(|t| self.is_extension(t)) {
            Some(Token {
                token_type: TokenType::Extension(ext),
                ..
            }) => ext,
            _ => {
                self.errors.push(ParserError::UnexpectedModifier(token.pos));
                return;
            }
        };
        match from_modifier_extension(Some(modifier), *extension) {
            Some(int) => self.add_interval(int, token.pos, ast),
            None => self
                .errors
                .push(ParserError::InvalidExtension(token.pos + 1)),
        }
    }

    /// True when the token is any `Extension(_)`.
    fn is_extension(&self, token: &Token) -> bool {
        matches!(token.token_type, TokenType::Extension(_))
    }

    /// "sus" — if no extension (or accidental) follows, it defaults to
    /// sus4; otherwise the Sus context lets the next interval decide.
    fn sus(&mut self, tokens: &mut Peekable<Iter<Token>>, ast: &mut Ast) {
        self.context = Context::Sus;
        if !matches!(
            tokens.peek().map(|t| &t.token_type),
            Some(TokenType::Extension(_) | TokenType::Sharp | TokenType::Flat)
        ) {
            ast.expressions
                .push(Exp::Sus(SusExp::new(Interval::PerfectFourth)));
            self.context = Context::None;
        }
    }

    /// Pushes a sus expression and leaves the Sus context.
    fn add_sus_exp(&mut self, int: Interval, ast: &mut Ast) {
        ast.expressions.push(Exp::Sus(SusExp::new(int)));
        self.context = Context::None;
    }

    /// A bare extension number. A lone "5" outside any context is a power
    /// chord; otherwise the number maps to its unmodified interval.
    fn extension(&mut self, ext: &u8, token: &Token, ast: &mut Ast) {
        if *ext == 5 && self.context == Context::None {
            ast.expressions.push(Exp::Power);
        } else if let Some(int) = from_modifier_extension(None, *ext) {
            self.add_interval(int, token.pos, ast);
        } else {
            self.errors.push(ParserError::InvalidExtension(token.pos));
        }
    }

    /// A note token after the root is never valid.
    fn note(&mut self, token: &Token) {
        self.errors.push(ParserError::UnexpectedNote(token.pos));
    }

    /// Context-sensitive interpretation of an interval:
    /// - Sus: allowed sus intervals become the sus target; others fall back
    ///   to sus4 plus a plain extension.
    /// - active Group: interval becomes an Omit/Add according to group kind.
    /// - otherwise: a perfect fourth reads as sus4, an augmented fourth
    ///   alone is rejected, anything else is a plain extension.
    fn add_interval(&mut self, int: Interval, pos: usize, ast: &mut Ast) {
        match self.context {
            Context::Sus => {
                if self.allowed_sus_interval(int) {
                    self.add_sus_exp(int, ast);
                } else {
                    self.add_sus_exp(Interval::PerfectFourth, ast);
                    ast.expressions
                        .push(Exp::Extension(ExtensionExp::new(int, pos)));
                }
            }
            Context::Group(g) if g.active => match g.kind {
                GroupKind::Omit => ast.expressions.push(Exp::Omit(OmitExp::new(int, pos))),
                GroupKind::Add => ast.expressions.push(Exp::Add(AddExp::new(int, pos))),
            },
            _ => match int {
                Interval::PerfectFourth => ast.expressions.push(Exp::Sus(SusExp::new(int))),
                Interval::AugmentedFourth => self.errors.push(ParserError::InvalidExtension(pos)),
                _ => ast
                    .expressions
                    .push(Exp::Extension(ExtensionExp::new(int, pos))),
            },
        }
    }

    /// Consumes the next token and runs `f` only when it is `Extension(target)`.
    /// Returns whether the token was consumed.
    fn consume_extension_if<F>(
        &mut self,
        tokens: &mut Peekable<Iter<Token>>,
        target: u8,
        f: F,
    ) -> bool
    where
        F: FnOnce(),
    {
        if let Some(Token {
            token_type: TokenType::Extension(..),
            ..
        }) = tokens.next_if(|t| matches!(t.token_type, TokenType::Extension(e) if e == target))
        {
            f();
            true
        } else {
            false
        }
    }

    /// Intervals that may directly follow "sus" (sus2/sus4 family).
    fn allowed_sus_interval(&self, int: Interval) -> bool {
        matches!(
            int,
            Interval::MinorSecond
                | Interval::MajorSecond
                | Interval::PerfectFourth
                | Interval::AugmentedFourth
        )
    }

    /// Consumes an optional leading sharp/flat and returns it.
    fn match_modifier(&self, tokens: &mut Peekable<Iter<Token>>) -> Option<RootModifier> {
        let modifier = match tokens.peek()?.token_type {
            TokenType::Flat => RootModifier::Flat,
            TokenType::Sharp => RootModifier::Sharp,
            _ => return None,
        };
        tokens.next();
        Some(modifier)
    }

    /// Consumes a note letter plus an optional accidental into a `Note`.
    /// Returns `None` (having consumed one token) when the next token is
    /// not a note.
    fn expect_note(&mut self, tokens: &mut Peekable<Iter<Token>>) -> Option<Note> {
        let TokenType::Note(n) = &tokens.next()?.token_type else {
            return None;
        };
        let modifier = self.match_modifier(tokens);
        Some(Note::new(
            NoteLiteral::from_string(n),
            modifier.map(|m| m.into()),
        ))
    }

    /// Non-consuming check that the next token has the expected type.
    fn expect_peek(&self, expected: TokenType, tokens: &mut Peekable<Iter<Token>>) -> bool {
        matches!(tokens.peek(), Some(token) if token.token_type == expected)
    }

    /// Normalizes the token stream before parsing:
    /// 1. adjacent Maj/Maj7 + "7" collapse into one Maj7 token,
    /// 2. Dim paired with a "7" folds into Dim7,
    /// 3. Maj paired with a "7" folds into Maj7.
    fn pre_process<'a>(tokens: &[Token<'a>]) -> Vec<Token<'a>> {
        Self::fold_7(
            &Self::fold_7(&Self::concat_maj7(tokens), TokenType::Dim, TokenType::Dim7),
            TokenType::Maj,
            TokenType::Maj7,
        )
    }

    /// Merges an adjacent Maj (or Maj7) + Extension(7) pair into a single
    /// synthetic Maj7 token spanning both originals.
    fn concat_maj7<'a>(tokens: &[Token<'a>]) -> Vec<Token<'a>> {
        let mut out = Vec::with_capacity(tokens.len());
        let mut i = 0;
        while i < tokens.len() {
            match (&tokens[i].token_type, tokens.get(i + 1)) {
                (TokenType::Maj | TokenType::Maj7, Some(next))
                    if matches!(next.token_type, TokenType::Extension(7)) =>
                {
                    out.push(Token {
                        token_type: TokenType::Maj7,
                        pos: tokens[i].pos,
                        len: tokens[i].len + next.len,
                        synthetic: true,
                    });
                    i += 2;
                }
                _ => {
                    out.push(tokens[i].clone());
                    i += 1;
                }
            }
        }
        out
    }

    /// Pairs each non-synthetic `match_token` with an `Extension(7)` (in
    /// either order, not necessarily adjacent) and replaces the earlier of
    /// the pair in the output with `insert_token_type`; the later token of
    /// the pair is dropped. Unpaired tokens pass through unchanged.
    /// `pending_match`/`pending_seven` are stacks of output indices still
    /// waiting for a partner.
    fn fold_7<'a>(
        tokens: &[Token<'a>],
        match_token: TokenType,
        insert_token_type: TokenType<'a>,
    ) -> Vec<Token<'a>> {
        let mut out: Vec<Token> = Vec::with_capacity(tokens.len());
        let mut pending_match = Vec::new();
        let mut pending_seven = Vec::new();
        for token in tokens {
            let current_idx = out.len();
            match &token.token_type {
                // Synthetic tokens (already folded) are never re-paired.
                t if *t == match_token && !token.synthetic => {
                    if let Some(prev_idx) = pending_seven.pop() {
                        out[prev_idx] =
                            Self::merge_tokens(&out[prev_idx], token, &insert_token_type);
                    } else {
                        pending_match.push(current_idx);
                        out.push(token.clone());
                    }
                }
                TokenType::Extension(7) => {
                    if let Some(prev_idx) = pending_match.pop() {
                        out[prev_idx] =
                            Self::merge_tokens(&out[prev_idx], token, &insert_token_type);
                    } else {
                        pending_seven.push(current_idx);
                        out.push(token.clone());
                    }
                }
                _ => out.push(token.clone()),
            }
        }
        out
    }

    /// Builds the synthetic token replacing a folded pair: earliest
    /// position of the two, new token type.
    /// NOTE(review): `len` keeps only `t1.len`, whereas `concat_maj7` sums
    /// both lengths when merging — confirm whether the combined span length
    /// was intended here.
    fn merge_tokens<'a>(t1: &Token, t2: &Token, new_type: &TokenType<'a>) -> Token<'a> {
        Token {
            token_type: new_type.clone(),
            pos: t1.pos.min(t2.pos),
            len: t1.len,
            synthetic: true,
        }
    }
}
impl Default for Parser {
fn default() -> Self {
Self::new()
}
}
/// Maps an optional accidental plus a raw extension digit to the interval
/// that combination denotes in chord notation, or `None` when the
/// combination has no meaning (e.g. sharp 2, sharp 13).
fn from_modifier_extension(mdf: Option<RootModifier>, ext: u8) -> Option<Interval> {
    let interval = match (mdf, ext) {
        (None, 1) => Interval::Unison,
        (None, 2) => Interval::MajorSecond,
        (Some(RootModifier::Flat), 2) => Interval::MinorSecond,
        (None, 3) => Interval::MajorThird,
        (Some(RootModifier::Flat), 3) => Interval::MinorThird,
        (None, 4) => Interval::PerfectFourth,
        (Some(RootModifier::Sharp), 4) => Interval::AugmentedFourth,
        (None, 5) => Interval::PerfectFifth,
        (Some(RootModifier::Flat), 5) => Interval::DiminishedFifth,
        (Some(RootModifier::Sharp), 5) => Interval::AugmentedFifth,
        (None, 6) => Interval::MajorSixth,
        (Some(RootModifier::Flat), 6) => Interval::MinorSixth,
        // Both a plain "7" and "b7" denote the minor (dominant) seventh.
        (None, 7) | (Some(RootModifier::Flat), 7) => Interval::MinorSeventh,
        (None, 8) => Interval::Octave,
        (None, 9) => Interval::Ninth,
        (Some(RootModifier::Flat), 9) => Interval::FlatNinth,
        (Some(RootModifier::Sharp), 9) => Interval::SharpNinth,
        (None, 11) => Interval::Eleventh,
        (Some(RootModifier::Sharp), 11) => Interval::SharpEleventh,
        (None, 13) => Interval::Thirteenth,
        (Some(RootModifier::Flat), 13) => Interval::FlatThirteenth,
        _ => return None,
    };
    Some(interval)
}