use crate::event::Name;
use crate::state::{Name as StateName, State};
use crate::tokenizer::Tokenizer;
/// Start of raw text (code text `` `a` `` or math text `$a$`).
///
/// Succeeds only when the matching construct is enabled, and when the marker
/// does not directly follow the same marker — unless that previous marker was
/// a character escape.
pub fn start(tokenizer: &mut Tokenizer) -> State {
    // Is the construct for the byte at the cursor turned on?
    let constructs = &tokenizer.parse_state.options.constructs;
    let enabled = match tokenizer.current {
        Some(b'`') => constructs.code_text,
        Some(b'$') => constructs.math_text,
        _ => false,
    };
    // A run continuing the previous marker is only a valid start if the
    // previous marker was produced by a character escape.
    let escaped_before = !tokenizer.events.is_empty()
        && tokenizer.events[tokenizer.events.len() - 1].name == Name::CharacterEscape;
    if !enabled || (tokenizer.previous == tokenizer.current && !escaped_before) {
        return State::Nok;
    }
    let marker = tokenizer.current.unwrap();
    // Pick the event names for the whole construct, the marker sequences,
    // and the data in between.
    let (whole, sequence, data) = if marker == b'`' {
        (Name::CodeText, Name::CodeTextSequence, Name::CodeTextData)
    } else {
        (Name::MathText, Name::MathTextSequence, Name::MathTextData)
    };
    tokenizer.tokenize_state.token_1 = whole;
    tokenizer.tokenize_state.token_2 = sequence;
    tokenizer.tokenize_state.token_3 = data;
    tokenizer.tokenize_state.marker = marker;
    tokenizer.enter(tokenizer.tokenize_state.token_1.clone());
    tokenizer.enter(tokenizer.tokenize_state.token_2.clone());
    State::Retry(StateName::RawTextSequenceOpen)
}
/// In the opening marker sequence.
///
/// Counts consecutive markers; rejects a lone `$` when single-dollar math
/// text is disabled; otherwise closes the sequence and moves on.
pub fn sequence_open(tokenizer: &mut Tokenizer) -> State {
    let marker = tokenizer.tokenize_state.marker;
    match tokenizer.current {
        // Another marker: grow the opening sequence.
        Some(byte) if byte == marker => {
            tokenizer.tokenize_state.size += 1;
            tokenizer.consume();
            State::Next(StateName::RawTextSequenceOpen)
        }
        // A single `$` is not math text when single-dollar math is off:
        // reset construct state and fail.
        _ if marker == b'$'
            && tokenizer.tokenize_state.size == 1
            && !tokenizer.parse_state.options.math_text_single_dollar =>
        {
            tokenizer.tokenize_state.marker = 0;
            tokenizer.tokenize_state.size = 0;
            tokenizer.tokenize_state.token_1 = Name::Data;
            tokenizer.tokenize_state.token_2 = Name::Data;
            tokenizer.tokenize_state.token_3 = Name::Data;
            State::Nok
        }
        // Opening sequence complete.
        _ => {
            tokenizer.exit(tokenizer.tokenize_state.token_2.clone());
            State::Retry(StateName::RawTextBetween)
        }
    }
}
/// Between the opening and closing sequences.
///
/// Dispatches on the current byte: EOF aborts the construct, line endings
/// are tokenized and skipped, a marker starts a (potential) closing
/// sequence, anything else starts data.
pub fn between(tokenizer: &mut Tokenizer) -> State {
    match tokenizer.current {
        // EOF before any closing sequence: this was never raw text.
        None => {
            tokenizer.tokenize_state.marker = 0;
            tokenizer.tokenize_state.size = 0;
            tokenizer.tokenize_state.token_1 = Name::Data;
            tokenizer.tokenize_state.token_2 = Name::Data;
            tokenizer.tokenize_state.token_3 = Name::Data;
            State::Nok
        }
        // Raw text may span lines; emit the line ending and continue.
        Some(b'\n') => {
            tokenizer.enter(Name::LineEnding);
            tokenizer.consume();
            tokenizer.exit(Name::LineEnding);
            State::Next(StateName::RawTextBetween)
        }
        // A marker: try to close.
        Some(byte) if byte == tokenizer.tokenize_state.marker => {
            tokenizer.enter(tokenizer.tokenize_state.token_2.clone());
            State::Retry(StateName::RawTextSequenceClose)
        }
        // Anything else is data.
        Some(_) => {
            tokenizer.enter(tokenizer.tokenize_state.token_3.clone());
            State::Retry(StateName::RawTextData)
        }
    }
}
/// In data between sequences.
///
/// Data ends at EOF, a line ending, or the construct's marker; everything
/// else is consumed as part of the data run.
pub fn data(tokenizer: &mut Tokenizer) -> State {
    let at_break = match tokenizer.current {
        None | Some(b'\n') => true,
        current => current == Some(tokenizer.tokenize_state.marker),
    };
    if at_break {
        tokenizer.exit(tokenizer.tokenize_state.token_3.clone());
        State::Retry(StateName::RawTextBetween)
    } else {
        tokenizer.consume();
        State::Next(StateName::RawTextData)
    }
}
/// In a potential closing marker sequence.
///
/// Counts markers; a sequence exactly as long as the opening one closes the
/// construct, any other length is demoted to data and scanning continues.
pub fn sequence_close(tokenizer: &mut Tokenizer) -> State {
    // Still in the sequence: count and consume.
    if tokenizer.current == Some(tokenizer.tokenize_state.marker) {
        tokenizer.tokenize_state.size_b += 1;
        tokenizer.consume();
        return State::Next(StateName::RawTextSequenceClose);
    }
    tokenizer.exit(tokenizer.tokenize_state.token_2.clone());
    if tokenizer.tokenize_state.size != tokenizer.tokenize_state.size_b {
        // Wrong length: relabel the just-emitted enter/exit sequence events
        // as data, reset the close counter, and keep looking.
        let index = tokenizer.events.len() - 2;
        tokenizer.events[index].name = tokenizer.tokenize_state.token_3.clone();
        tokenizer.events[index + 1].name = tokenizer.tokenize_state.token_3.clone();
        tokenizer.tokenize_state.size_b = 0;
        return State::Retry(StateName::RawTextBetween);
    }
    // Lengths match: close the whole construct and reset all state.
    tokenizer.exit(tokenizer.tokenize_state.token_1.clone());
    tokenizer.tokenize_state.marker = 0;
    tokenizer.tokenize_state.size = 0;
    tokenizer.tokenize_state.size_b = 0;
    tokenizer.tokenize_state.token_1 = Name::Data;
    tokenizer.tokenize_state.token_2 = Name::Data;
    tokenizer.tokenize_state.token_3 = Name::Data;
    State::Ok
}