use crate::construct::partial_space_or_tab::{space_or_tab, space_or_tab_min_max};
use crate::event::Name;
use crate::state::{Name as StateName, State};
use crate::tokenizer::Tokenizer;
use crate::util::constant::TAB_SIZE;
/// Start of an MDX flow expression.
///
/// Bails out with `Nok` unless the `mdx_expression_flow` construct is
/// enabled. Otherwise marks the current token as a flow expression and,
/// if the line begins with whitespace, consumes it first (capped below an
/// indented-code-sized indent when indented code is enabled) before
/// continuing at `before`.
pub fn start(tokenizer: &mut Tokenizer) -> State {
    if !tokenizer.parse_state.options.constructs.mdx_expression_flow {
        return State::Nok;
    }

    tokenizer.tokenize_state.token_1 = Name::MdxFlowExpression;

    // No leading whitespace: go straight to the `{` check.
    if !matches!(tokenizer.current, Some(b'\t' | b' ')) {
        return State::Retry(StateName::MdxExpressionFlowBefore);
    }

    tokenizer.attempt(State::Next(StateName::MdxExpressionFlowBefore), State::Nok);
    // When indented code is on, an indent of `TAB_SIZE` would be code
    // instead, so allow at most one column less; otherwise any amount.
    let max = if tokenizer.parse_state.options.constructs.code_indented {
        TAB_SIZE - 1
    } else {
        usize::MAX
    };
    State::Retry(space_or_tab_min_max(tokenizer, 0, max))
}
/// After optional leading whitespace, before an MDX flow expression proper.
///
/// Succeeds only on `{`: switches the tokenizer to concrete parsing and
/// attempts the shared expression machinery, continuing at `after` on
/// success.
pub fn before(tokenizer: &mut Tokenizer) -> State {
    match tokenizer.current {
        Some(b'{') => {
            tokenizer.concrete = true;
            tokenizer.attempt(State::Next(StateName::MdxExpressionFlowAfter), State::Nok);
            State::Retry(StateName::MdxExpressionStart)
        }
        _ => State::Nok,
    }
}
/// After an MDX flow expression.
///
/// Skips any trailing tabs/spaces, then continues at `end` to decide
/// whether the rest of the line is acceptable.
pub fn after(tokenizer: &mut Tokenizer) -> State {
    if matches!(tokenizer.current, Some(b'\t' | b' ')) {
        tokenizer.attempt(State::Next(StateName::MdxExpressionFlowEnd), State::Nok);
        State::Retry(space_or_tab(tokenizer))
    } else {
        State::Retry(StateName::MdxExpressionFlowEnd)
    }
}
/// After an MDX flow expression and optional trailing whitespace.
///
/// The expression is only valid if the line ends here (EOF or EOL).
/// As a special case, when JSX (flow) is enabled a `<` may follow on the
/// same line, in which case a JSX flow tag is attempted next. Anything
/// else rejects the whole construct.
pub fn end(tokenizer: &mut Tokenizer) -> State {
    // End of line / input: the expression stands on its own.
    if matches!(tokenizer.current, None | Some(b'\n')) {
        reset(tokenizer);
        return State::Ok;
    }

    // `<` may start an adjacent JSX flow tag when that construct is on.
    if tokenizer.current == Some(b'<') && tokenizer.parse_state.options.constructs.mdx_jsx_flow {
        tokenizer.tokenize_state.token_1 = Name::MdxJsxFlowTag;
        tokenizer.attempt(
            State::Next(StateName::MdxJsxFlowAfter),
            State::Next(StateName::MdxJsxFlowNok),
        );
        return State::Retry(StateName::MdxJsxStart);
    }

    // Trailing content on the line: not a flow expression.
    reset(tokenizer);
    State::Nok
}
/// Restore tokenizer state touched by this construct: clear the marked
/// token kind back to `Data` and leave concrete parsing mode.
fn reset(tokenizer: &mut Tokenizer) {
    tokenizer.tokenize_state.token_1 = Name::Data;
    tokenizer.concrete = false;
}