use crate::event::Name;
use crate::message;
use crate::state::{Name as StateName, State};
use crate::tokenizer::Tokenizer;
use crate::util::{mdx_collect::collect, slice::Slice};
use crate::MdxSignal;
use alloc::boxed::Box;
/// Start of MDX ESM.
///
/// Succeeds only when the construct is enabled, a user parser is configured,
/// we are not interrupting, we are at column 1, and the current byte could
/// begin `export` or `import`.
pub fn start(tokenizer: &mut Tokenizer) -> State {
    let enabled = tokenizer.parse_state.options.constructs.mdx_esm
        && tokenizer.parse_state.options.mdx_esm_parse.is_some();

    // Guard: bail out unless every precondition holds.
    if !enabled
        || tokenizer.interrupt
        || tokenizer.point.column != 1
        || !matches!(tokenizer.current, Some(b'e' | b'i'))
    {
        return State::Nok;
    }

    // Remember where the candidate keyword starts so `word` can slice it out.
    tokenizer.tokenize_state.start = tokenizer.point.index;
    tokenizer.enter(Name::MdxEsm);
    tokenizer.enter(Name::MdxEsmData);
    tokenizer.consume();
    State::Next(StateName::MdxEsmWord)
}
/// In the first word of a potential ESM line.
///
/// Keeps consuming lowercase ASCII letters; once the word ends, it must be
/// exactly `export` or `import` followed by a space, otherwise this is not ESM.
pub fn word(tokenizer: &mut Tokenizer) -> State {
    // Still inside the candidate keyword: keep eating lowercase letters.
    if let Some(b'a'..=b'z') = tokenizer.current {
        tokenizer.consume();
        return State::Next(StateName::MdxEsmWord);
    }

    // The word ended; slice it out of the input bytes.
    let keyword = Slice::from_indices(
        tokenizer.parse_state.bytes,
        tokenizer.tokenize_state.start,
        tokenizer.point.index,
    );
    let is_esm_keyword = matches!(keyword.as_str(), "export" | "import");

    if is_esm_keyword && tokenizer.current == Some(b' ') {
        // Found `export ` / `import `: this is ESM.
        tokenizer.concrete = true;
        // Repurpose `start` to remember the index of the last event, so
        // `parse_esm` knows where to collect from.
        tokenizer.tokenize_state.start = tokenizer.events.len() - 1;
        tokenizer.consume();
        State::Next(StateName::MdxEsmInside)
    } else {
        tokenizer.tokenize_state.start = 0;
        State::Nok
    }
}
/// In data on an ESM line.
///
/// A line ending or end of file closes the data token; anything else is more
/// data.
pub fn inside(tokenizer: &mut Tokenizer) -> State {
    if matches!(tokenizer.current, None | Some(b'\n')) {
        tokenizer.exit(Name::MdxEsmData);
        State::Retry(StateName::MdxEsmLineStart)
    } else {
        tokenizer.consume();
        State::Next(StateName::MdxEsmInside)
    }
}
/// At the start of a line inside ESM (or right at an eol/EOF).
pub fn line_start(tokenizer: &mut Tokenizer) -> State {
    if tokenizer.current.is_none() {
        // End of file: attempt to finish the whole block.
        State::Retry(StateName::MdxEsmAtEnd)
    } else if tokenizer.current == Some(b'\n') {
        // An eol here means a potential blank line: check it. If it is
        // blank, the block is done (`at_end`); otherwise it continues.
        tokenizer.check(
            State::Next(StateName::MdxEsmAtEnd),
            State::Next(StateName::MdxEsmContinuationStart),
        );
        State::Retry(StateName::MdxEsmBlankLineBefore)
    } else {
        // More content: open a fresh data token on this line.
        tokenizer.enter(Name::MdxEsmData);
        tokenizer.consume();
        State::Next(StateName::MdxEsmInside)
    }
}
/// At a line ending that continues the ESM block.
///
/// Consumes the eol as a `LineEnding` token, then returns to the start of the
/// next line.
pub fn continuation_start(tokenizer: &mut Tokenizer) -> State {
    tokenizer.enter(Name::LineEnding);
    tokenizer.consume();
    tokenizer.exit(Name::LineEnding);
    State::Next(StateName::MdxEsmLineStart)
}
/// At a line ending, before running the blank-line check (see `line_start`).
///
/// Consumes the eol as a `LineEnding` token, then hands off to the blank-line
/// construct; the outcome routes through the states registered via
/// `tokenizer.check`.
pub fn blank_line_before(tokenizer: &mut Tokenizer) -> State {
    tokenizer.enter(Name::LineEnding);
    tokenizer.consume();
    tokenizer.exit(Name::LineEnding);
    State::Next(StateName::BlankLineStart)
}
/// At the end of the ESM block: run the user parser and close up on success.
pub fn at_end(tokenizer: &mut Tokenizer) -> State {
    match parse_esm(tokenizer) {
        State::Ok => {
            // Parsing succeeded: leave concrete mode and close the token.
            tokenizer.concrete = false;
            tokenizer.exit(Name::MdxEsm);
            State::Ok
        }
        other => other,
    }
}
/// Run the user-provided `mdx_esm_parse` function on the collected source and
/// translate its `MdxSignal` into a `State`.
fn parse_esm(tokenizer: &mut Tokenizer) -> State {
    // `start` only enters this construct when `mdx_esm_parse` is configured,
    // so the unwrap cannot fail here.
    let parse = tokenizer
        .parse_state
        .options
        .mdx_esm_parse
        .as_ref()
        .unwrap();
    // Gather the block's text (data and line endings) starting at the event
    // index that `word` stored in `tokenize_state.start`.
    let result = collect(
        &tokenizer.events,
        tokenizer.parse_state.bytes,
        tokenizer.tokenize_state.start,
        &[Name::MdxEsmData, Name::LineEnding],
        &[],
    );
    match parse(&result.value) {
        MdxSignal::Ok => State::Ok,
        MdxSignal::Error(message, relative, source, rule_id) => {
            // Map the offset (relative to the collected string) back to a
            // point in the whole document via the collected stops.
            let point = tokenizer
                .parse_state
                .location
                .as_ref()
                .expect("expected location index if aware mdx is on")
                .relative_to_point(&result.stops, relative)
                .expect("expected non-empty string");
            State::Error(message::Message {
                place: Some(Box::new(message::Place::Point(point))),
                reason: message,
                source,
                rule_id,
            })
        }
        MdxSignal::Eof(message, source, rule_id) => {
            if tokenizer.current.is_none() {
                // Truly at the end of the file: the ESM is unfinished, so
                // report the error at the current point.
                State::Error(message::Message {
                    place: Some(Box::new(message::Place::Point(tokenizer.point.to_unist()))),
                    reason: message,
                    source,
                    rule_id,
                })
            } else {
                // Not at EOF yet: stash the error and keep consuming lines —
                // more input may still complete the ESM.
                tokenizer.tokenize_state.mdx_last_parse_error = Some((message, *source, *rule_id));
                State::Retry(StateName::MdxEsmContinuationStart)
            }
        }
    }
}