use crate::construct::partial_space_or_tab::{space_or_tab, space_or_tab_min_max};
use crate::event::{Content, Event, Kind, Link, Name};
use crate::resolve::Name as ResolveName;
use crate::state::{Name as StateName, State};
use crate::subtokenize::Subresult;
use crate::tokenizer::Tokenizer;
use crate::util::{constant::TAB_SIZE, skip};
use alloc::vec;
/// Start of a setext heading underline.
///
/// Only valid when the construct is enabled, we are not on a lazy or
/// piercing continuation line, and the directly preceding event (skipping
/// whitespace and line endings) is content or another underline.
pub fn start(tokenizer: &mut Tokenizer) -> State {
    // Guard: construct switched off, or we are in a lazy/pierced line.
    if !tokenizer.parse_state.options.constructs.heading_setext
        || tokenizer.lazy
        || tokenizer.pierce
        || tokenizer.events.is_empty()
    {
        return State::Nok;
    }

    // Find the last meaningful event before this line.
    let previous = skip::opt_back(
        &tokenizer.events,
        tokenizer.events.len() - 1,
        &[Name::LineEnding, Name::SpaceOrTab],
    );

    // An underline must follow paragraph-like content (or a prior
    // underline attempt); anything else cannot be "underlined".
    if !matches!(
        tokenizer.events[previous].name,
        Name::Content | Name::HeadingSetextUnderline
    ) {
        return State::Nok;
    }

    tokenizer.enter(Name::HeadingSetextUnderline);

    // Optional leading whitespace; when indented code is enabled the
    // indent must stay below `TAB_SIZE`, otherwise it is unbounded.
    if matches!(tokenizer.current, Some(b'\t' | b' ')) {
        let limit = if tokenizer.parse_state.options.constructs.code_indented {
            TAB_SIZE - 1
        } else {
            usize::MAX
        };
        tokenizer.attempt(State::Next(StateName::HeadingSetextBefore), State::Nok);
        State::Retry(space_or_tab_min_max(tokenizer, 0, limit))
    } else {
        State::Retry(StateName::HeadingSetextBefore)
    }
}
/// At the first marker of a setext heading underline.
///
/// Records which marker (`-` or `=`) opens the sequence so the rest of
/// the underline must repeat the same byte.
pub fn before(tokenizer: &mut Tokenizer) -> State {
    match tokenizer.current {
        Some(byte) if byte == b'-' || byte == b'=' => {
            // Remember the marker; `inside` only consumes this exact byte.
            tokenizer.tokenize_state.marker = byte;
            tokenizer.enter(Name::HeadingSetextUnderlineSequence);
            State::Retry(StateName::HeadingSetextInside)
        }
        _ => State::Nok,
    }
}
/// Inside the underline sequence.
///
/// Consumes repetitions of the recorded marker; on the first different
/// byte, closes the sequence and moves to trailing whitespace / `after`.
pub fn inside(tokenizer: &mut Tokenizer) -> State {
    // Same marker again: keep eating the sequence.
    if tokenizer.current == Some(tokenizer.tokenize_state.marker) {
        tokenizer.consume();
        return State::Next(StateName::HeadingSetextInside);
    }

    // Sequence ended: reset the marker and close the event.
    tokenizer.tokenize_state.marker = 0;
    tokenizer.exit(Name::HeadingSetextUnderlineSequence);

    // Allow trailing whitespace before the end of the line.
    if matches!(tokenizer.current, Some(b'\t' | b' ')) {
        tokenizer.attempt(State::Next(StateName::HeadingSetextAfter), State::Nok);
        State::Retry(space_or_tab(tokenizer))
    } else {
        State::Retry(StateName::HeadingSetextAfter)
    }
}
/// After the underline, before the line ending or EOF.
///
/// Accepts only end of input or a line feed; anything else invalidates
/// the underline.
pub fn after(tokenizer: &mut Tokenizer) -> State {
    if matches!(tokenizer.current, None | Some(b'\n')) {
        // The underline is complete; schedule the resolver that pairs
        // underlines with the paragraphs above them.
        tokenizer.interrupt = false;
        tokenizer.register_resolver(ResolveName::HeadingSetext);
        tokenizer.exit(Name::HeadingSetextUnderline);
        State::Ok
    } else {
        State::Nok
    }
}
/// Resolve setext heading underlines against the events around them.
///
/// For each `HeadingSetextUnderline` pair in the event list:
/// * if a paragraph directly precedes it, the paragraph + underline are
///   rewritten into a `HeadingSetext` heading;
/// * otherwise the underline is downgraded back into paragraph text,
///   merging with a following paragraph when one is adjacent.
///
/// Edits are staged through `tokenizer.map` and applied once at the end.
pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
    // Index of the current underline `Enter` event.
    let mut enter = skip::to(&tokenizer.events, 0, &[Name::HeadingSetextUnderline]);
    while enter < tokenizer.events.len() {
        // Matching underline `Exit` event.
        let exit = skip::to(
            &tokenizer.events,
            enter + 1,
            &[Name::HeadingSetextUnderline],
        );
        // Walk back over whitespace/eols/block-quote prefixes to find what
        // the underline follows.
        let paragraph_exit_before = skip::opt_back(
            &tokenizer.events,
            enter - 1,
            &[Name::SpaceOrTab, Name::LineEnding, Name::BlockQuotePrefix],
        );
        if tokenizer.events[paragraph_exit_before].name == Name::Paragraph {
            // Case 1: underline follows a paragraph — turn both into a
            // setext heading.
            let paragraph_enter = skip::to_back(
                &tokenizer.events,
                paragraph_exit_before - 1,
                &[Name::Paragraph],
            );
            // Change the paragraph events into heading-text events.
            tokenizer.events[paragraph_enter].name = Name::HeadingSetextText;
            tokenizer.events[paragraph_exit_before].name = Name::HeadingSetextText;
            // Inject a `HeadingSetext` enter before the text, reusing the
            // paragraph's position.
            let mut heading_enter = tokenizer.events[paragraph_enter].clone();
            heading_enter.name = Name::HeadingSetext;
            tokenizer.map.add(paragraph_enter, 0, vec![heading_enter]);
            // ...and a matching exit after the underline's exit.
            let mut heading_exit = tokenizer.events[exit].clone();
            heading_exit.name = Name::HeadingSetext;
            tokenizer.map.add(exit + 1, 0, vec![heading_exit]);
        } else {
            // Case 2: no preceding paragraph — the underline is not a
            // heading after all and must become paragraph text.
            if exit + 3 < tokenizer.events.len()
                && tokenizer.events[exit + 1].name == Name::LineEnding
                && tokenizer.events[exit + 3].name == Name::Paragraph
            {
                // A paragraph follows on the next line: merge into it.
                // The underline's enter becomes the paragraph enter...
                tokenizer.events[enter].name = Name::Paragraph;
                // ...and the line-ending pair becomes linked `Data` text
                // that chains into the following paragraph's content.
                tokenizer.events[exit + 1].name = Name::Data;
                tokenizer.events[exit + 2].name = Name::Data;
                tokenizer.events[exit + 1].point = tokenizer.events[enter].point.clone();
                tokenizer.events[exit + 1].link = Some(Link {
                    previous: None,
                    next: Some(exit + 4),
                    content: Content::Text,
                });
                // Back-link the paragraph's first content chunk to the
                // new data event. NOTE(review): `unwrap` assumes that
                // chunk always carries a link — TODO confirm invariant.
                tokenizer.events[exit + 4].link.as_mut().unwrap().previous = Some(exit + 1);
                // Drop everything between the new paragraph enter and the
                // line ending, plus the old paragraph enter.
                tokenizer.map.add(enter + 1, exit - enter, vec![]);
                tokenizer.map.add(exit + 3, 1, vec![]);
            } else {
                // Nothing to merge with: rewrite the underline pair as a
                // standalone paragraph wrapping a single `Data` span.
                tokenizer.events[enter].name = Name::Paragraph;
                tokenizer.events[exit].name = Name::Paragraph;
                // Replace the interior events with a Data enter/exit pair
                // spanning the underline's text.
                tokenizer.map.add(
                    enter + 1,
                    exit - enter - 1,
                    vec![
                        Event {
                            name: Name::Data,
                            kind: Kind::Enter,
                            point: tokenizer.events[enter].point.clone(),
                            link: Some(Link {
                                previous: None,
                                next: None,
                                content: Content::Text,
                            }),
                        },
                        Event {
                            name: Name::Data,
                            kind: Kind::Exit,
                            point: tokenizer.events[exit].point.clone(),
                            link: None,
                        },
                    ],
                );
            }
        }
        // Advance to the next underline, if any.
        enter = skip::to(&tokenizer.events, exit + 1, &[Name::HeadingSetextUnderline]);
    }
    // Apply all staged edits to the event list.
    tokenizer.map.consume(&mut tokenizer.events);
    None
}