//! Heading (atx) construct.
//!
//! An ATX heading is an opening sequence of one to six
//! (`HEADING_ATX_OPENING_FENCE_SIZE_MAX`) `#` characters, optionally followed
//! by whitespace, heading text, and an optional closing sequence of `#`s.
//! The functions below form the state machine that tokenizes the construct;
//! `resolve` runs afterwards to group the heading text.

use crate::construct::partial_space_or_tab::{space_or_tab, space_or_tab_min_max};
use crate::event::{Content, Event, Kind, Link, Name};
use crate::resolve::Name as ResolveName;
use crate::state::{Name as StateName, State};
use crate::subtokenize::Subresult;
use crate::tokenizer::Tokenizer;
use crate::util::constant::{HEADING_ATX_OPENING_FENCE_SIZE_MAX, TAB_SIZE};
use alloc::vec;
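
/// Start of a heading (atx).
///
/// ```markdown
/// > | ## aa
///     ^
/// ```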
pub fn start(tokenizer: &mut Tokenizer) -> State {
    if tokenizer.parse_state.options.constructs.heading_atx {
        tokenizer.enter(Name::HeadingAtx);

        if matches!(tokenizer.current, Some(b'\t' | b' ')) {
            // Consume initial whitespace: at most `TAB_SIZE - 1` when indented
            // code is enabled (more would be indented code), unbounded otherwise.
            tokenizer.attempt(State::Next(StateName::HeadingAtxBefore), State::Nok);
            State::Retry(space_or_tab_min_max(
                tokenizer,
                0,
                if tokenizer.parse_state.options.constructs.code_indented {
                    TAB_SIZE - 1
                } else {
                    usize::MAX
                },
            ))
        } else {
            State::Retry(StateName::HeadingAtxBefore)
        }
    } else {
        State::Nok
    }
}
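
/// After optional whitespace, before the opening `#` sequence.
///
/// ```markdown
/// > | ## aa
///     ^
/// ```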
pub fn before(tokenizer: &mut Tokenizer) -> State {
    if Some(b'#') == tokenizer.current {
        tokenizer.enter(Name::HeadingAtxSequence);
        State::Retry(StateName::HeadingAtxSequenceOpen)
    } else {
        State::Nok
    }
}
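
/// In the opening sequence.
///
/// ```markdown
/// > | ## aa
///     ^
/// ```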
pub fn sequence_open(tokenizer: &mut Tokenizer) -> State {
    if tokenizer.current == Some(b'#')
        && tokenizer.tokenize_state.size < HEADING_ATX_OPENING_FENCE_SIZE_MAX
    {
        tokenizer.tokenize_state.size += 1;
        tokenizer.consume();
        State::Next(StateName::HeadingAtxSequenceOpen)
    } else if matches!(tokenizer.current, None | Some(b'\t' | b'\n' | b' ')) {
        // The opening sequence must be followed by whitespace, a line ending, or EOF.
        tokenizer.tokenize_state.size = 0;
        tokenizer.exit(Name::HeadingAtxSequence);
        State::Retry(StateName::HeadingAtxAtBreak)
    } else {
        // Anything else (including a seventh `#`) is not a heading.
        tokenizer.tokenize_state.size = 0;
        State::Nok
    }
}
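
/// After something: before whitespace, a further sequence, heading text, or
/// the end of the heading.
///
/// ```markdown
/// > | ## aa
///       ^
/// ```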
pub fn at_break(tokenizer: &mut Tokenizer) -> State {
    match tokenizer.current {
        None | Some(b'\n') => {
            tokenizer.exit(Name::HeadingAtx);
            tokenizer.register_resolver(ResolveName::HeadingAtx);
            tokenizer.interrupt = false;
            State::Ok
        }
        Some(b'\t' | b' ') => {
            tokenizer.attempt(State::Next(StateName::HeadingAtxAtBreak), State::Nok);
            State::Retry(space_or_tab(tokenizer))
        }
        Some(b'#') => {
            tokenizer.enter(Name::HeadingAtxSequence);
            State::Retry(StateName::HeadingAtxSequenceFurther)
        }
        Some(_) => {
            tokenizer.enter_link(
                Name::Data,
                Link {
                    previous: None,
                    next: None,
                    content: Content::Text,
                },
            );
            State::Retry(StateName::HeadingAtxData)
        }
    }
}
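
/// In a further sequence: either "visible" `#`s in the heading text or a
/// closing sequence.
///
/// ```markdown
/// > | ## aa ##
///           ^
/// ```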
pub fn sequence_further(tokenizer: &mut Tokenizer) -> State {
    if let Some(b'#') = tokenizer.current {
        tokenizer.consume();
        State::Next(StateName::HeadingAtxSequenceFurther)
    } else {
        tokenizer.exit(Name::HeadingAtxSequence);
        State::Retry(StateName::HeadingAtxAtBreak)
    }
}
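
/// In heading text.
///
/// ```markdown
/// > | ## aa
///        ^
/// ```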
pub fn data(tokenizer: &mut Tokenizer) -> State {
    match tokenizer.current {
        // Whitespace, a line ending, or EOF ends the data chunk; continue at
        // the break state.
        None | Some(b'\t' | b'\n' | b' ') => {
            tokenizer.exit(Name::Data);
            State::Retry(StateName::HeadingAtxAtBreak)
        }
        _ => {
            tokenizer.consume();
            State::Next(StateName::HeadingAtxData)
        }
    }
}
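
/// Resolve headings (atx): wrap each heading's text in a `HeadingAtxText`
/// event group.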
pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
    let mut index = 0;
    let mut heading_inside = false;
    let mut data_start: Option<usize> = None;
    let mut data_end: Option<usize> = None;

    while index < tokenizer.events.len() {
        let event = &tokenizer.events[index];

        if event.name == Name::HeadingAtx {
            if event.kind == Kind::Enter {
                heading_inside = true;
            } else {
                if let Some(start) = data_start {
                    // If `data_start` is set, `data_end` is too.
                    let end = data_end.unwrap();

                    // Inject a text enter event before the first data event…
                    tokenizer.map.add(
                        start,
                        0,
                        vec![Event {
                            kind: Kind::Enter,
                            name: Name::HeadingAtxText,
                            point: tokenizer.events[start].point.clone(),
                            link: None,
                        }],
                    );

                    // …remove everything in between, so one data pair spans the text…
                    tokenizer.map.add(start + 1, end - start - 1, vec![]);

                    // …and inject the matching text exit event after the last data event.
                    tokenizer.map.add(
                        end + 1,
                        0,
                        vec![Event {
                            kind: Kind::Exit,
                            name: Name::HeadingAtxText,
                            point: tokenizer.events[end].point.clone(),
                            link: None,
                        }],
                    );
                }

                heading_inside = false;
                data_start = None;
                data_end = None;
            }
        } else if heading_inside && event.name == Name::Data {
            if event.kind == Kind::Enter {
                if data_start.is_none() {
                    data_start = Some(index);
                }
            } else {
                data_end = Some(index);
            }
        }

        index += 1;
    }

    tokenizer.map.consume(&mut tokenizer.events);

    None
}