use crate::construct::partial_space_or_tab::{space_or_tab, space_or_tab_min_max};
use crate::event::{Content, Link, Name};
use crate::state::{Name as StateName, State};
use crate::tokenizer::Tokenizer;
use crate::util::{
constant::{CODE_FENCED_SEQUENCE_SIZE_MIN, MATH_FLOW_SEQUENCE_SIZE_MIN, TAB_SIZE},
slice::{Position, Slice},
};
/// Start of raw flow (fenced code or math flow).
///
/// Fails immediately when neither construct is enabled. Otherwise, optional
/// leading whitespace is consumed before checking for a fence marker
/// (`` ` ``, `~`, or `$`).
pub fn start(tokenizer: &mut Tokenizer) -> State {
    // Copy the flags out so the later `&mut` calls don't conflict.
    let code_fenced = tokenizer.parse_state.options.constructs.code_fenced;
    let math_flow = tokenizer.parse_state.options.constructs.math_flow;
    let code_indented = tokenizer.parse_state.options.constructs.code_indented;

    if !code_fenced && !math_flow {
        return State::Nok;
    }

    match tokenizer.current {
        // Optional leading whitespace before the opening sequence.
        Some(b'\t' | b' ') => {
            tokenizer.attempt(
                State::Next(StateName::RawFlowBeforeSequenceOpen),
                State::Nok,
            );
            // When indented code is also possible, allow at most
            // `TAB_SIZE - 1` columns of whitespace; otherwise any amount.
            let max = if code_indented { TAB_SIZE - 1 } else { usize::MAX };
            State::Retry(space_or_tab_min_max(tokenizer, 0, max))
        }
        // Directly at a potential fence marker.
        Some(b'$' | b'`' | b'~') => State::Retry(StateName::RawFlowBeforeSequenceOpen),
        _ => State::Nok,
    }
}
/// Before the opening sequence, after optional whitespace.
///
/// Records the whitespace prefix size (used later to strip indentation from
/// content lines), picks the token names for either math flow (`$`) or
/// fenced code (`` ` `` / `~`), and opens the outer events.
pub fn before_sequence_open(tokenizer: &mut Tokenizer) -> State {
    // Size of the whitespace prefix just consumed, if any.
    let prefix = match tokenizer.events.last() {
        Some(event) if event.name == Name::SpaceOrTab => Slice::from_position(
            tokenizer.parse_state.bytes,
            &Position::from_exit_event(&tokenizer.events, tokenizer.events.len() - 1),
        )
        .len(),
        _ => 0,
    };

    let is_code = tokenizer.parse_state.options.constructs.code_fenced
        && matches!(tokenizer.current, Some(b'`' | b'~'));
    let is_math =
        tokenizer.parse_state.options.constructs.math_flow && tokenizer.current == Some(b'$');

    if !is_code && !is_math {
        return State::Nok;
    }

    tokenizer.tokenize_state.marker = tokenizer.current.unwrap();
    tokenizer.tokenize_state.size_c = prefix;

    if is_math {
        // Math flow has no info token (`token_4` stays untouched).
        tokenizer.tokenize_state.token_1 = Name::MathFlow;
        tokenizer.tokenize_state.token_2 = Name::MathFlowFence;
        tokenizer.tokenize_state.token_3 = Name::MathFlowFenceSequence;
        tokenizer.tokenize_state.token_5 = Name::MathFlowFenceMeta;
        tokenizer.tokenize_state.token_6 = Name::MathFlowChunk;
    } else {
        tokenizer.tokenize_state.token_1 = Name::CodeFenced;
        tokenizer.tokenize_state.token_2 = Name::CodeFencedFence;
        tokenizer.tokenize_state.token_3 = Name::CodeFencedFenceSequence;
        tokenizer.tokenize_state.token_4 = Name::CodeFencedFenceInfo;
        tokenizer.tokenize_state.token_5 = Name::CodeFencedFenceMeta;
        tokenizer.tokenize_state.token_6 = Name::CodeFlowChunk;
    }

    tokenizer.enter(tokenizer.tokenize_state.token_1.clone());
    tokenizer.enter(tokenizer.tokenize_state.token_2.clone());
    tokenizer.enter(tokenizer.tokenize_state.token_3.clone());
    State::Retry(StateName::RawFlowSequenceOpen)
}
/// In the opening sequence.
///
/// Counts marker repetitions; a sequence shorter than the construct's
/// minimum is rejected (and all construct state reset), otherwise parsing
/// continues at the info (code) or meta (math) part.
pub fn sequence_open(tokenizer: &mut Tokenizer) -> State {
    // Still on the marker: keep counting.
    if tokenizer.current == Some(tokenizer.tokenize_state.marker) {
        tokenizer.tokenize_state.size += 1;
        tokenizer.consume();
        return State::Next(StateName::RawFlowSequenceOpen);
    }

    let min = if tokenizer.tokenize_state.marker == b'$' {
        MATH_FLOW_SEQUENCE_SIZE_MIN
    } else {
        CODE_FENCED_SEQUENCE_SIZE_MIN
    };

    // Too short to be a fence: reset everything and fail.
    if tokenizer.tokenize_state.size < min {
        tokenizer.tokenize_state.marker = 0;
        tokenizer.tokenize_state.size_c = 0;
        tokenizer.tokenize_state.size = 0;
        tokenizer.tokenize_state.token_1 = Name::Data;
        tokenizer.tokenize_state.token_2 = Name::Data;
        tokenizer.tokenize_state.token_3 = Name::Data;
        tokenizer.tokenize_state.token_4 = Name::Data;
        tokenizer.tokenize_state.token_5 = Name::Data;
        tokenizer.tokenize_state.token_6 = Name::Data;
        return State::Nok;
    }

    // Sequence done: move on, past optional whitespace, to the info (code)
    // or meta (math) part.
    let next = if tokenizer.tokenize_state.marker == b'$' {
        StateName::RawFlowMetaBefore
    } else {
        StateName::RawFlowInfoBefore
    };
    tokenizer.exit(tokenizer.tokenize_state.token_3.clone());

    if matches!(tokenizer.current, Some(b'\t' | b' ')) {
        tokenizer.attempt(State::Next(next), State::Nok);
        State::Retry(space_or_tab(tokenizer))
    } else {
        State::Retry(next)
    }
}
/// Before the info string, after the opening sequence and optional
/// whitespace.
///
/// At eol/eof the opening fence is complete; otherwise the info string
/// starts.
pub fn info_before(tokenizer: &mut Tokenizer) -> State {
    if matches!(tokenizer.current, None | Some(b'\n')) {
        // Opening fence is done.
        tokenizer.exit(tokenizer.tokenize_state.token_2.clone());
        // From here on, the construct is concrete.
        tokenizer.concrete = true;
        // Check whether the next line is non-lazy; if not, we're done.
        tokenizer.check(
            State::Next(StateName::RawFlowAtNonLazyBreak),
            State::Next(StateName::RawFlowAfter),
        );
        State::Retry(StateName::NonLazyContinuationStart)
    } else {
        tokenizer.enter(tokenizer.tokenize_state.token_4.clone());
        // The info string is parsed later as string content.
        tokenizer.enter_link(
            Name::Data,
            Link {
                previous: None,
                next: None,
                content: Content::String,
            },
        );
        State::Retry(StateName::RawFlowInfo)
    }
}
/// In the info string.
///
/// Ends at eol/eof or whitespace (which leads into the meta part). The
/// fence marker itself (`` ` `` or `$`, but not `~`) may not appear in the
/// info string; seeing it rejects the whole construct.
pub fn info(tokenizer: &mut Tokenizer) -> State {
    match tokenizer.current {
        None | Some(b'\n') => {
            tokenizer.exit(Name::Data);
            tokenizer.exit(tokenizer.tokenize_state.token_4.clone());
            State::Retry(StateName::RawFlowInfoBefore)
        }
        Some(b'\t' | b' ') => {
            // Whitespace separates info from meta.
            tokenizer.exit(Name::Data);
            tokenizer.exit(tokenizer.tokenize_state.token_4.clone());
            tokenizer.attempt(State::Next(StateName::RawFlowMetaBefore), State::Nok);
            State::Retry(space_or_tab(tokenizer))
        }
        // The marker in the info string: not a fence after all. Reset
        // everything, including the concrete flag set earlier.
        Some(byte)
            if byte == tokenizer.tokenize_state.marker && matches!(byte, b'$' | b'`') =>
        {
            tokenizer.concrete = false;
            tokenizer.tokenize_state.marker = 0;
            tokenizer.tokenize_state.size_c = 0;
            tokenizer.tokenize_state.size = 0;
            tokenizer.tokenize_state.token_1 = Name::Data;
            tokenizer.tokenize_state.token_2 = Name::Data;
            tokenizer.tokenize_state.token_3 = Name::Data;
            tokenizer.tokenize_state.token_4 = Name::Data;
            tokenizer.tokenize_state.token_5 = Name::Data;
            tokenizer.tokenize_state.token_6 = Name::Data;
            State::Nok
        }
        _ => {
            tokenizer.consume();
            State::Next(StateName::RawFlowInfo)
        }
    }
}
/// Before the meta string, after the info string and whitespace.
///
/// At eol/eof there is no meta; otherwise the meta string starts.
pub fn meta_before(tokenizer: &mut Tokenizer) -> State {
    if matches!(tokenizer.current, None | Some(b'\n')) {
        State::Retry(StateName::RawFlowInfoBefore)
    } else {
        tokenizer.enter(tokenizer.tokenize_state.token_5.clone());
        // The meta string is parsed later as string content.
        tokenizer.enter_link(
            Name::Data,
            Link {
                previous: None,
                next: None,
                content: Content::String,
            },
        );
        State::Retry(StateName::RawFlowMeta)
    }
}
/// In the meta string.
///
/// Ends at eol/eof. As with the info string, the fence marker (`` ` `` or
/// `$`, but not `~`) may not appear; seeing it rejects the construct.
pub fn meta(tokenizer: &mut Tokenizer) -> State {
    match tokenizer.current {
        None | Some(b'\n') => {
            tokenizer.exit(Name::Data);
            tokenizer.exit(tokenizer.tokenize_state.token_5.clone());
            State::Retry(StateName::RawFlowInfoBefore)
        }
        // The marker in the meta string: not a fence after all. Reset
        // everything, including the concrete flag set earlier.
        Some(byte)
            if byte == tokenizer.tokenize_state.marker && matches!(byte, b'$' | b'`') =>
        {
            tokenizer.concrete = false;
            tokenizer.tokenize_state.marker = 0;
            tokenizer.tokenize_state.size_c = 0;
            tokenizer.tokenize_state.size = 0;
            tokenizer.tokenize_state.token_1 = Name::Data;
            tokenizer.tokenize_state.token_2 = Name::Data;
            tokenizer.tokenize_state.token_3 = Name::Data;
            tokenizer.tokenize_state.token_4 = Name::Data;
            tokenizer.tokenize_state.token_5 = Name::Data;
            tokenizer.tokenize_state.token_6 = Name::Data;
            State::Nok
        }
        _ => {
            tokenizer.consume();
            State::Next(StateName::RawFlowMeta)
        }
    }
}
/// At a non-lazy line ending inside the construct.
///
/// Attempts to parse a closing fence on the next line; if that succeeds the
/// construct is done (`RawFlowAfter`), otherwise the line is more content
/// (`RawFlowContentBefore`).
pub fn at_non_lazy_break(tokenizer: &mut Tokenizer) -> State {
    // Register where to go after the closing-fence attempt resolves.
    tokenizer.attempt(
        State::Next(StateName::RawFlowAfter),
        State::Next(StateName::RawFlowContentBefore),
    );
    // Consume the line ending itself before trying the closing fence.
    tokenizer.enter(Name::LineEnding);
    tokenizer.consume();
    tokenizer.exit(Name::LineEnding);
    State::Next(StateName::RawFlowCloseStart)
}
/// Start of a potential closing fence, at the beginning of a line.
///
/// Opens the fence event and consumes optional leading whitespace before
/// the closing sequence.
pub fn close_start(tokenizer: &mut Tokenizer) -> State {
    tokenizer.enter(tokenizer.tokenize_state.token_2.clone());

    match tokenizer.current {
        Some(b'\t' | b' ') => {
            tokenizer.attempt(
                State::Next(StateName::RawFlowBeforeSequenceClose),
                State::Nok,
            );
            // Same whitespace limit as the opening fence.
            let max = if tokenizer.parse_state.options.constructs.code_indented {
                TAB_SIZE - 1
            } else {
                usize::MAX
            };
            State::Retry(space_or_tab_min_max(tokenizer, 0, max))
        }
        _ => State::Retry(StateName::RawFlowBeforeSequenceClose),
    }
}
/// Before a closing sequence, after optional whitespace.
///
/// Only the same marker as the opening fence can start a closing sequence.
pub fn before_sequence_close(tokenizer: &mut Tokenizer) -> State {
    if tokenizer.current != Some(tokenizer.tokenize_state.marker) {
        return State::Nok;
    }

    tokenizer.enter(tokenizer.tokenize_state.token_3.clone());
    State::Retry(StateName::RawFlowSequenceClose)
}
/// In the closing sequence.
///
/// Counts marker repetitions (`size_b`); the closing sequence must be at
/// least as long as the opening one (`size`). `size_b` is cleared on every
/// exit path so it is fresh for the next attempt.
pub fn sequence_close(tokenizer: &mut Tokenizer) -> State {
    // Still on the marker: keep counting.
    if tokenizer.current == Some(tokenizer.tokenize_state.marker) {
        tokenizer.tokenize_state.size_b += 1;
        tokenizer.consume();
        return State::Next(StateName::RawFlowSequenceClose);
    }

    let long_enough = tokenizer.tokenize_state.size_b >= tokenizer.tokenize_state.size;
    tokenizer.tokenize_state.size_b = 0;

    if !long_enough {
        return State::Nok;
    }

    tokenizer.exit(tokenizer.tokenize_state.token_3.clone());

    // Optional trailing whitespace after the closing sequence.
    if matches!(tokenizer.current, Some(b'\t' | b' ')) {
        tokenizer.attempt(
            State::Next(StateName::RawFlowAfterSequenceClose),
            State::Nok,
        );
        State::Retry(space_or_tab(tokenizer))
    } else {
        State::Retry(StateName::RawFlowAfterSequenceClose)
    }
}
/// After the closing sequence and optional trailing whitespace.
///
/// Only eol/eof completes the closing fence; anything else rejects it.
pub fn sequence_close_after(tokenizer: &mut Tokenizer) -> State {
    if matches!(tokenizer.current, None | Some(b'\n')) {
        tokenizer.exit(tokenizer.tokenize_state.token_2.clone());
        State::Ok
    } else {
        State::Nok
    }
}
/// Before a content line, at the line ending that precedes it.
///
/// Consumes the line ending, then starts the content line proper.
pub fn content_before(tokenizer: &mut Tokenizer) -> State {
    tokenizer.enter(Name::LineEnding);
    tokenizer.consume();
    tokenizer.exit(Name::LineEnding);
    State::Next(StateName::RawFlowContentStart)
}
/// At the start of a content line.
///
/// Strips up to `size_c` columns of leading whitespace — the prefix that
/// preceded the opening fence — before the chunk itself.
pub fn content_start(tokenizer: &mut Tokenizer) -> State {
    if !matches!(tokenizer.current, Some(b'\t' | b' ')) {
        return State::Retry(StateName::RawFlowBeforeContentChunk);
    }

    tokenizer.attempt(
        State::Next(StateName::RawFlowBeforeContentChunk),
        State::Nok,
    );
    let max = tokenizer.tokenize_state.size_c;
    State::Retry(space_or_tab_min_max(tokenizer, 0, max))
}
/// Before a content chunk, after any stripped indentation.
///
/// At eol/eof, checks for continuation (non-lazy line) or the end of the
/// construct; otherwise a chunk of raw content starts.
pub fn before_content_chunk(tokenizer: &mut Tokenizer) -> State {
    if matches!(tokenizer.current, None | Some(b'\n')) {
        tokenizer.check(
            State::Next(StateName::RawFlowAtNonLazyBreak),
            State::Next(StateName::RawFlowAfter),
        );
        State::Retry(StateName::NonLazyContinuationStart)
    } else {
        tokenizer.enter(tokenizer.tokenize_state.token_6.clone());
        State::Retry(StateName::RawFlowContentChunk)
    }
}
/// In a content chunk.
///
/// Consumes everything up to the next eol/eof.
pub fn content_chunk(tokenizer: &mut Tokenizer) -> State {
    if matches!(tokenizer.current, None | Some(b'\n')) {
        tokenizer.exit(tokenizer.tokenize_state.token_6.clone());
        State::Retry(StateName::RawFlowBeforeContentChunk)
    } else {
        tokenizer.consume();
        State::Next(StateName::RawFlowContentChunk)
    }
}
/// After the whole construct.
///
/// Closes the outer event and resets all construct state and tokenizer
/// flags used while parsing it.
pub fn after(tokenizer: &mut Tokenizer) -> State {
    tokenizer.exit(tokenizer.tokenize_state.token_1.clone());

    // Clear all construct-specific state.
    let state = &mut tokenizer.tokenize_state;
    state.marker = 0;
    state.size_c = 0;
    state.size = 0;
    state.token_1 = Name::Data;
    state.token_2 = Name::Data;
    state.token_3 = Name::Data;
    state.token_4 = Name::Data;
    state.token_5 = Name::Data;
    state.token_6 = Name::Data;

    // No longer interrupting, no longer concrete.
    tokenizer.interrupt = false;
    tokenizer.concrete = false;
    State::Ok
}