// oak_markdown/lexer/mod.rs

1#![doc = include_str!("readme.md")]
2/// Token types for the Markdown language.
3pub mod token_type;
4
5use crate::{language::MarkdownLanguage, lexer::token_type::MarkdownTokenType};
6use oak_core::{Lexer, LexerCache, LexerState, TextEdit, errors::OakError, lexer::LexOutput, source::Source};
7
/// Shorthand for the shared lexer state specialised to the Markdown language.
pub(crate) type State<'a, S> = LexerState<'a, S, MarkdownLanguage>;
9
/// Lexer for Markdown language.
#[derive(Clone, Debug)]
pub struct MarkdownLexer<'config> {
    // Borrowed language configuration; its `allow_*` flags gate which
    // Markdown extensions (tables, math, footnotes, …) are recognised.
    config: &'config MarkdownLanguage,
}
15
16impl<'config> MarkdownLexer<'config> {
17    /// Creates a new MarkdownLexer with the given configuration.
18    pub fn new(config: &'config MarkdownLanguage) -> Self {
19        Self { config }
20    }
21
22    fn run<S: Source + ?Sized>(&self, state: &mut State<S>) -> Result<(), OakError> {
23        while state.not_at_end() {
24            let safe_point = state.get_position();
25
26            if let Some(ch) = state.peek() {
27                match ch {
28                    ' ' | '\t' => {
29                        if self.config.allow_indented_code_blocks && self.lex_indented_code_block(state) {
30                            continue;
31                        }
32                        self.skip_whitespace(state);
33                    }
34                    '\n' | '\r' => {
35                        self.lex_newline(state);
36                    }
37                    '$' if self.config.allow_math => {
38                        if self.lex_math(state) {
39                            continue;
40                        }
41                        self.lex_special_char(state);
42                    }
43                    '^' if self.config.allow_sub_superscript || self.config.allow_footnotes => {
44                        if self.config.allow_footnotes && self.lex_footnote(state) {
45                            continue;
46                        }
47                        if self.config.allow_sub_superscript && self.lex_sub_superscript(state) {
48                            continue;
49                        }
50                        self.lex_special_char(state);
51                    }
52                    '#' => {
53                        if self.config.allow_headings && self.lex_heading(state) {
54                            continue;
55                        }
56                        self.lex_special_char(state);
57                    }
58                    '`' => {
59                        if self.config.allow_fenced_code_blocks && self.lex_code_block(state) {
60                            continue;
61                        }
62                        if self.lex_inline_code(state) {
63                            continue;
64                        }
65                        self.lex_special_char(state);
66                    }
67                    '~' => {
68                        if self.lex_code_block(state) {
69                            continue;
70                        }
71                        if self.config.allow_strikethrough && self.lex_strikethrough(state) {
72                            continue;
73                        }
74                        if self.config.allow_sub_superscript && self.lex_sub_superscript(state) {
75                            continue;
76                        }
77                        self.lex_special_char(state);
78                    }
79                    '*' | '_' => {
80                        if self.config.allow_horizontal_rules && self.lex_horizontal_rule(state) {
81                            continue;
82                        }
83                        if self.config.allow_lists && self.lex_list_marker(state) {
84                            continue;
85                        }
86                        if self.lex_emphasis(state) {
87                            continue;
88                        }
89                        self.lex_special_char(state);
90                    }
91                    '-' => {
92                        if self.config.allow_front_matter && self.lex_front_matter(state) {
93                            continue;
94                        }
95                        if self.config.allow_horizontal_rules && self.lex_horizontal_rule(state) {
96                            continue;
97                        }
98                        if self.config.allow_lists && self.lex_list_marker(state) {
99                            continue;
100                        }
101                        self.lex_special_char(state);
102                    }
103                    '+' => {
104                        if self.config.allow_lists && self.lex_list_marker(state) {
105                            continue;
106                        }
107                        self.lex_special_char(state);
108                    }
109                    '!' => {
110                        if self.lex_link_or_image(state) {
111                            continue;
112                        }
113                        self.lex_special_char(state);
114                    }
115                    '[' => {
116                        if self.config.allow_task_lists && self.lex_task_marker(state) {
117                            continue;
118                        }
119                        if self.lex_link_or_image(state) {
120                            continue;
121                        }
122                        self.lex_special_char(state);
123                    }
124                    '>' => {
125                        if self.config.allow_blockquotes && self.lex_blockquote(state) {
126                            continue;
127                        }
128                        self.lex_special_char(state);
129                    }
130                    '|' if self.config.allow_tables => {
131                        self.lex_special_char(state);
132                    }
133                    '0'..='9' => {
134                        if self.lex_list_marker(state) {
135                            continue;
136                        }
137                        self.lex_text(state);
138                    }
139                    '<' => {
140                        if self.config.allow_html && self.lex_html_tag(state) {
141                            continue;
142                        }
143                        if self.config.allow_xml && self.lex_xml_tag(state) {
144                            continue;
145                        }
146                        self.lex_special_char(state);
147                    }
148                    ']' | '(' | ')' | '|' | '.' | ':' | '\\' => {
149                        self.lex_special_char(state);
150                    }
151                    _ => {
152                        if self.lex_text(state) {
153                            continue;
154                        }
155                        // If no rules match, skip current character and mark as error
156                        let start_pos = state.get_position();
157                        state.advance(ch.len_utf8());
158                        state.add_token(MarkdownTokenType::Error, start_pos, state.get_position());
159                    }
160                }
161            }
162
163            state.advance_if_dead_lock(safe_point)
164        }
165        Ok(())
166    }
167
168    /// Skips whitespace
169    fn skip_whitespace<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
170        let start_pos = state.get_position();
171
172        while let Some(ch) = state.peek() {
173            if ch == ' ' || ch == '\t' {
174                state.advance(ch.len_utf8());
175            }
176            else {
177                break;
178            }
179        }
180
181        if state.get_position() > start_pos {
182            state.add_token(MarkdownTokenType::Whitespace, start_pos, state.get_position());
183            true
184        }
185        else {
186            false
187        }
188    }
189
190    /// Handles newlines
191    fn lex_newline<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
192        let start_pos = state.get_position();
193
194        if let Some('\n') = state.peek() {
195            state.advance(1);
196            state.add_token(MarkdownTokenType::Newline, start_pos, state.get_position());
197            true
198        }
199        else if let Some('\r') = state.peek() {
200            state.advance(1);
201            if let Some('\n') = state.peek() {
202                state.advance(1);
203            }
204            state.add_token(MarkdownTokenType::Newline, start_pos, state.get_position());
205            true
206        }
207        else {
208            false
209        }
210    }
211
212    /// Handles headings.
213    fn lex_heading<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
214        let start_pos = state.get_position();
215
216        // Check if at the beginning of a line.
217        if start_pos > 0 {
218            if let Some(prev_char) = state.source().get_char_at(start_pos - 1) {
219                if prev_char != '\n' && prev_char != '\r' {
220                    return false;
221                }
222            }
223        }
224
225        if let Some('#') = state.peek() {
226            let mut level = 0;
227            let mut pos = start_pos;
228
229            // Count the number of '#'.
230            while let Some('#') = state.source().get_char_at(pos) {
231                level += 1;
232                pos += 1;
233                if level > 6 {
234                    return false; // More than 6 levels, not a valid heading.
235                }
236            }
237
238            // Check if there is whitespace after '#'.
239            if let Some(ch) = state.source().get_char_at(pos) {
240                if ch != ' ' && ch != '\t' && ch != '\n' && ch != '\r' {
241                    return false;
242                }
243            }
244
245            state.advance(level);
246
247            let heading_kind = match level {
248                1 => MarkdownTokenType::Heading1,
249                2 => MarkdownTokenType::Heading2,
250                3 => MarkdownTokenType::Heading3,
251                4 => MarkdownTokenType::Heading4,
252                5 => MarkdownTokenType::Heading5,
253                6 => MarkdownTokenType::Heading6,
254                _ => return false,
255            };
256
257            state.add_token(heading_kind, start_pos, state.get_position());
258            true
259        }
260        else {
261            false
262        }
263    }
264
265    /// Handles inline code.
266    fn lex_inline_code<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
267        let start_pos = state.get_position();
268
269        if let Some('`') = state.peek() {
270            state.advance(1);
271            let mut found_end = false;
272
273            while let Some(ch) = state.peek() {
274                if ch == '`' {
275                    state.advance(1);
276                    found_end = true;
277                    break;
278                }
279                else if ch == '\n' || ch == '\r' {
280                    break; // Inline code cannot span lines.
281                }
282                else {
283                    state.advance(ch.len_utf8());
284                }
285            }
286
287            if found_end {
288                state.add_token(MarkdownTokenType::InlineCode, start_pos, state.get_position());
289                true
290            }
291            else {
292                // Backtrack to start position.
293                state.set_position(start_pos);
294                false
295            }
296        }
297        else {
298            false
299        }
300    }
301
    /// Handles code blocks.
    ///
    /// Lexes the opening fence of a fenced code block (``` or ~~~, at least
    /// three fence characters) at the start of a line, emitting a `CodeFence`
    /// token for the fence and, when present, a `CodeLanguage` token for the
    /// info string that immediately follows it. The block body and closing
    /// fence are left for subsequent iterations of the main loop.
    fn lex_code_block<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
        let start_pos = state.get_position();

        // Check if at the beginning of a line.
        if start_pos > 0 {
            if let Some(prev_char) = state.source().get_char_at(start_pos - 1) {
                if prev_char != '\n' && prev_char != '\r' {
                    return false;
                }
            }
        }

        // Check if it is ``` or ~~~.
        let fence_char = if let Some('`') = state.peek() {
            '`'
        }
        else if let Some('~') = state.peek() {
            '~'
        }
        else {
            return false;
        };

        let mut fence_count = 0;
        let mut pos = start_pos;

        // Count fence characters by lookahead; nothing is consumed yet.
        while let Some(ch) = state.source().get_char_at(pos) {
            if ch == fence_char {
                fence_count += 1;
                pos += 1;
            }
            else {
                break;
            }
        }

        if fence_count < 3 {
            return false; // At least 3 fence characters are required.
        }

        state.advance(fence_count);
        state.add_token(MarkdownTokenType::CodeFence, start_pos, state.get_position());

        // Handle language identifier: consume up to the first whitespace or
        // line break. NOTE(review): a space between the fence and the
        // language (e.g. "``` rust") stops this scan immediately, so no
        // language token is produced in that case — confirm intended.
        let lang_start = state.get_position();
        while let Some(ch) = state.peek() {
            if ch == '\n' || ch == '\r' {
                break;
            }
            else if ch != ' ' && ch != '\t' {
                state.advance(ch.len_utf8());
            }
            else {
                break;
            }
        }

        if state.get_position() > lang_start {
            state.add_token(MarkdownTokenType::CodeLanguage, lang_start, state.get_position());
        }

        true
    }
367
368    /// Handles emphasis and strong.
369    fn lex_emphasis<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
370        let start_pos = state.get_position();
371
372        let marker_char = if let Some('*') = state.peek() {
373            '*'
374        }
375        else if let Some('_') = state.peek() {
376            '_'
377        }
378        else {
379            return false;
380        };
381
382        let mut marker_count = 0;
383        let mut pos = start_pos;
384
385        // Count marker characters.
386        while let Some(ch) = state.source().get_char_at(pos) {
387            if ch == marker_char {
388                marker_count += 1;
389                pos += 1;
390            }
391            else {
392                break;
393            }
394        }
395
396        if marker_count == 0 {
397            return false;
398        }
399
400        state.advance(marker_count);
401
402        let token_kind = if marker_count >= 2 { MarkdownTokenType::Strong } else { MarkdownTokenType::Emphasis };
403
404        state.add_token(token_kind, start_pos, state.get_position());
405        true
406    }
407
408    /// Handles strikethrough.
409    fn lex_strikethrough<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
410        let start_pos = state.get_position();
411
412        if let Some('~') = state.peek() {
413            if let Some('~') = state.source().get_char_at(start_pos + 1) {
414                state.advance(2);
415                state.add_token(MarkdownTokenType::Strikethrough, start_pos, state.get_position());
416                true
417            }
418            else {
419                false
420            }
421        }
422        else {
423            false
424        }
425    }
426
427    /// Handles links and images.
428    fn lex_link_or_image<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
429        let start_pos = state.get_position();
430
431        // Check if it is an image ![.
432        let is_image = if let Some('!') = state.peek() {
433            state.advance(1);
434            true
435        }
436        else {
437            false
438        };
439
440        if let Some('[') = state.peek() {
441            state.advance(1);
442
443            let token_kind = if is_image { MarkdownTokenType::Image } else { MarkdownTokenType::Link };
444
445            state.add_token(token_kind, start_pos, state.get_position());
446            true
447        }
448        else {
449            if is_image {
450                // Backtrack exclamation.
451                state.set_position(start_pos);
452            }
453            false
454        }
455    }
456
    /// Handles list markers.
    ///
    /// Recognises unordered markers (`-`, `*`, `+`) and ordered markers
    /// (a digit run followed by `.`), both of which must be followed by a
    /// space or tab, and emits a `ListMarker` token. The cursor is restored
    /// and `false` returned when the candidate is not a valid marker.
    fn lex_list_marker<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
        let start_pos = state.get_position();

        // Check if at the beginning of a line or only whitespace before.
        let mut check_pos = start_pos;
        while check_pos > 0 {
            check_pos -= 1;
            if let Some(ch) = state.source().get_char_at(check_pos) {
                if ch == '\n' || ch == '\r' {
                    break;
                }
                else if ch != ' ' && ch != '\t' {
                    return false; // Non-whitespace characters before.
                }
            }
        }

        if let Some(ch) = state.peek() {
            match ch {
                '-' | '*' | '+' => {
                    // Unordered list: the marker must be followed by whitespace.
                    state.advance(1);
                    if let Some(next_ch) = state.peek() {
                        if next_ch == ' ' || next_ch == '\t' {
                            state.add_token(MarkdownTokenType::ListMarker, start_pos, state.get_position());
                            return true;
                        }
                    }
                    // Not a list marker after all: backtrack.
                    state.set_position(start_pos);
                    false
                }
                '0'..='9' => {
                    // Ordered list: consume the digit run, then require "." + whitespace.
                    while let Some(digit) = state.peek() {
                        if digit.is_ascii_digit() { state.advance(1) } else { break }
                    }

                    if let Some('.') = state.peek() {
                        state.advance(1);
                        if let Some(next_ch) = state.peek() {
                            if next_ch == ' ' || next_ch == '\t' {
                                state.add_token(MarkdownTokenType::ListMarker, start_pos, state.get_position());
                                return true;
                            }
                        }
                    }

                    // Digits were not a list marker: backtrack.
                    state.set_position(start_pos);
                    false
                }
                _ => false,
            }
        }
        else {
            false
        }
    }
515
516    /// Handles task markers.
517    fn lex_task_marker<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
518        let start_pos = state.get_position();
519
520        if let Some('[') = state.peek() {
521            state.advance(1);
522            if let Some(ch) = state.peek() {
523                if ch == ' ' || ch == 'x' || ch == 'X' {
524                    state.advance(1);
525                    if let Some(']') = state.peek() {
526                        state.advance(1);
527                        state.add_token(MarkdownTokenType::TaskMarker, start_pos, state.get_position());
528                        return true;
529                    }
530                }
531            }
532            state.set_position(start_pos);
533        }
534        false
535    }
536
    /// Handles HTML tags or comments.
    ///
    /// Thin wrapper over [`lex_any_tag`] that selects the HTML token kinds.
    fn lex_html_tag<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
        self.lex_any_tag(state, MarkdownTokenType::HtmlTag, MarkdownTokenType::HtmlComment)
    }
541
    /// Handles XML tags or comments.
    ///
    /// Thin wrapper over [`lex_any_tag`] that selects the XML token kinds.
    fn lex_xml_tag<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
        self.lex_any_tag(state, MarkdownTokenType::XmlTag, MarkdownTokenType::XmlComment)
    }
546
    /// Common tag handling logic.
    ///
    /// Lexes either an HTML/XML comment (`<!-- … -->`, emitted as
    /// `comment_kind`) or a regular tag (`<…>`, emitted as `tag_kind`).
    /// While scanning for the closing `>`, text inside single or double
    /// quotes is skipped so that a `>` inside an attribute value does not
    /// terminate the tag. The cursor is restored when no `>` is found.
    fn lex_any_tag<S: Source + ?Sized>(&self, state: &mut State<S>, tag_kind: MarkdownTokenType, comment_kind: MarkdownTokenType) -> bool {
        let start_pos = state.get_position();

        if let Some('<') = state.peek() {
            state.advance(1);

            // Check if it is a comment <!-- -->.
            if let Some('!') = state.peek() {
                if state.source().get_char_at(state.get_position() + 1) == Some('-') && state.source().get_char_at(state.get_position() + 2) == Some('-') {
                    state.advance(3);
                    let mut found_end = false;
                    // Scan for the closing "-->".
                    while let Some(ch) = state.peek() {
                        if ch == '-' && state.source().get_char_at(state.get_position() + 1) == Some('-') && state.source().get_char_at(state.get_position() + 2) == Some('>') {
                            state.advance(3);
                            found_end = true;
                            break;
                        }
                        state.advance(ch.len_utf8());
                    }
                    if found_end {
                        state.add_token(comment_kind, start_pos, state.get_position());
                        return true;
                    }
                    // NOTE(review): on an unterminated comment the cursor is
                    // left past "<!--" and control falls through to normal tag
                    // parsing instead of backtracking — confirm intended.
                }
            }

            // Normal tag parsing.
            let mut found_end = false;
            let mut in_string = None; // Track if inside quotes.

            while let Some(ch) = state.peek() {
                if let Some(quote) = in_string {
                    // Inside an attribute string: only the matching quote ends it.
                    if ch == quote {
                        in_string = None;
                    }
                }
                else {
                    if ch == '>' {
                        state.advance(1);
                        found_end = true;
                        break;
                    }
                    else if ch == '"' || ch == '\'' {
                        in_string = Some(ch);
                    }
                }
                state.advance(ch.len_utf8());
            }

            if found_end {
                state.add_token(tag_kind, start_pos, state.get_position());
                true
            }
            else {
                // No closing '>' before EOF: backtrack to the '<'.
                state.set_position(start_pos);
                false
            }
        }
        else {
            false
        }
    }
610
611    /// Lexes blockquotes.
612    fn lex_blockquote<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
613        let start_pos = state.get_position();
614
615        // Check if we are at the start of a line or only preceded by whitespace.
616        let mut check_pos = start_pos;
617        while check_pos > 0 {
618            check_pos -= 1;
619            if let Some(ch) = state.source().get_char_at(check_pos) {
620                if ch == '\n' || ch == '\r' {
621                    break;
622                }
623                else if ch != ' ' && ch != '\t' {
624                    return false;
625                }
626            }
627        }
628
629        if let Some('>') = state.peek() {
630            state.advance(1);
631            state.add_token(MarkdownTokenType::BlockquoteMarker, start_pos, state.get_position());
632            true
633        }
634        else {
635            false
636        }
637    }
638
    /// Lexes horizontal rules.
    ///
    /// A rule line consists of at least three identical `-`, `*`, or `_`
    /// characters, optionally interleaved with spaces/tabs, and nothing else
    /// before the end of the line. The whole run (including interior spaces)
    /// is emitted as one `HorizontalRule` token.
    fn lex_horizontal_rule<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
        let start_pos = state.get_position();

        // Check if we are at the start of a line or only preceded by whitespace.
        let mut check_pos = start_pos;
        while check_pos > 0 {
            check_pos -= 1;
            if let Some(ch) = state.source().get_char_at(check_pos) {
                if ch == '\n' || ch == '\r' {
                    break;
                }
                else if ch != ' ' && ch != '\t' {
                    return false;
                }
            }
        }

        if let Some(ch) = state.peek() {
            if ch == '-' || ch == '*' || ch == '_' {
                let rule_char = ch;
                let mut count = 0;
                let mut pos = start_pos;

                // Count consecutive separators (lookahead only; nothing consumed).
                while let Some(current_ch) = state.source().get_char_at(pos) {
                    if current_ch == rule_char {
                        count += 1;
                        pos += 1
                    }
                    else if current_ch == ' ' || current_ch == '\t' {
                        pos += 1; // Allow spaces.
                    }
                    else {
                        break;
                    }
                }

                if count >= 3 {
                    // Check until the end of the line.
                    while let Some(current_ch) = state.source().get_char_at(pos) {
                        if current_ch == '\n' || current_ch == '\r' {
                            break;
                        }
                        else if current_ch == ' ' || current_ch == '\t' {
                            pos += 1
                        }
                        else {
                            return false; // Other characters found at the end of the line.
                        }
                    }

                    // Consume the whole rule and emit a single token.
                    state.set_position(pos);
                    state.add_token(MarkdownTokenType::HorizontalRule, start_pos, state.get_position());
                    return true;
                }
            }
        }
        false
    }
699
    /// Lexes math formulas.
    ///
    /// `$…$` yields a `MathInline` token and `$$…$$` a `MathBlock` token.
    /// When no matching closing delimiter is found before the end of the
    /// source, the cursor is restored to the opening `$`.
    fn lex_math<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
        let start_pos = state.get_position();

        if let Some('$') = state.peek() {
            state.advance(1);
            let mut is_block = false;

            // A second '$' upgrades the span to display (block) math.
            if let Some('$') = state.peek() {
                state.advance(1);
                is_block = true;
            }

            let mut found_end = false;
            while let Some(ch) = state.peek() {
                if ch == '$' {
                    if is_block {
                        // Block math requires a "$$" terminator; a lone '$'
                        // inside the body is consumed by the advance below.
                        if let Some('$') = state.source().get_char_at(state.get_position() + 1) {
                            state.advance(2);
                            found_end = true;
                            break;
                        }
                    }
                    else {
                        state.advance(1);
                        found_end = true;
                        break;
                    }
                }
                state.advance(ch.len_utf8())
            }

            if found_end {
                let kind = if is_block { MarkdownTokenType::MathBlock } else { MarkdownTokenType::MathInline };
                state.add_token(kind, start_pos, state.get_position());
                true
            }
            else {
                // Unterminated: backtrack to the opening '$'.
                state.set_position(start_pos);
                false
            }
        }
        else {
            false
        }
    }
746
    /// Lexes front matter.
    ///
    /// Front matter is a `---` fence at the very start of the file,
    /// terminated by another `---` at the beginning of a later line. The
    /// whole span, fences included, is emitted as one `FrontMatter` token;
    /// the cursor is restored when no closing fence exists.
    fn lex_front_matter<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
        let start_pos = state.get_position();

        // Must be at the start of the file.
        if start_pos != 0 {
            return false;
        }

        if state.peek() == Some('-') && state.source().get_char_at(1) == Some('-') && state.source().get_char_at(2) == Some('-') {
            state.advance(3);
            // Look for the end marker ---
            let mut found_end = false;
            while state.not_at_end() {
                if state.peek() == Some('\n') || state.peek() == Some('\r') {
                    state.advance(1);
                    // Consume a following '\n' (handles CRLF; note this also
                    // steps over the first character of a blank LF-only line).
                    if state.peek() == Some('\n') {
                        state.advance(1)
                    }
                    // A line beginning with "---" closes the front matter.
                    if state.peek() == Some('-') && state.source().get_char_at(state.get_position() + 1) == Some('-') && state.source().get_char_at(state.get_position() + 2) == Some('-') {
                        state.advance(3);
                        found_end = true;
                        break;
                    }
                }
                else {
                    // NOTE(review): advances one unit at a time regardless of
                    // character width — confirm Source positions are tolerant
                    // of this for multi-byte content.
                    state.advance(1)
                }
            }

            if found_end {
                state.add_token(MarkdownTokenType::FrontMatter, start_pos, state.get_position());
                true
            }
            else {
                // Unterminated: backtrack to the start of the file.
                state.set_position(start_pos);
                false
            }
        }
        else {
            false
        }
    }
790
    /// Lexes footnotes.
    ///
    /// Called with the cursor on `^` when the previous character is `[`:
    /// scans to the closing `]` on the same line and emits either a
    /// `FootnoteDefinition` token (when followed by `:`) or a
    /// `FootnoteReference` token. The token starts at the `[`, one position
    /// before the `^`.
    fn lex_footnote<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
        let start_pos = state.get_position();

        if let Some('^') = state.peek() {
            // Check if it's [^...
            let check_pos = start_pos;
            if check_pos > 0 && state.source().get_char_at(check_pos - 1) == Some('[') {
                state.advance(1);
                while let Some(ch) = state.peek() {
                    if ch == ']' {
                        state.advance(1);
                        // Check if it's a definition [^...]:
                        if state.peek() == Some(':') {
                            state.advance(1);
                            // NOTE(review): the token starts at start_pos - 1,
                            // i.e. the '[' the main loop most likely already
                            // emitted as an LBracket token — confirm that
                            // overlapping tokens are acceptable downstream.
                            state.add_token(MarkdownTokenType::FootnoteDefinition, start_pos - 1, state.get_position())
                        }
                        else {
                            state.add_token(MarkdownTokenType::FootnoteReference, start_pos - 1, state.get_position())
                        }
                        return true;
                    }
                    else if ch == '\n' || ch == '\r' {
                        break; // Footnote labels cannot span lines.
                    }
                    state.advance(ch.len_utf8())
                }
            }
            // Not a footnote: restore the cursor.
            state.set_position(start_pos);
        }
        false
    }
823
824    /// Lexes superscripts and subscripts.
825    fn lex_sub_superscript<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
826        let start_pos = state.get_position();
827
828        if let Some(ch) = state.peek() {
829            let marker = ch;
830            if marker == '^' || marker == '~' {
831                state.advance(1);
832                let mut found_end = false;
833                while let Some(next_ch) = state.peek() {
834                    if next_ch == marker {
835                        state.advance(1);
836                        found_end = true;
837                        break;
838                    }
839                    else if next_ch == ' ' || next_ch == '\t' || next_ch == '\n' || next_ch == '\r' {
840                        break;
841                    }
842                    state.advance(next_ch.len_utf8())
843                }
844
845                if found_end {
846                    let kind = if marker == '^' { MarkdownTokenType::Superscript } else { MarkdownTokenType::Subscript };
847                    state.add_token(kind, start_pos, state.get_position());
848                    true
849                }
850                else {
851                    state.set_position(start_pos);
852                    false
853                }
854            }
855            else {
856                false
857            }
858        }
859        else {
860            false
861        }
862    }
863
864    /// Handles indented code blocks.
865    fn lex_indented_code_block<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
866        let start_pos = state.get_position();
867
868        // Must be at the beginning of a line.
869        if start_pos > 0 {
870            if let Some(prev_char) = state.source().get_char_at(start_pos - 1) {
871                if prev_char != '\n' && prev_char != '\r' {
872                    return false;
873                }
874            }
875        }
876
877        // Check indentation (4 spaces or 1 tab).
878        let mut indent_count = 0;
879        let mut pos = start_pos;
880        while let Some(ch) = state.source().get_char_at(pos) {
881            if ch == ' ' {
882                indent_count += 1;
883                pos += 1;
884                if indent_count == 4 {
885                    break;
886                }
887            }
888            else if ch == '\t' {
889                indent_count = 4;
890                pos += 1;
891                break;
892            }
893            else {
894                break;
895            }
896        }
897
898        if indent_count >= 4 {
899            state.set_position(pos);
900            state.add_token(MarkdownTokenType::CodeBlock, start_pos, state.get_position());
901            true
902        }
903        else {
904            false
905        }
906    }
907
908    /// Lexes special characters.
909    fn lex_special_char<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
910        let start_pos = state.get_position();
911
912        if let Some(ch) = state.peek() {
913            let token_kind = match ch {
914                '[' => MarkdownTokenType::LBracket,
915                ']' => MarkdownTokenType::RBracket,
916                '(' => MarkdownTokenType::LParen,
917                ')' => MarkdownTokenType::RParen,
918                '<' => MarkdownTokenType::Less,
919                '>' => MarkdownTokenType::Greater,
920                '*' => MarkdownTokenType::Asterisk,
921                '_' => MarkdownTokenType::Underscore,
922                '`' => MarkdownTokenType::Backtick,
923                '~' => MarkdownTokenType::Tilde,
924                '#' => MarkdownTokenType::Hash,
925                '|' => MarkdownTokenType::Pipe,
926                '-' => MarkdownTokenType::Dash,
927                '+' => MarkdownTokenType::Plus,
928                '.' => MarkdownTokenType::Dot,
929                ':' => MarkdownTokenType::Colon,
930                '!' => MarkdownTokenType::Exclamation,
931                '\\' => MarkdownTokenType::Escape,
932                '$' => MarkdownTokenType::Dollar,
933                '^' => MarkdownTokenType::Caret,
934                _ => return false,
935            };
936
937            state.advance(ch.len_utf8());
938            state.add_token(token_kind, start_pos, state.get_position());
939            true
940        }
941        else {
942            false
943        }
944    }
945
946    /// Lexes plain text.
947    fn lex_text<S: Source + ?Sized>(&self, state: &mut State<S>) -> bool {
948        let start_pos = state.get_position();
949
950        while let Some(ch) = state.peek() {
951            // Stop when encountering a special character.
952            match ch {
953                ' ' | '\t' | '\n' | '\r' | '#' | '*' | '_' | '`' | '~' | '[' | ']' | '(' | ')' | '<' | '>' | '|' | '-' | '+' | '.' | ':' | '!' | '\\' | '$' | '^' => break,
954                _ => {
955                    state.advance(ch.len_utf8());
956                }
957            }
958        }
959
960        if state.get_position() > start_pos {
961            state.add_token(MarkdownTokenType::Text, start_pos, state.get_position());
962            true
963        }
964        else {
965            false
966        }
967    }
968}
969
970impl<'config> Lexer<MarkdownLanguage> for MarkdownLexer<'config> {
971    fn lex<'a, S: Source + ?Sized>(&self, text: &'a S, _edits: &[TextEdit], cache: &'a mut impl LexerCache<MarkdownLanguage>) -> LexOutput<MarkdownLanguage> {
972        let mut state = State::new(text);
973        let result = self.run(&mut state);
974        if result.is_ok() {
975            state.add_eof();
976        }
977        state.finish_with_cache(result, cache)
978    }
979}
980
981impl<'config> MarkdownLexer<'config> {
982    /// Runs the lexer on the given source and returns the output.
983    pub fn lex_internal<'a, S: Source + ?Sized>(&self, source: &'a S) -> LexOutput<MarkdownLanguage> {
984        let mut state = State::new(source);
985        let result = self.run(&mut state);
986        state.finish(result)
987    }
988}