use std::fmt::Debug;
use std::{cell::RefCell, rc::Rc};

use crate::internal::*;

pub struct Parser<'arena> {
  pub(super) bump: &'arena Bump,
  pub(super) lexer: Lexer<'arena>,
  pub(super) document: Document<'arena>,
  pub(super) peeked_lines: Option<ContiguousLines<'arena>>,
  pub(super) peeked_meta: Option<ChunkMeta<'arena>>,
  pub(super) ctx: ParseContext<'arena>,
  pub(super) errors: RefCell<Vec<Diagnostic>>,
  pub(super) strict: bool,
  pub(super) include_resolver: Option<Box<dyn IncludeResolver>>,
  #[cfg(feature = "attr_ref_observation")]
  pub(super) attr_ref_observer: Option<Box<dyn AttrRefObserver>>,
}

impl<'arena> Parser<'arena> {
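  /// Constructs a parser over a byte buffer already allocated in `bump`,
  /// creating a fresh lexer for the given source file.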
  pub fn new(src: BumpVec<'arena, u8>, file: SourceFile, bump: &'arena Bump) -> Self {
    Parser::from_lexer(Lexer::new(src, file, bump))
  }

  pub fn from_str(src: &str, file: SourceFile, bump: &'arena Bump) -> Self {
    Parser::from_lexer(Lexer::from_str(bump, file, src))
  }

  fn from_lexer(lexer: Lexer<'arena>) -> Self {
    let mut parser = Parser {
      bump: lexer.bump,
      document: Document::new(lexer.bump),
      peeked_lines: None,
      peeked_meta: None,
      ctx: ParseContext::new(lexer.bump),
      errors: RefCell::new(Vec::new()),
      strict: true,
      include_resolver: None,
      lexer,
      #[cfg(feature = "attr_ref_observation")]
      attr_ref_observer: None,
    };
    parser.set_source_file_attrs();
    parser
  }

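  /// Applies job-level settings: adjusts the level offset, copies the strict
  /// flag and max include depth into the parse context, and replaces the
  /// document meta with attributes derived from `settings`.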
  pub fn apply_job_settings(&mut self, settings: JobSettings) {
    if let Some(leveloffset) = settings.job_attrs.get("leveloffset") {
      Parser::adjust_leveloffset(&mut self.ctx.leveloffset, &leveloffset.value);
    }
    self.strict = settings.strict;
    self.ctx.max_include_depth = settings.job_attrs.u16("max-include-depth").unwrap_or(64);
    self.document.meta = settings.into();
    self.set_source_file_attrs();
  }

  pub fn register_plugin_macros(&mut self, names: &[impl AsRef<str>]) {
    self.lexer.register_plugin_macros(names);
  }

  pub fn provide_timestamps(
    &mut self,
    now: u64,
    input_modified_time: Option<u64>,
    reproducible_override: Option<u64>,
  ) {
    self.set_datetime_attrs(now, input_modified_time, reproducible_override);
  }

  pub fn set_resolver(&mut self, resolver: Box<dyn IncludeResolver>) {
    self.include_resolver = Some(resolver);
  }

  #[cfg(feature = "attr_ref_observation")]
  pub fn set_attr_ref_observer(&mut self, observer: Box<dyn AttrRefObserver>) {
    self.attr_ref_observer = Some(observer);
  }

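  /// Creates a child parser for the source of a single table cell, sharing
  /// the parent's include resolver, strict flag, parse context, document
  /// meta, and anchor table, with token locations shifted by `offset`.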
  pub fn cell_parser(&mut self, src: BumpVec<'arena, u8>, offset: u32) -> Parser<'arena> {
    let mut cell_parser = Parser::new(src, self.lexer.source_file().clone(), self.bump);
    cell_parser.include_resolver = self.include_resolver.as_ref().map(|r| r.clone_box());
    cell_parser.strict = self.strict;
    cell_parser.lexer.adjust_offset(offset);
    cell_parser.ctx = self.ctx.clone_for_cell(self.bump);
    cell_parser.document.meta = self.document.meta.clone_for_cell();
    cell_parser.document.anchors = Rc::clone(&self.document.anchors);

    #[cfg(feature = "attr_ref_observation")]
    {
      cell_parser.attr_ref_observer = self.attr_ref_observer.take();
    }

    cell_parser
  }

  pub(crate) fn loc(&self) -> SourceLocation {
    self
      .peeked_lines
      .as_ref()
      .and_then(|lines| lines.first_loc())
      .unwrap_or_else(|| self.lexer.loc())
  }

  pub(crate) fn read_line(&mut self) -> Result<Option<Line<'arena>>> {
    Ok(self._read_line(false)?.map(|(line, _)| line))
  }

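  // Reads a single line of tokens, tracking the end of the document header,
  // expanding attribute references, and handling preprocessor directives.
  // The bool in the returned tuple is `true` when an ignored (not-included)
  // directive line was dropped immediately before the returned line.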
  fn _read_line(&mut self, ignored_last: bool) -> Result<Option<(Line<'arena>, bool)>> {
    assert!(self.peeked_lines.is_none());
    if self.lexer.is_eof() {
      return Ok(None);
    }

    use TokenKind::*;
    let mut drop_line = false;
    let mut line = Line::empty(self.bump);
    while !self.lexer.at_newline() && !self.lexer.is_eof() {
      let mut token = self.lexer.next_token();
      if line.is_empty() {
        if self.ctx.in_header
          && !matches!(
            token.kind,
            Colon | EqualSigns | Word | ForwardSlashes | Directive | OpenBracket
          )
        {
          self.ctx.in_header = false;
        } else if token.kind == Colon && self.ctx.subs.attr_refs() {
          self.try_parse_attr_def(&mut token)?;
        }
      }
      self.push_token_replacing_attr_ref(token, &mut line, &mut drop_line)?;
    }
    self.lexer.skip_newline();
    if drop_line {
      return self._read_line(false);
    }
    if line.starts(TokenKind::Directive) && !self.ctx.within_block_comment() {
      match self.try_process_directive(&mut line)? {
        DirectiveAction::Passthrough => Ok(Some((line, ignored_last))),
        DirectiveAction::SubstituteLine(line) => Ok(Some((line, ignored_last))),
        DirectiveAction::IgnoreNotIncluded => self._read_line(true),
        DirectiveAction::ReadNextLine => self._read_line(false),
        DirectiveAction::SkipLinesUntilEndIf => Ok(
          self
            .skip_lines_until_endif(&line)?
            .map(|l| (l, ignored_last)),
        ),
      }
    } else {
      Ok(Some((line, ignored_last)))
    }
  }

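  /// Reads the next group of contiguous (non-empty) lines, returning any
  /// previously restored (peeked) lines first, or `None` at end of input.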
  pub(crate) fn read_lines(&mut self) -> Result<Option<ContiguousLines<'arena>>> {
    self.ctx.comment_delim_in_lines = false;
    if let Some(peeked) = self.peeked_lines.take() {
      return Ok(Some(peeked));
    }
    self.lexer.consume_empty_lines();
    if self.lexer.is_eof() {
      return Ok(None);
    }
    let mut lines = Deq::new(self.bump);
    while let Some((line, ignored_removed_include_line)) = self._read_line(false)? {
      if line.is_emptyish() {
        if lines.is_empty() {
          continue;
        } else if !ignored_removed_include_line {
          break;
        }
      }
      if line.is_delimiter_kind(DelimiterKind::Comment) {
        self.ctx.comment_delim_in_lines = true;
      }
      lines.push(line);
      if self.lexer.at_newline() {
        break;
      }
    }
    if lines.is_empty() {
      Ok(None)
    } else {
      Ok(Some(ContiguousLines::new(lines)))
    }
  }

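  /// Reads lines until the given block delimiter is reached or input is
  /// exhausted; unlike `read_lines`, this keeps reading across empty lines
  /// and trims trailing empty lines before returning.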
  pub(crate) fn read_lines_until(
    &mut self,
    delimiter: Delimiter,
  ) -> Result<Option<ContiguousLines<'arena>>> {
    let Some(mut lines) = self.read_lines()? else {
      return Ok(None);
    };
    if lines.any(|l| l.is_delimiter(delimiter)) {
      return Ok(Some(lines));
    }

    let mut additional_lines = BumpVec::new_in(self.bump);
    while !self.lexer.is_eof() && !self.at_delimiter(delimiter) {
      additional_lines.push(self.read_line()?.unwrap());
    }
    lines.extend(additional_lines);

    while lines.last().map(|l| l.is_empty()) == Some(true) {
      lines.pop();
    }
    Ok(Some(lines))
  }

  fn at_delimiter(&self, delimiter: Delimiter) -> bool {
    match delimiter.kind {
      DelimiterKind::BlockQuote => self.lexer.at_delimiter_line() == Some((4, b'_')),
      DelimiterKind::Example => {
        self.lexer.at_delimiter_line() == Some((delimiter.len as u32, b'='))
      }
      DelimiterKind::Open => self.lexer.at_delimiter_line() == Some((2, b'-')),
      DelimiterKind::Sidebar => self.lexer.at_delimiter_line() == Some((4, b'*')),
      DelimiterKind::Listing => {
        if delimiter.len == 3 {
          self.lexer.at_delimiter_line() == Some((3, b'`'))
        } else {
          self.lexer.at_delimiter_line() == Some((4, b'-'))
        }
      }
      DelimiterKind::Literal => self.lexer.at_delimiter_line() == Some((4, b'.')),
      DelimiterKind::Passthrough => self.lexer.at_delimiter_line() == Some((4, b'+')),
      DelimiterKind::Comment => self.lexer.at_delimiter_line() == Some((4, b'/')),
    }
  }

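  /// Pushes unconsumed lines back onto the parser so the next call to
  /// `read_lines` returns them before reading further input.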
  pub(crate) fn restore_lines(&mut self, lines: ContiguousLines<'arena>) {
    debug_assert!(self.peeked_lines.is_none());
    if !lines.is_empty() {
      self.peeked_lines = Some(lines);
    }
  }

  pub(crate) fn restore_peeked_meta(&mut self, meta: ChunkMeta<'arena>) {
    if !meta.is_empty() {
      debug_assert!(self.peeked_meta.is_none());
      self.peeked_meta = Some(meta);
    }
  }

  pub(crate) fn restore_peeked(&mut self, lines: ContiguousLines<'arena>, meta: ChunkMeta<'arena>) {
    self.restore_lines(lines);
    self.restore_peeked_meta(meta);
  }

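  /// Parses the entire document, consuming the parser. Returns the parse
  /// result on success, or the accumulated diagnostics on failure.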
  pub fn parse(mut self) -> std::result::Result<ParseResult<'arena>, Vec<Diagnostic>> {
    self.parse_document_header()?;
    self.prepare_toc();

    if self.document.meta.get_doctype() == DocType::Inline {
      if self.peeked_lines.is_none() {
        self.peeked_lines = self.read_lines().expect("tmp");
      }
      self.lexer.truncate();
    }

    if let Some(book_content) = self.parse_book()? {
      self.document.content = book_content;
    } else {
      let sectioned = self.parse_sectioned()?;
      self.document.content = sectioned.into_doc_content(self.bump);
    }

    self.document.meta.clear_doc_attrs();
    self.diagnose_document()?;
    Ok(self.into())
  }

  pub(crate) fn parse_sectioned(&mut self) -> Result<Sectioned<'arena>> {
    let mut blocks = bvec![in self.bump];
    while let Some(block) = self.parse_block()? {
      blocks.push(block);
    }
    let preamble = if blocks.is_empty() { None } else { Some(blocks) };
    let mut sections = bvec![in self.bump];
    while let Some(section) = self.parse_section()? {
      sections.push(section);
    }
    Ok(Sectioned { preamble, sections })
  }

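  /// Parses block metadata preceding a chunk of content: a block title
  /// (`.Title`), attribute lists (`[...]`), block anchors (`[[id]]`), and
  /// any interleaved comment lines, returning them as a `ChunkMeta`.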
  pub(crate) fn parse_chunk_meta(
    &mut self,
    lines: &mut ContiguousLines<'arena>,
  ) -> Result<ChunkMeta<'arena>> {
    if let Some(meta) = self.peeked_meta.take() {
      return Ok(meta);
    }
    assert!(!lines.is_empty());
    let start_loc = lines.current_token().unwrap().loc;
    let mut attrs = MultiAttrList::new_in(self.bump);
    let mut title = None;
    if !lines.current().unwrap().is_fully_unconsumed() {
      return Ok(ChunkMeta::new(attrs, title, start_loc));
    }
    loop {
      match lines.current() {
        Some(line) if line.is_chunk_title() => {
          let mut line = lines.consume_current().unwrap();
          line.discard_assert(TokenKind::Dots);
          title = Some(self.parse_inlines(&mut line.into_lines())?);
        }
        Some(line) if line.is_block_attr_list() => {
          let mut line = lines.consume_current().unwrap();
          line.discard_assert(TokenKind::OpenBracket);
          attrs.push(self.parse_block_attr_list(&mut line)?);
        }
        Some(line) if line.is_block_anchor() => {
          let mut line = lines.consume_current().unwrap();
          let first = line.discard_assert(TokenKind::OpenBracket);
          line.discard_assert(TokenKind::OpenBracket);
          let Some(anchor) = self.parse_block_anchor(&mut line)? else {
            self.err_line_starting("Invalid block anchor", first.loc)?;
            return Ok(ChunkMeta::new(attrs, title, start_loc));
          };
          let mut anchor_attrs = AttrList::new(anchor.loc, self.bump);
          anchor_attrs.id = Some(anchor.id);
          anchor_attrs.positional.push(anchor.reftext);
          attrs.push(anchor_attrs);
        }
        Some(line) if line.is_comment() && (!attrs.is_empty() || title.is_some()) => {
          lines.consume_current();
        }
        _ => break,
      }
    }
    Ok(ChunkMeta::new(attrs, title, start_loc))
  }

  pub(crate) fn string(&self, s: &str) -> BumpString<'arena> {
    BumpString::from_str_in(s, self.bump)
  }
}

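/// Access to the bump arena, with a default helper for constructing
/// arena-allocated tokens without threading `&Bump` around.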
pub trait HasArena<'arena> {
  fn bump(&self) -> &'arena Bump;
  fn token(&self, kind: TokenKind, lexeme: &str, loc: SourceLocation) -> Token<'arena> {
    Token::new(kind, loc, BumpString::from_str_in(lexeme, self.bump()))
  }
}

impl<'arena> HasArena<'arena> for Parser<'arena> {
  fn bump(&self) -> &'arena Bump {
    self.bump
  }
}

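/// What the parser should do with the current line after a preprocessor
/// directive line has been processed.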
pub enum DirectiveAction<'arena> {
  Passthrough,
  ReadNextLine,
  IgnoreNotIncluded,
  SkipLinesUntilEndIf,
  SubstituteLine(Line<'arena>),
}

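/// Identifies where the parsed source came from: a file path, stdin (with a
/// working directory), or a temporary in-memory buffer.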
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SourceFile {
  Stdin { cwd: Path },
  Path(Path),
  Tmp,
}

impl SourceFile {
  pub fn file_name(&self) -> &str {
    match self {
      SourceFile::Stdin { .. } => "<stdin>",
      SourceFile::Path(path) => path.file_name(),
      SourceFile::Tmp => "<temp-buffer>",
    }
  }

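  /// Returns `true` if this source file is the document referred to by an
  /// inter-document xref target, matching by file name, full path suffix,
  /// or extension-less path when the target omits an extension.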
  pub fn matches_xref_target(&self, target: &str) -> bool {
    let SourceFile::Path(path) = self else {
      return false;
    };
    let filename = path.file_name();
    if filename == target {
      return true;
    }
    let xref_ext = file::ext(target);
    let path_ext = file::ext(filename);
    if xref_ext.is_some() && xref_ext != path_ext {
      return false;
    }
    let fullpath = path.to_string();
    if fullpath.ends_with(target) {
      true
    } else if xref_ext.is_some() {
      false
    } else {
      file::remove_ext(&fullpath).ends_with(target)
    }
  }
}

impl From<Diagnostic> for Vec<Diagnostic> {
  fn from(diagnostic: Diagnostic) -> Self {
    vec![diagnostic]
  }
}

#[cfg(test)]
mod tests {
  use super::*;
  use test_utils::*;

  fn resolve(src: &'static str) -> Box<dyn IncludeResolver> {
    #[derive(Clone)]
    struct MockResolver(pub Vec<u8>);
    impl IncludeResolver for MockResolver {
      fn resolve(
        &mut self,
        _: IncludeTarget,
        buffer: &mut dyn IncludeBuffer,
      ) -> std::result::Result<usize, ResolveError> {
        buffer.initialize(self.0.len());
        let bytes = buffer.as_bytes_mut();
        bytes.copy_from_slice(&self.0);
        Ok(self.0.len())
      }
      fn get_base_dir(&self) -> Option<String> {
        Some("/".to_string())
      }
      fn clone_box(&self) -> Box<dyn IncludeResolver> {
        Box::new(self.clone())
      }
    }
    Box::new(MockResolver(Vec::from(src.as_bytes())))
  }

  fn reassemble(lines: ContiguousLines) -> String {
    lines
      .iter()
      .map(|l| l.reassemble_src())
      .collect::<Vec<_>>()
      .join("\n")
  }

  #[test]
  fn test_attr_ref() {
    let mut parser = test_parser!("hello {foo} world");
    parser
      .document
      .meta
      .insert_doc_attr("foo", "_bar_")
      .unwrap();
    let mut lines = parser.read_lines().unwrap().unwrap();
    let line = lines.consume_current().unwrap();
    let tokens = line.into_iter().collect::<Vec<_>>();
    expect_eq!(
      &tokens,
      &[
        Token::new(TokenKind::Word, loc!(0..5), bstr!("hello")),
        Token::new(TokenKind::Whitespace, loc!(5..6), bstr!(" ")),
        Token::new(TokenKind::AttrRef, loc!(6..11), bstr!("{foo}")),
        Token::new(TokenKind::Underscore, loc!(6..11), bstr!("_")),
        Token::new(TokenKind::Word, loc!(6..11), bstr!("bar")),
        Token::new(TokenKind::Underscore, loc!(6..11), bstr!("_")),
        Token::new(TokenKind::Whitespace, loc!(11..12), bstr!(" ")),
        Token::new(TokenKind::Word, loc!(12..17), bstr!("world")),
      ]
    );
  }

  #[test]
  fn invalid_directive_line_passed_thru() {
    let input = adoc! {"
      foo
      include::invalid []
      bar
    "};

    let mut parser = test_parser!(input);
    assert_eq!(
      reassemble(parser.read_lines().unwrap().unwrap()),
      input.trim_end()
    );
  }

  #[test]
  fn safe_mode_include_to_link() {
    let input = adoc! {"
      foo
      include::include-file.adoc[]
      baz
    "};

    let mut parser = test_parser!(input);
    parser.apply_job_settings(JobSettings::secure());
    assert_eq!(
      reassemble(parser.read_lines().unwrap().unwrap()),
      adoc! {"
        foo
        link:include-file.adoc[role=include,]
        baz"
      }
    );

    let mut parser = test_parser!(input);
    parser.apply_job_settings(JobSettings::secure());

    let mut line = parser.read_line().unwrap().unwrap();
    expect_eq!(
      line.consume_current().unwrap(),
      Token::new(TokenKind::Word, loc!(0..3), bstr!("foo"))
    );
    assert!(line.consume_current().is_none());

    assert_eq!(&input[8..13], "ude::");
    assert_eq!(&input[30..32], "[]");

    let mut line = parser.read_line().unwrap().unwrap();
    expect_eq!(
      std::array::from_fn(|_| line.consume_current().unwrap()),
      [
        Token::new(TokenKind::MacroName, loc!(8..13), bstr!("link:")),
        Token::new(TokenKind::Word, loc!(13..20), bstr!("include")),
        Token::new(TokenKind::Dashes, loc!(20..21), bstr!("-")),
        Token::new(TokenKind::Word, loc!(21..25), bstr!("file")),
        Token::new(TokenKind::Dots, loc!(25..26), bstr!(".")),
        Token::new(TokenKind::Word, loc!(26..30), bstr!("adoc")),
        Token::new(TokenKind::OpenBracket, loc!(30..31), bstr!("[")),
        Token::new(TokenKind::Word, loc!(31..31), bstr!("role")),
        Token::new(TokenKind::EqualSigns, loc!(31..31), bstr!("=")),
        Token::new(TokenKind::Word, loc!(31..31), bstr!("include")),
        Token::new(TokenKind::Comma, loc!(31..31), bstr!(",")),
        Token::new(TokenKind::CloseBracket, loc!(31..32), bstr!("]")),
      ]
    );
    assert!(line.consume_current().is_none());
  }

  #[test]
  fn attrs_preserved_when_replacing_include() {
    let input = "include::some-file.adoc[leveloffset+=1]";
    let mut parser = test_parser!(input);
    parser.apply_job_settings(JobSettings::secure());
    assert_eq!(
      parser.read_line().unwrap().unwrap().reassemble_src(),
      "link:some-file.adoc[role=include,leveloffset+=1]"
    );
  }

  #[test]
  fn spaces_in_include_file_to_pass_macro_link() {
    let input = "include::foo bar baz.adoc[]";
    let mut parser = test_parser!(input);
    parser.apply_job_settings(JobSettings::secure());
    assert_eq!(
      parser.read_line().unwrap().unwrap().reassemble_src(),
      "link:pass:c[foo bar baz.adoc][role=include,]"
    );
  }

  #[test]
  fn uri_read_not_allowed_include_non_strict() {
    let input = "include::https://my.com/foo bar.adoc[]";
    let mut parser = test_parser!(input);
    let mut settings = JobSettings::r#unsafe();
    settings.strict = false;
    parser.apply_job_settings(settings);
    expect_eq!(
      parser.read_line().unwrap().unwrap().reassemble_src(),
      "link:pass:c[https://my.com/foo bar.adoc][role=include,]",
      from: input
    );
  }
}