1pub mod lexer;
4
5use std::path::Path;
6
7use crate::{
8 ast::{Directive, DirectiveTrait},
9 lexer::{line_column2, Literal},
10};
11
12use self::lexer::*;
13
14use anyhow::Context;
15use nom::{
16 combinator::{fail, map},
17 error::{ContextError, ParseError, VerboseError},
18 multi::many0,
19};
20
/// Marker type selecting the nginx configuration dialect for
/// `Directive<Nginx>` / `DirectiveTrait<Nginx>`.
#[derive(Debug, Clone, Default)]
pub struct Nginx;
23
24impl DirectiveTrait<Nginx> for Directive<Nginx> {
25 fn parse(input: &[u8]) -> anyhow::Result<Vec<Self>> {
26 let res = parse_block(input).map_err(|err| {
27 err.map(|e| {
28 let errs = e
29 .errors
30 .iter()
31 .map(|(i, code)| {
32 let ((l, c), pos) = line_column2(input, i).unwrap();
33 format!("0x{pos:x}({l}:{c}) err: {:?}", code)
34 })
35 .collect::<Vec<_>>();
36 anyhow::anyhow!("{}", errs.join("\n"))
37 })
38 })?;
39 Ok(res.1)
40 }
41
42 fn resolve_include_inner(mut self, dir: &Path, out: &mut Vec<Self>) -> anyhow::Result<()> {
43 if self.name == "include" {
44 let path = Path::new(
45 self.args
46 .get(0)
47 .context("include directive expect one arg")?
48 .as_str(),
49 );
50 for path in glob::glob(
51 &if path.is_absolute() {
52 path.to_path_buf()
53 } else {
54 dir.join(path)
55 }
56 .to_string_lossy(),
57 )?
58 .flatten()
59 {
60 let data = std::fs::read(&path)?;
61 for c in Self::parse(&data).with_context(|| format!("parse {path:?}"))? {
62 c.resolve_include_inner(dir, out)?;
63 }
64 }
65 } else {
66 self.resolve_include(dir)?;
67 out.push(self);
68 }
69 Ok(())
70 }
71}
72
73fn parse_literal(input: &[u8]) -> IResult<&[u8], Literal<'_>> {
74 let (rest, tok) = tokenizer(input)?;
75 match tok {
76 Token::Literal(l) => Ok((rest, l)),
77 Token::Eof | Token::BlockEnd => Ok((rest, Default::default())),
78 _else => fail(input),
79 }
80}
81
/// Parse a sequence of directives until a `}` (block end) or end of
/// input, returning the unconsumed input and the directives collected.
///
/// Each iteration handles one item of the form:
///   `name arg* ( ';' | newline | '{' children '}' )`
/// A name ending in `_by_lua_block` switches to a Lua lexer to skip the
/// embedded Lua chunk rather than parsing it as nginx config.
fn parse_block(mut input: &[u8]) -> IResult<&[u8], Vec<Directive<Nginx>>> {
    let mut result = vec![];
    loop {
        let mut d = Directive::default();
        // The first token of an item is the directive name.
        let (rest, tag) = tokenizer(input).map_err(|err| {
            err.map(|err| VerboseError::add_context(input, "unexpected item token", err))
        })?;

        let lit = match tag {
            Token::Literal(lit) => lit,
            // `}` or EOF ends this block. `input` is not advanced, so a
            // `}` is re-read (and consumed) by the caller's BlockStart arm.
            Token::BlockEnd | Token::Eof => break,
            _ => return fail(input),
        };
        d.name = lit.clone().into();
        // Zero or more argument literals follow the name.
        let (rest, args) = map(many0(parse_literal), |v| {
            v.into_iter().map(Into::into).collect()
        })(rest)?;
        d.args = args;

        // The token after the arguments decides how the item terminates.
        let (rest, tok) = tokenizer(rest)?;
        match tok {
            Token::Semicolon | Token::NewLine => {
                input = rest;
            }
            Token::Eof => break,
            // `*_by_lua_block { ... }`: the body is Lua, not nginx
            // config, so lex it with luaparse and skip to the matching
            // closing brace instead of recursing with parse_block.
            Token::BlockStart if lit.raw.ends_with("_by_lua_block") => {
                use luaparse::token::*;

                // Brace depth; the `{` that opened the block counts as 1.
                let mut pairs = 1usize;
                // NOTE(review): from_utf8_lossy expands invalid UTF-8
                // bytes into multi-byte U+FFFD, which would desync the
                // byte offset taken from the Lua lexer below from
                // `rest` — confirm inputs are valid UTF-8 here.
                let cow = String::from_utf8_lossy(rest);
                let mut lexer = luaparse::Lexer::new(luaparse::InputCursor::new(&cow));
                while let Some(Ok(tok)) = lexer.next() {
                    match tok.value {
                        TokenValue::Symbol(Symbol::CurlyBracketLeft) => pairs += 1,
                        TokenValue::Symbol(Symbol::CurlyBracketRight) => pairs -= 1,
                        _ => {}
                    }
                    if pairs == 0 {
                        break;
                    }
                }

                // Resume nginx parsing just past the Lua chunk's closing
                // brace, which the Lua lexer has already consumed.
                input = &rest[lexer.cursor().pos().byte..];
            }
            Token::BlockStart => {
                // Ordinary `{`: recurse for the children, then require
                // the matching `}` that the child call left unconsumed.
                let (rest, res) = parse_block(rest)?;
                d.children.replace(res);
                let (rest, tok) = tokenizer(rest)?;
                if tok != Token::BlockEnd {
                    return Err(nom::Err::Failure(VerboseError::add_context(
                        input,
                        "expected block end brace",
                        VerboseError::from_error_kind(input, nom::error::ErrorKind::Fail),
                    )));
                }
                input = rest;
            }
            _ => {
                // Any other terminator token is malformed input.
                fail::<_, (), _>(rest)?;
            }
        }

        result.push(d);
    }
    Ok((input, result))
}