// libluaudoc/source_file.rs
use crate::{diagnostic::Diagnostics, doc_comment::DocComment, doc_entry::DocEntry, error::Error};
use full_moon::{
    self,
    ast::{LastStmt, Stmt},
    node::Node,
    tokenizer::{Token, TokenReference, TokenType},
    visitors::Visitor,
};

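/// A single source file: the doc comments harvested from its comment trivia,
/// plus the id of the file they came from.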
#[derive(Debug)]
pub struct SourceFile {
    doc_comments: Vec<DocComment>,
    file_id: usize,
}

impl<'a> SourceFile {
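    /// Parses the source text with full_moon and collects its doc comments,
    /// tagging each one with `file_id` and `relative_path`.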
    pub fn from_str(source: &'a str, file_id: usize, relative_path: String) -> Result<Self, Error> {
        let ast = full_moon::parse(source).map_err(|e| Error::FullMoonError(e.to_string()))?;

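        // Accumulates doc comments while the AST's comment trivia is scanned.
        // `buffer` holds a run of consecutive `---` lines, each paired with the
        // statement it precedes, until something flushes them into one DocComment;
        // `last_line` tracks the last line seen so blank lines can be detected.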
        struct Collector<'b> {
            buffer: Vec<(Token, Option<Stmt>)>,
            last_line: usize,
            file_id: usize,
            relative_path: &'b str,
            doc_comments: Vec<DocComment>,
        }

        impl<'b> Collector<'b> {
            fn new(file_id: usize, relative_path: &'b str) -> Self {
                Self {
                    buffer: Vec::new(),
                    file_id,
                    last_line: 0,
                    relative_path,
                    doc_comments: Vec::new(),
                }
            }

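            // Decides what to do with a single piece of trivia:
            //   * `--[=[ ... ]=]` block comments become doc comments on the spot,
            //     discarding anything already buffered;
            //   * `---` single-line comments are buffered so consecutive lines can
            //     be merged, skipping dash-only separator lines and `@module` tags;
            //   * plain `--` comments, and whitespace that starts on a later line
            //     than the previous trivia (a blank line), flush the buffer.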
            fn scan(&mut self, token: Token, stmt: Option<Stmt>) {
                match token.token_type() {
                    TokenType::MultiLineComment { blocks: 1, comment } => {
                        self.last_line = token.end_position().line();
                        self.clear();

                        self.doc_comments.push(DocComment::new(
                            comment.to_string(),
                            token.start_position().bytes() + "--[=[".len(),
                            token.end_position().line() + 1,
                            self.file_id,
                            self.relative_path.to_owned(),
                            stmt,
                        ));
                    }
                    TokenType::SingleLineComment { comment } => {
                        self.last_line = token.start_position().line();

                        if let Some(comment) = comment.strip_prefix('-') {
                            if comment.trim().chars().all(|char| char == '-') {
                                return;
                            }

                            if comment.len() > 1 {
                                if let Some(first_non_whitespace) =
                                    comment.find(|char: char| !char.is_whitespace())
                                {
                                    let tag_body = &comment[first_non_whitespace..];

                                    if tag_body.starts_with("@module") {
                                        return;
                                    }
                                }
                            }

                            self.buffer.push((token, stmt));
                        } else if let Some(doc_comment) = self.flush() {
                            self.doc_comments.push(doc_comment);
                        }
                    }
                    TokenType::Whitespace { .. } => {
                        let line = token.start_position().line();
                        let is_consecutive_newline = line > self.last_line;

                        self.last_line = line;

                        if is_consecutive_newline {
                            if let Some(doc_comment) = self.flush() {
                                self.doc_comments.push(doc_comment);
                            }
                        }
                    }
                    _ => {}
                }
            }

            fn clear(&mut self) {
                self.buffer.clear();
            }

            fn flush(&mut self) -> Option<DocComment> {
                if self.buffer.is_empty() {
                    return None;
                }

                let comment = self
                    .buffer
                    .iter()
                    .map(|(token, _)| match token.token_type() {
                        TokenType::SingleLineComment { comment } => {
                            format!("--{}", comment)
                        }
                        _ => unreachable!(),
                    })
                    .collect::<Vec<_>>()
                    .join("\n");

                let doc_comment = Some(DocComment::new(
                    comment,
                    self.buffer.first().unwrap().0.start_position().bytes(),
                    self.buffer.last().unwrap().0.end_position().line() + 1,
                    self.file_id,
                    self.relative_path.to_owned(),
                    self.buffer.last().unwrap().1.as_ref().cloned(),
                ));

                self.clear();

                doc_comment
            }

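            // Flushes anything still buffered and hands back every doc comment
            // collected for the file.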
            fn finish(mut self) -> Vec<DocComment> {
                if let Some(doc_comment) = self.flush() {
                    self.doc_comments.push(doc_comment);
                }

                self.doc_comments
            }
        }

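        // full_moon invokes these hooks while walking the AST. Each statement's
        // leading trivia is scanned with the statement attached, so a doc comment
        // knows which statement it documents; return statements and the
        // end-of-file token are scanned without one.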
        impl Visitor for Collector<'_> {
            fn visit_stmt(&mut self, stmt: &Stmt) {
                let surrounding_trivia = stmt.surrounding_trivia().0;
                for trivia in surrounding_trivia {
                    self.scan(trivia.clone(), Some(stmt.clone()));
                }
            }

            fn visit_last_stmt(&mut self, stmt: &LastStmt) {
                let stmt = stmt.clone();
                let surrounding_trivia = stmt.surrounding_trivia().0;
                for trivia in surrounding_trivia {
                    self.scan(trivia.clone(), None);
                }
            }

            fn visit_eof(&mut self, stmt: &TokenReference) {
                let surrounding_trivia = stmt.surrounding_trivia().0;
                for trivia in surrounding_trivia {
                    self.scan(trivia.clone(), None);
                }
            }
        }

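        // Walk the whole AST, then flush whatever is still buffered at the end of
        // the file.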
        let mut collector = Collector::new(file_id, &relative_path);

        collector.visit_ast(&ast);

        let doc_comments = collector.finish();

        Ok(Self {
            doc_comments,
            file_id,
        })
    }

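    /// Parses every collected doc comment into a [`DocEntry`], returning either
    /// the full list of entries or the combined diagnostics from the failures.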
    pub fn parse(&'a self) -> Result<Vec<DocEntry>, Error> {
        let (doc_entries, errors): (Vec<_>, Vec<_>) = self
            .doc_comments
            .iter()
            .map(DocEntry::parse)
            .partition(Result::is_ok);
        let doc_entries: Vec<_> = doc_entries.into_iter().map(Result::unwrap).collect();
        let errors: Diagnostics = errors
            .into_iter()
            .map(Result::unwrap_err)
            .flat_map(Diagnostics::into_iter)
            .collect::<Vec<_>>()
            .into();

        if errors.is_empty() {
            Ok(doc_entries)
        } else {
            Err(Error::ParseErrors(errors))
        }
    }
}
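
// A minimal usage sketch (hypothetical caller; the file id scheme and error
// handling below are assumptions about the surrounding tooling, not part of
// this module):
//
//     let source = std::fs::read_to_string("init.lua").unwrap();
//     let file = SourceFile::from_str(&source, 0, "init.lua".to_string()).unwrap();
//     let entries = file.parse().unwrap();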