1mod lexer;
6mod rules;
7mod selectors;
8mod values;
9
10use derive_more::From;
11pub use lexer::{lex_source, CSSToken};
12pub use rules::Rule;
13pub use selectors::Selector;
14use source_map::{Counter, SourceId, Span, StringWithSourceMap, ToString};
15use std::{mem, path::Path};
16use tokenizer_lib::{BufferedTokenQueue, Token, TokenReader};
17pub use values::CSSValue;
18
19#[derive(Debug)]
20pub struct ParseError {
21 pub reason: String,
22 pub position: Span,
23}
24
25impl From<Option<(CSSToken, Token<CSSToken, Span>)>> for ParseError {
26 fn from(opt: Option<(CSSToken, Token<CSSToken, Span>)>) -> Self {
27 if let Some((expected_type, invalid_token)) = opt {
28 Self {
29 reason: format!(
30 "Expected '{:?}' found '{:?}'",
31 expected_type, invalid_token.0
32 ),
33 position: invalid_token.1,
34 }
35 } else {
36 unreachable!()
37 }
38 }
39}
40
/// Options controlling how a stylesheet is rendered back to text.
// `Debug` added: public configuration types should be printable for
// diagnostics.
#[derive(Clone, Debug)]
pub struct ToStringSettings {
    /// When `true`, whitespace and comments are omitted from the output
    pub minify: bool,
    /// The string used for one level of indentation (ignored when minifying)
    pub indent_with: String,
}
47
48impl Default for ToStringSettings {
49 fn default() -> Self {
50 ToStringSettings {
51 minify: false,
52 indent_with: " ".to_owned(),
53 }
54 }
55}
56
57impl ToStringSettings {
58 pub fn minified() -> Self {
60 ToStringSettings {
61 minify: true,
62 indent_with: "".to_owned(),
63 }
64 }
65}
66
67pub(crate) fn token_as_ident(token: Token<CSSToken, Span>) -> Result<(String, Span), ParseError> {
68 if let CSSToken::Ident(val) = token.0 {
69 Ok((val, token.1))
70 } else {
71 Err(ParseError {
72 reason: format!("Expected ident found '{:?}'", token.0),
73 position: token.1,
74 })
75 }
76}
77
78pub trait ASTNode: Sized + Send + Sync + 'static {
79 #[cfg(not(target_arch = "wasm32"))]
81 fn from_string(
82 string: String,
83 source_id: SourceId,
84 offset: Option<usize>,
85 ) -> Result<Self, ParseError> {
86 use std::thread;
87 use tokenizer_lib::ParallelTokenQueue;
88
89 if string.len() > 2048 {
90 let (mut sender, mut reader) = ParallelTokenQueue::new();
91 let parsing_thread = thread::spawn(move || {
92 let res = Self::from_reader(&mut reader);
93 if res.is_ok() {
94 reader.expect_next(CSSToken::EOS)?;
95 }
96 res
97 });
98 lexer::lex_source(&string, &mut sender, source_id, None)?;
99 parsing_thread.join().expect("Parsing thread panicked")
100 } else {
101 let mut reader = BufferedTokenQueue::new();
102 lexer::lex_source(&string, &mut reader, SourceId::null(), offset)?;
103 let this = Self::from_reader(&mut reader);
104 reader.expect_next(CSSToken::EOS)?;
105 this
106 }
107 }
108
109 #[cfg(target_arch = "wasm32")]
111 fn from_string(string: String) -> Result<Self, ParseError> {
112 let mut reader = StaticTokenChannel::new();
113 lexer::lex_source(&string, &mut reader)?;
114 let this = Self::from_reader(&mut reader);
115 reader.expect_next(CSSToken::EOS)?;
116 this
117 }
118
119 fn get_position(&self) -> Option<&Span>;
121
122 fn from_reader(reader: &mut impl TokenReader<CSSToken, Span>) -> Result<Self, ParseError>;
123
124 fn to_string_from_buffer(
126 &self,
127 buf: &mut impl ToString,
128 settings: &ToStringSettings,
129 depth: u8,
130 );
131
132 fn to_string(&self, settings: &ToStringSettings) -> String {
134 let mut buffer = String::new();
135 self.to_string_from_buffer(&mut buffer, settings, 0);
136 buffer
137 }
138}
139
/// A parsed stylesheet: an ordered list of top-level entries.
#[derive(Debug)]
pub struct StyleSheet {
    // Rules and comments in source order
    pub entries: Vec<Entry>,
}
145
/// A single top-level item in a stylesheet.
// `derive_more::From` provides `From<Rule>` and `From<String>` conversions.
#[derive(Debug, From)]
pub enum Entry {
    Rule(Rule),
    Comment(String),
}
151
152impl StyleSheet {
153 fn from_reader(reader: &mut impl TokenReader<CSSToken, Span>) -> Result<Self, ParseError> {
154 let mut entries: Vec<Entry> = Vec::new();
155 while let Some(peek) = reader.peek() {
156 match peek {
157 Token(CSSToken::EOS, _) => break,
158 Token(CSSToken::Comment(_), _) => {
159 if let Token(CSSToken::Comment(comment), _) = reader.next().unwrap() {
160 entries.push(Entry::Comment(comment));
161 } else {
162 unreachable!()
163 }
164 }
165 _ => {
166 entries.push(Rule::from_reader(reader)?.into());
167 }
168 }
169 }
170 Ok(Self { entries })
171 }
172
173 fn to_string_from_buffer(&self, buf: &mut impl ToString, settings: &ToStringSettings) {
174 for (idx, entry) in self.entries.iter().enumerate() {
175 match entry {
176 Entry::Rule(rule) => {
177 rule.to_string_from_buffer(buf, settings, 0);
178 }
179 Entry::Comment(comment) => {
180 if !settings.minify {
181 buf.push_str("/*");
182 buf.push_str_contains_new_line(comment);
183 buf.push_str("*/");
184 }
185 }
186 }
187 if !settings.minify && idx + 1 < self.entries.len() {
188 buf.push_new_line();
189 buf.push_new_line();
190 }
191 }
192 }
193
194 pub fn to_string(&self, settings: Option<ToStringSettings>) -> String {
195 let mut buf = String::new();
196 self.to_string_from_buffer(&mut buf, &settings.unwrap_or_default());
197 buf
198 }
199
200 pub fn to_string_with_source_map(
202 &self,
203 settings: Option<ToStringSettings>,
204 ) -> (String, String) {
205 let mut buf = StringWithSourceMap::new();
206 self.to_string_from_buffer(&mut buf, &settings.unwrap_or_default());
207 buf.build()
208 }
209
210 pub fn length(&self, settings: Option<ToStringSettings>) -> usize {
211 let mut buf = Counter::new();
212 self.to_string_from_buffer(&mut buf, &settings.unwrap_or_default());
213 buf.get_count()
214 }
215
216 #[cfg(not(target_arch = "wasm32"))]
217 pub fn from_path(path: impl AsRef<Path>) -> Result<Self, ParseError> {
218 use std::fs;
219
220 let path_buf = path.as_ref().to_path_buf();
221 let source = fs::read_to_string(path).unwrap();
222 let source_id = SourceId::new(path_buf, source.clone());
223 Self::from_string(source, source_id)
224 }
225
226 pub fn from_string(source: String, source_id: SourceId) -> Result<Self, ParseError> {
227 use std::thread;
228 use tokenizer_lib::ParallelTokenQueue;
229
230 let (mut sender, mut reader) = ParallelTokenQueue::new();
231 let parsing_thread = thread::spawn(move || {
232 let res = Self::from_reader(&mut reader);
233 if res.is_ok() {
234 reader.expect_next(CSSToken::EOS)?;
235 }
236 res
237 });
238
239 lexer::lex_source(&source, &mut sender, source_id, None)?;
240 parsing_thread.join().unwrap()
241 }
242}
243
244pub fn raise_nested_rules(stylesheet: &mut StyleSheet) {
246 let mut raised_rules: Vec<Rule> = Vec::new();
247 for entry in stylesheet.entries.iter_mut() {
248 if let Entry::Rule(rule) = entry {
249 raise_subrules(rule, &mut raised_rules);
250 }
251 }
252 stylesheet
253 .entries
254 .extend(raised_rules.into_iter().map(Into::into));
255}
256
257fn raise_subrules(rule: &mut Rule, raised_rules: &mut Vec<Rule>) {
259 if let Some(nested_rules) = &mut rule.nested_rules {
260 for mut nested_rule in nested_rules.drain(..) {
262 let old_selectors = mem::replace(&mut nested_rule.selectors, vec![]);
263 for selector in rule.selectors.iter() {
264 for nested_selector in old_selectors.iter().cloned() {
265 nested_rule
266 .selectors
267 .push(selector.nest_selector(nested_selector));
268 }
269 }
270 raise_subrules(&mut nested_rule, raised_rules);
271 raised_rules.push(nested_rule);
272 }
273 }
274}
275
#[cfg(test)]
mod tests {
    use super::*;

    // The bundled example sheet contains exactly two top-level entries.
    #[test]
    fn parsing_rules() {
        let sheet = StyleSheet::from_string(
            include_str!("../examples/example1.css").to_owned(),
            SourceId::null(),
        )
        .unwrap();
        assert_eq!(sheet.entries.len(), 2);
    }

    // Round-trip: parsing then printing reproduces the input (modulo CR).
    #[test]
    fn style_sheet_to_string() {
        let source = include_str!("../examples/example1.css").to_owned();
        let sheet = StyleSheet::from_string(source.clone(), SourceId::null()).unwrap();
        assert_eq!(sheet.to_string(None), source.replace('\r', ""));
    }
}
296}