oak_core/helpers/
lexing.rs

//! Lexer testing utilities for the Oak parsing framework.
//!
//! This module provides comprehensive testing infrastructure for lexers,
//! including file-based testing, expected output comparison, and
//! test result serialization.

use crate::{
    Language, Lexer, SyntaxKind,
    errors::{OakDiagnostics, OakError},
    helpers::{create_file, json_from_path, source_from_path},
    source::Source,
};
use serde::{Deserialize, Serialize};
use serde_json::{Serializer, ser::PrettyFormatter};
use std::{
    path::{Path, PathBuf},
    sync::{Arc, Mutex},
    thread,
    time::{Duration, Instant},
};
use walkdir::WalkDir;

/// A lexer testing utility that can run tests against multiple files.
///
/// The `LexerTester` provides functionality to test lexers against a directory
/// of files with specific extensions, comparing actual output against expected
/// results stored in JSON files.
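///
/// # Examples
///
/// A minimal configuration sketch (the fixture directory and extension are
/// illustrative):
///
/// ```ignore
/// use std::time::Duration;
///
/// let tester = LexerTester::new("tests/fixtures")
///     .with_extension("oak")
///     .with_timeout(Duration::from_secs(5));
/// ```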
pub struct LexerTester {
    root: PathBuf,
    extensions: Vec<String>,
    timeout: Duration,
}

/// Expected lexer test results for comparison.
///
/// This struct represents the expected output of a lexer test, including
/// success status, token count, token data, and any expected errors.
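///
/// A serialized expected-results file looks roughly like the following
/// (illustrative values; actual token kinds depend on the language):
///
/// ```json
/// {
///     "success": true,
///     "count": 1,
///     "tokens": [
///         {
///             "kind": "Identifier",
///             "text": "foo",
///             "start": 0,
///             "end": 3
///         }
///     ],
///     "errors": []
/// }
/// ```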
#[derive(Debug, Serialize, Deserialize, PartialEq)]
pub struct LexerTestExpected {
    success: bool,
    count: usize,
    tokens: Vec<TokenData>,
    errors: Vec<String>,
}

/// Individual token data for lexer testing.
///
/// Represents a single token with its kind, text content, and position
/// information used for testing lexer output.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
pub struct TokenData {
    kind: String,
    text: String,
    start: usize,
    end: usize,
}

impl LexerTester {
    /// Creates a new lexer tester with the specified root directory.
    pub fn new<P: AsRef<Path>>(root: P) -> Self {
        Self { root: root.as_ref().to_path_buf(), extensions: vec![], timeout: Duration::from_secs(10) }
    }

    /// Adds a file extension to test against.
    pub fn with_extension(mut self, extension: impl ToString) -> Self {
        self.extensions.push(extension.to_string());
        self
    }

    /// Sets the timeout duration for each test.
    pub fn with_timeout(mut self, time: Duration) -> Self {
        self.timeout = time;
        self
    }

    /// Runs tests for the given lexer against all files in the root directory
    /// with the specified extensions.
    ///
    /// # Arguments
    ///
    /// * `lexer`: The lexer to test.
    ///
    /// # Examples
    ///
    /// A minimal sketch; `MyLanguage` and `MyLexer` are hypothetical
    /// implementations of the `Language` and `Lexer` traits:
    ///
    /// ```ignore
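    /// use std::time::Duration;
    ///
    /// // `MyLexer` and `MyLanguage` are placeholders for your own types.
    /// let lexer = MyLexer::new();
    /// LexerTester::new("tests/fixtures")
    ///     .with_extension("mylang")
    ///     .with_timeout(Duration::from_secs(5))
    ///     .run_tests::<MyLanguage, _>(lexer)
    ///     .expect("lexer tests should pass");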
    /// ```
    pub fn run_tests<L, Lex>(self, lexer: Lex) -> Result<(), OakError>
    where
        L: Language + Send + Sync + 'static,
        L::SyntaxKind: Serialize + std::fmt::Debug + Send + Sync,
        Lex: Lexer<L> + Send + Sync + 'static + Clone,
    {
        let test_files = self.find_test_files()?;

        for file_path in test_files {
            println!("Testing file: {}", file_path.display());
            self.test_single_file::<L, Lex>(&file_path, &lexer)?;
        }

        Ok(())
    }

    fn find_test_files(&self) -> Result<Vec<PathBuf>, OakError> {
        let mut files = Vec::new();

        for entry in WalkDir::new(&self.root) {
            // Propagate walk errors (e.g. permission denied) instead of panicking.
            let entry = entry.map_err(|e| OakError::custom_error(&e.to_string()))?;
            let path = entry.path();

            if path.is_file() {
                if let Some(ext) = path.extension() {
                    if self.extensions.iter().any(|e| e == ext.to_str().unwrap_or("")) {
                        files.push(path.to_path_buf());
                    }
                }
            }
        }

        Ok(files)
    }

    fn test_single_file<L, Lex>(&self, file_path: &Path, lexer: &Lex) -> Result<(), OakError>
    where
        L: Language + Send + Sync + 'static,
        L::SyntaxKind: Serialize + std::fmt::Debug + Send + Sync,
        Lex: Lexer<L> + Send + Sync + 'static + Clone,
    {
        let source = source_from_path(file_path)?;

        // Share the lexer output between threads via Arc and Mutex.
        let result = Arc::new(Mutex::new(None));
        let result_clone = Arc::clone(&result);

        // Clone the lexer so it can be moved into the worker thread.
        let lexer_clone = lexer.clone();
        // Wrap the source in an Arc so both threads can access it.
        let source_arc = Arc::new(source);
        let source_clone = Arc::clone(&source_arc);

        // Run the lexing on a separate thread so it can be abandoned on timeout.
        let handle = thread::spawn(move || {
            let mut builder = crate::GreenBuilder::new(0);
            let cache = crate::IncrementalCache::new(&mut builder);
            let output = lexer_clone.lex_incremental(&*source_clone, 0, cache);
            let mut result = result_clone.lock().unwrap();
            *result = Some(output);
        });

        // Wait for the worker thread to finish, or give up after the timeout.
        let start_time = Instant::now();
        let timeout_occurred = loop {
            // Check whether the thread has finished.
            if handle.is_finished() {
                break false;
            }

            // Check whether the timeout has elapsed.
            if start_time.elapsed() > self.timeout {
                break true;
            }

            // Sleep briefly to avoid busy-waiting.
            thread::sleep(Duration::from_millis(10));
        };

        // On timeout, return an error; the worker thread is left detached.
        if timeout_occurred {
            return Err(OakError::custom_error(&format!(
                "Lexer test timed out after {:?} for file: {}",
                self.timeout,
                file_path.display()
            )));
        }

        // Retrieve the lexer output from the worker thread.
        let OakDiagnostics { result: tokens_result, mut diagnostics } = {
            let result_guard = result.lock().unwrap();
            match result_guard.as_ref() {
                Some(output) => output.clone(),
                None => return Err(OakError::custom_error("Failed to get lexer result")),
            }
        };

        // Build the test result, folding any lexing error into the diagnostics.
        let mut success = true;
        let tokens = match tokens_result {
            Ok(tokens) => tokens,
            Err(e) => {
                success = false;
                diagnostics.push(e);
                Vec::new()
            }
        };

        if !diagnostics.is_empty() {
            success = false;
        }

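        // Snapshot only non-trivia tokens, capped at the first 100.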
        let tokens: Vec<TokenData> = tokens
            .into_iter()
            .filter(|token| !token.kind.is_trivia())
            .map(|token| {
                let text = source_arc.as_ref().get_text_in(token.span.clone().into()).to_string();
                TokenData { kind: format!("{:?}", token.kind), text, start: token.span.start, end: token.span.end }
            })
            .take(100)
            .collect();

        let errors: Vec<String> = diagnostics.iter().map(|e| e.to_string()).collect();
        let test_result = LexerTestExpected { success, count: tokens.len(), tokens, errors };

        // Locate the expected-results file; e.g. `foo.oak` maps to `foo.oak.expected.json`.
        let expected_file = file_path
            .with_extension(format!("{}.expected.json", file_path.extension().unwrap_or_default().to_str().unwrap_or("")));

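        // Set REGENERATE_TESTS=1 in the environment to rewrite expected files unconditionally.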
        let force_regenerate = std::env::var("REGENERATE_TESTS").map(|v| v == "1").unwrap_or(false);

        if expected_file.exists() && !force_regenerate {
            let expected: LexerTestExpected = json_from_path(&expected_file)?;

            if test_result != expected {
                println!("Test failed for file: {}", file_path.display());
                println!("Expected: {:#?}", expected);
                println!("Actual: {:#?}", test_result);
                return Err(OakError::custom_error("Test results do not match expected results"));
            }
        } else {
            let file = create_file(&expected_file)?;
            let mut writer = Serializer::with_formatter(file, PrettyFormatter::with_indent(b"    "));
            test_result.serialize(&mut writer)?;

            println!("Created expected result file: {}\nRe-run the tests to validate against it", expected_file.display());
        }

        Ok(())
    }
}