//! Sourcemap visualizer (`oxc_sourcemap/sourcemap_visualizer.rs`).

1use std::borrow::Cow;
2
3use crate::SourceMap;
4
/// The `SourcemapVisualizer` is a helper for sourcemap testing.
/// It prints the mapping between original source tokens and the
/// corresponding spans in the generated (final) code.
pub struct SourcemapVisualizer<'a> {
    /// The generated (output) code that `sourcemap` describes.
    code: &'a str,
    /// The sourcemap whose tokens are visualized.
    sourcemap: &'a SourceMap,
}
11
12impl<'a> SourcemapVisualizer<'a> {
13    pub fn new(code: &'a str, sourcemap: &'a SourceMap) -> Self {
14        Self { code, sourcemap }
15    }
16
17    pub fn get_url(&self) -> String {
18        let result = self.sourcemap.to_json_string();
19        let s = format!("{}\0{}{}\0{}", self.code.len(), self.code, result.len(), result);
20        let hash = base64_simd::STANDARD.encode_to_string(s);
21        format!("https://evanw.github.io/source-map-visualization/#{hash}")
22    }
23
24    pub fn get_text(&self) -> String {
25        let mut s = String::new();
26        let source_contents = &self.sourcemap.source_contents;
27        if self.sourcemap.source_contents.is_empty() {
28            s.push_str("[no source contents]\n");
29            return s;
30        }
31
32        let source_contents_lines_map: Vec<Vec<Vec<u16>>> = source_contents
33            .iter()
34            .filter_map(|content| {
35                let content = content.as_ref()?;
36                Some(Self::generate_line_utf16_tables(content))
37            })
38            .collect();
39
40        let output_lines = Self::generate_line_utf16_tables(self.code);
41
42        let tokens = &self.sourcemap.tokens;
43
44        let mut last_source: Option<&str> = None;
45        for i in 0..tokens.len() {
46            let t = &tokens[i];
47            let Some(source_id) = t.get_source_id() else {
48                continue;
49            };
50            let Some(source) = self.sourcemap.get_source(source_id) else { continue };
51            let source_lines = &source_contents_lines_map[source_id as usize];
52
53            // Print source
54            if last_source != Some(source) {
55                s.push('-');
56                s.push(' ');
57                s.push_str(source);
58                s.push('\n');
59                last_source = Some(source);
60            }
61
62            // validate token position
63            let dst_invalid = t.dst_line as usize >= output_lines.len()
64                || (t.dst_col as usize) >= output_lines[t.dst_line as usize].len();
65            let src_invalid = t.src_line as usize >= source_lines.len()
66                || (t.src_col as usize) >= source_lines[t.src_line as usize].len();
67            if dst_invalid || src_invalid {
68                s.push_str(&format!(
69                    "({}:{}){} --> ({}:{}){}\n",
70                    t.src_line,
71                    t.src_col,
72                    if src_invalid { " [invalid]" } else { "" },
73                    t.dst_line,
74                    t.dst_col,
75                    if dst_invalid { " [invalid]" } else { "" },
76                ));
77                continue;
78            }
79
80            // find next dst column or EOL
81            let dst_end_col = {
82                match tokens.get(i + 1) {
83                    Some(t2) if t2.dst_line == t.dst_line => t2.dst_col,
84                    _ => output_lines[t.dst_line as usize].len() as u32,
85                }
86            };
87
88            // find next src column or EOL
89            let src_end_col = 'result: {
90                for t2 in &tokens[i + 1..] {
91                    if t2.get_source_id() == t.get_source_id() && t2.src_line == t.src_line {
92                        // skip duplicate or backward
93                        if t2.src_col <= t.src_col {
94                            continue;
95                        }
96                        break 'result t2.src_col;
97                    }
98                    break;
99                }
100                source_lines[t.src_line as usize].len() as u32
101            };
102
103            s.push_str(&format!(
104                "({}:{}) {:?} --> ({}:{}) {:?}\n",
105                t.src_line,
106                t.src_col,
107                Self::str_slice_by_token(source_lines, t.src_line, t.src_col, src_end_col),
108                t.dst_line,
109                t.dst_col,
110                Self::str_slice_by_token(&output_lines, t.dst_line, t.dst_col, dst_end_col)
111            ));
112        }
113
114        s
115    }
116
117    fn generate_line_utf16_tables(content: &str) -> Vec<Vec<u16>> {
118        let mut tables = vec![];
119        let mut line_byte_offset = 0;
120        for (i, ch) in content.char_indices() {
121            match ch {
122                '\r' | '\n' | '\u{2028}' | '\u{2029}' => {
123                    // Handle Windows-specific "\r\n" newlines
124                    if ch == '\r' && content.chars().nth(i + 1) == Some('\n') {
125                        continue;
126                    }
127                    tables.push(content[line_byte_offset..=i].encode_utf16().collect::<Vec<_>>());
128                    line_byte_offset = i + 1;
129                }
130                _ => {}
131            }
132        }
133        tables.push(content[line_byte_offset..].encode_utf16().collect::<Vec<_>>());
134        tables
135    }
136
137    fn str_slice_by_token(buff: &[Vec<u16>], line: u32, start: u32, end: u32) -> Cow<'_, str> {
138        let line = line as usize;
139        let start = start as usize;
140        let end = end as usize;
141        let s = &buff[line];
142        String::from_utf16(&s[start.min(end).min(s.len())..start.max(end).min(s.len())])
143            .unwrap()
144            .replace("\r", "")
145            .into()
146    }
147}