oxc_codegen/sourcemap_builder.rs

use std::path::Path;

use oxc_index::{IndexVec, define_nonmax_u32_index_type};
use oxc_span::Span;
use oxc_syntax::identifier::{LS, PS};

use crate::str::{LS_LAST_2_BYTES, LS_OR_PS_FIRST_BYTE, PS_LAST_2_BYTES};

/// Number of lines to check with linear search when translating byte position to line index
const LINE_SEARCH_LINEAR_ITERATIONS: usize = 16;

define_nonmax_u32_index_type! {
    /// Index into vec of `ColumnOffsets`
    struct ColumnOffsetsId;
}

/// Line offset tables.
///
/// Used for tracking lines and columns from byte offsets via binary search.
///
/// Code is adapted from [esbuild](https://github.com/evanw/esbuild/blob/cc74e6042a9f573bf58e1e3f165ebda70af4ad3b/internal/js_printer/js_printer.go#L4806-L4808)
///
/// Most lines of source code will not contain Unicode chars, so optimize storage for this common case.
///
/// Each line is represented by a `Line`.
/// Where a line is entirely ASCII, translating byte offset to UTF-16 column is simple,
/// given the byte offset of start of line. A column lookup table isn't needed for that line.
/// In this case, `Line::column_offsets_id` is `None`.
/// For rare lines which do contain Unicode chars, we store column offsets in a `ColumnOffsets` which
/// is stored in a separate `IndexVec`. `Line::column_offsets_id` contains index for that line's `ColumnOffsets`.
/// Storing column offset info which is rarely used in a separate structure keeps `Line` as small as possible.
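///
/// As a rough illustration: for the source `"aÖc\nde"` (`Ö` is 2 bytes in UTF-8),
/// line 0 contains a Unicode char so it gets a `ColumnOffsets`, while line 1 is pure ASCII:
///
/// ```text
/// lines          = [
///     Line { byte_offset_to_start_of_line: 0, column_offsets_id: Some(0) },
///     Line { byte_offset_to_start_of_line: 5, column_offsets_id: None },
/// ]
/// column_offsets = [
///     // One entry per byte, from the first Unicode char (byte 1 of the line)
///     // to the end of the line, giving the UTF-16 column for that byte offset
///     ColumnOffsets { byte_offset_to_first: 1, columns: [1, 1, 2, 3] },
/// ]
/// ```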
#[derive(Debug, Default)]
pub struct LineOffsetTables {
    lines: Vec<Line>,
    column_offsets: IndexVec<ColumnOffsetsId, ColumnOffsets>,
}

#[derive(Debug)]
pub struct Line {
    byte_offset_to_start_of_line: u32,
    column_offsets_id: Option<ColumnOffsetsId>,
}

#[derive(Debug)]
pub struct ColumnOffsets {
    byte_offset_to_first: u32,
    columns: Box<[u32]>,
}

#[expect(clippy::struct_field_names)]
pub struct SourcemapBuilder<'a> {
    source_id: u32,
    original_source: &'a str,
    last_generated_update: usize,
    last_position: Option<u32>,
    line_offset_tables: LineOffsetTables,
    sourcemap_builder: oxc_sourcemap::SourceMapBuilder,
    generated_line: u32,
    generated_column: u32,
    /// Tracks the last accessed line index to optimize sequential lookups in `search_original_line_and_column`.
    /// Most calls to this method access positions in increasing order (e.g., when mapping source tokens linearly),
    /// so we can avoid unnecessary binary searches by advancing linearly from this cached index.
    last_line_lookup: u32,
}

impl<'a> SourcemapBuilder<'a> {
    pub fn new(path: &Path, source_text: &'a str) -> Self {
        let mut sourcemap_builder = oxc_sourcemap::SourceMapBuilder::default();
        let line_offset_tables = Self::generate_line_offset_tables(source_text);
        let source_id =
            sourcemap_builder.set_source_and_content(path.to_string_lossy().as_ref(), source_text);
        Self {
            source_id,
            original_source: source_text,
            last_generated_update: 0,
            last_position: None,
            line_offset_tables,
            sourcemap_builder,
            generated_line: 0,
            generated_column: 0,
            last_line_lookup: 0,
        }
    }

    pub fn into_sourcemap(self) -> oxc_sourcemap::SourceMap {
        self.sourcemap_builder.into_sourcemap()
    }

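    /// Add a source mapping for `span`, recording the original source text of `span` as the
    /// token's name only when it differs from `name` (the name used in the generated output).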
    pub fn add_source_mapping_for_name(&mut self, output: &[u8], span: Span, name: &str) {
        debug_assert!(
            (span.end as usize) <= self.original_source.len(),
            "violated {}:{} <= {} for {name}",
            span.start,
            span.end,
            self.original_source.len()
        );
        let original_name = self.original_source.get(span.start as usize..span.end as usize);
        // The token name should be the original name.
        // If it hasn't changed, the name should be `None` to reduce `SourceMap` size.
        let token_name = if original_name == Some(name) { None } else { original_name };
        self.add_source_mapping(output, span.start, token_name);
    }

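    /// Add a mapping from byte `position` in the original source to the current end of `output`.
    /// Does nothing if the previous mapping was added for the same `position`.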
    pub fn add_source_mapping(&mut self, output: &[u8], position: u32, name: Option<&str>) {
        if self.last_position == Some(position) {
            return;
        }
        let (original_line, original_column) = self.search_original_line_and_column(position);
        self.update_generated_line_and_column(output);
        let name_id = name.map(|s| self.sourcemap_builder.add_name(s));
        self.sourcemap_builder.add_token(
            self.generated_line,
            self.generated_column,
            original_line,
            original_column,
            Some(self.source_id),
            name_id,
        );
        self.last_position = Some(position);
    }

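    /// Translate byte `position` in the original source to a 0-based `(line, column)` pair,
    /// where the column is measured in UTF-16 code units.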
    #[expect(clippy::cast_possible_truncation)]
    fn search_original_line_and_column(&mut self, position: u32) -> (u32, u32) {
        let original_line = self.search_original_line(position);

        // Store line index as starting point for next search
        self.last_line_lookup = original_line as u32;

        let line = &self.line_offset_tables.lines[original_line];
        let mut original_column = position - line.byte_offset_to_start_of_line;
        if let Some(column_offsets_id) = line.column_offsets_id {
            let column_offsets = &self.line_offset_tables.column_offsets[column_offsets_id];
            if original_column >= column_offsets.byte_offset_to_first {
                original_column = column_offsets.columns
                    [(original_column - column_offsets.byte_offset_to_first) as usize];
            }
        }
        (original_line as u32, original_column)
    }

    /// Find line index for byte index `position`, using line offset table.
    ///
    /// Usually output code is in roughly the same order as it was in the original source,
    /// so the line will be close to the line found in the last call to this function.
    ///
    /// So do a fast linear search over a few lines first, and fall back to a slower binary search
    /// if that doesn't find the line.
    fn search_original_line(&self, position: u32) -> usize {
        let lines = &self.line_offset_tables.lines;
        let idx = self.last_line_lookup as usize;

        if position >= lines[idx].byte_offset_to_start_of_line {
            self.search_original_line_forwards(position)
        } else {
            self.search_original_line_backwards(position)
        }
    }

    /// Find line index for byte index `position`, starting search at `last_line_lookup`,
    /// and working forwards.
    ///
    /// Search forwards, looking for first line which starts *after* `position`.
    /// `position` then must be on the line before that one.
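    ///
    /// For example (illustrative numbers): with `LINE_SEARCH_LINEAR_ITERATIONS == 16`,
    /// lines starting at byte offsets `[0, 10, 20, 30, 40, ...]`, and `last_line_lookup == 1`,
    /// a lookup for `position == 35` scans lines 2..18 linearly. Line 4 is the first which starts
    /// after byte 35 (at byte 40), so the answer is line 3. Only if none of those 16 lines starts
    /// after `position` does the binary search over the remaining lines run.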
    fn search_original_line_forwards(&self, position: u32) -> usize {
        let lines = &self.line_offset_tables.lines;
        let last_idx = self.last_line_lookup as usize;

        let start_idx = last_idx + 1;
        let end_idx = start_idx + LINE_SEARCH_LINEAR_ITERATIONS;

        // We have a fast path for when there are more than `LINE_SEARCH_LINEAR_ITERATIONS` lines
        // to search (common case unless file is very short).
        // Fast path is to do a linear search on the first `LINE_SEARCH_LINEAR_ITERATIONS` lines,
        // then fall back to a binary search over the remaining lines.
        // If there are fewer than `LINE_SEARCH_LINEAR_ITERATIONS` lines to search, just do a linear search,
        // but that's slower as the number of lines being searched is not constant, so the loop cannot be unrolled.
        if end_idx > lines.len() {
            // Fewer than `LINE_SEARCH_LINEAR_ITERATIONS` lines to search. Take slow path.
            // Unless file is very short, this branch is rarely taken.
            return self.search_original_line_forwards_when_few_lines(position);
        }

        // Linear search over `LINE_SEARCH_LINEAR_ITERATIONS` lines.
        // The compiler should unroll this loop as it has a constant number of iterations.
        // https://godbolt.org/z/heh1cnYa4
        for (line_idx, line) in lines[start_idx..end_idx].iter().enumerate() {
            if line.byte_offset_to_start_of_line > position {
                // This line starts after `position`. `position` must be on previous line.
                return start_idx + line_idx - 1;
            }
        }

        // Line not found yet. Binary search over remaining lines.
        lines[end_idx..].partition_point(|line| line.byte_offset_to_start_of_line <= position)
            + end_idx
            - 1
    }

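    /// Slow path for `search_original_line_forwards`, taken when fewer than
    /// `LINE_SEARCH_LINEAR_ITERATIONS` lines remain to search: plain linear search to the end.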
    #[cold]
    fn search_original_line_forwards_when_few_lines(&self, position: u32) -> usize {
        let lines = &self.line_offset_tables.lines;
        let last_idx = self.last_line_lookup as usize;
        let start_idx = last_idx + 1;

        for (line_idx, line) in lines[start_idx..].iter().enumerate() {
            if line.byte_offset_to_start_of_line > position {
                // This line starts after `position`. `position` must be on previous line.
                return start_idx + line_idx - 1;
            }
        }

        // No line starts after `position`. `position` must be on last line.
        lines.len() - 1
    }

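    /// Find line index for byte index `position`, starting search at `last_line_lookup`,
    /// and working backwards to the first line which starts at or before `position`.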
    fn search_original_line_backwards(&self, position: u32) -> usize {
        let lines = &self.line_offset_tables.lines;
        let mut idx = self.last_line_lookup as usize;

        while lines[idx].byte_offset_to_start_of_line > position {
            idx -= 1;
        }

        if lines[idx].byte_offset_to_start_of_line < position {
            idx = lines
                .partition_point(|table| table.byte_offset_to_start_of_line <= position)
                .saturating_sub(1);
        }

        idx
    }

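    /// Advance `generated_line` and `generated_column` over the bytes appended to `output`
    /// since the last call (i.e. since `last_generated_update`).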
    #[expect(clippy::cast_possible_truncation)]
    fn update_generated_line_and_column(&mut self, output: &[u8]) {
        const BATCH_SIZE: usize = 32;

        let start_index = self.last_generated_update;

        // Find last line break
        let mut line_start_index = start_index;
        let mut idx = line_start_index;
        let mut last_line_is_ascii = true;

        macro_rules! handle_byte {
            ($byte:ident) => {
                match $byte {
                    b'\n' => {}
                    b'\r' => {
                        // Handle Windows-specific "\r\n" newlines
                        if output.get(idx + 1) == Some(&b'\n') {
                            idx += 1;
                        }
                    }
                    _ if $byte.is_ascii() => {
                        idx += 1;
                        continue;
                    }
                    LS_OR_PS_FIRST_BYTE => {
                        let next_byte = output[idx + 1];
                        let next_next_byte = output[idx + 2];
                        if !matches!([next_byte, next_next_byte], LS_LAST_2_BYTES | PS_LAST_2_BYTES)
                        {
                            last_line_is_ascii = false;
                            idx += 1;
                            continue;
                        }
                        // LS/PS is a 3-byte line break. Advance `idx` to its last byte, so the
                        // shared code below positions `line_start_index` after the line break.
                        idx += 2;
                    }
                    _ => {
                        // Unicode char
                        last_line_is_ascii = false;
                        idx += 1;
                        continue;
                    }
                }

                // Line break found.
                // `idx` is on the last byte of the line break.
                // Record start of next line, then advance `idx` past the line break.
                line_start_index = idx + 1;
                self.generated_line += 1;
                self.generated_column = 0;
                last_line_is_ascii = true;
                idx += 1;
            };
        }

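        // Process `output` in batches of `BATCH_SIZE` bytes while more than a full batch remains;
        // the loop below handles the remaining tail. The fixed batch size is presumably there to
        // give the compiler a simpler inner loop to optimize.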
        while let (end, overflow) = idx.overflowing_add(BATCH_SIZE)
            && !overflow
            && end < output.len()
        {
            while idx < end {
                let b = output[idx];
                handle_byte!(b);
            }
        }
        while idx < output.len() {
            let b = output[idx];
            handle_byte!(b);
        }

        // Calculate column
        self.generated_column += if last_line_is_ascii {
            (output.len() - line_start_index) as u32
        } else {
            // TODO: It'd be better if we could use `from_utf8_unchecked` here, but we'd need to make
            // this function unsafe and have the caller guarantee that `output` contains valid UTF-8.
            let last_line = std::str::from_utf8(&output[line_start_index..]).unwrap();
            // Mozilla's "source-map" library counts columns using UTF-16 code units
            last_line.encode_utf16().count() as u32
        };
        self.last_generated_update = output.len();
    }

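    /// Build the [`LineOffsetTables`] for `content`: one `Line` per line of source, plus a
    /// `ColumnOffsets` table for each line which contains Unicode chars.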
    fn generate_line_offset_tables(content: &str) -> LineOffsetTables {
        let mut lines = vec![];
        let mut column_offsets = IndexVec::new();

        // Used as a buffer to reduce memory reallocations.
        // Pre-allocate with reasonable capacity to avoid frequent reallocations.
        // 16 is a guess, probably adequate for most files which don't heavily use unicode chars.
        // 16 entries is 64 bytes = 1 CPU cache line on most CPUs.
        // If file does heavily use unicode chars, `columns` will grow adaptively as needed.
        let mut columns = Vec::with_capacity(16);

        // Process content line-by-line.
        // For each line, start by assuming line will be entirely ASCII, and read byte-by-byte.
        // If line is all ASCII, UTF-8 columns and UTF-16 columns are the same,
        // so no need to create a `columns` Vec. This is the fast path for the common case.
        // If a Unicode character is found, read rest of line char-by-char, populating `columns` Vec.
        // At end of line, go back to top of outer loop, and again assume ASCII for next line.
        let mut line_byte_offset = 0;
        'lines: loop {
            lines.push(Line {
                byte_offset_to_start_of_line: line_byte_offset,
                column_offsets_id: None,
            });

            let remaining = &content.as_bytes()[line_byte_offset as usize..];
            for (byte_offset_from_line_start, b) in remaining.iter().enumerate() {
                #[expect(clippy::cast_possible_truncation)]
                let mut byte_offset_from_line_start = byte_offset_from_line_start as u32;
                match b {
                    b'\n' => {
                        byte_offset_from_line_start += 1;
                    }
                    b'\r' => {
                        byte_offset_from_line_start += 1;
                        // Handle Windows-specific "\r\n" newlines
                        if remaining.get(byte_offset_from_line_start as usize) == Some(&b'\n') {
                            byte_offset_from_line_start += 1;
                        }
                    }
                    _ if b.is_ascii() => {
                        continue;
                    }
                    _ => {
                        // Unicode char found.
                        // Set `column_offsets_id` for line and create `columns` Vec.
                        let line = lines.last_mut().unwrap();
                        line.column_offsets_id =
                            Some(ColumnOffsetsId::from_usize(column_offsets.len()));

                        // Loop through rest of line char-by-char.
                        // `chunk_byte_offset` in this loop is byte offset from start of this 1st
                        // Unicode char.
                        let mut column = byte_offset_from_line_start;
                        line_byte_offset += byte_offset_from_line_start;
                        let remaining = &content[line_byte_offset as usize..];
                        for (chunk_byte_offset, ch) in remaining.char_indices() {
                            #[expect(clippy::cast_possible_truncation)]
                            let mut chunk_byte_offset = chunk_byte_offset as u32;
                            columns.extend(std::iter::repeat_n(column, ch.len_utf8()));

                            match ch {
                                '\r' => {
                                    // Handle Windows-specific "\r\n" newlines
                                    chunk_byte_offset += 1;
                                    if remaining.as_bytes().get(chunk_byte_offset as usize)
                                        == Some(&b'\n')
                                    {
                                        chunk_byte_offset += 1;
                                        columns.push(column + 1);
                                    }
                                }
                                '\n' => {
                                    chunk_byte_offset += 1;
                                }
                                LS | PS => {
                                    chunk_byte_offset += 3;
                                }
                                #[expect(clippy::cast_possible_truncation)]
                                _ => {
                                    // Mozilla's "source-map" library counts columns using UTF-16 code units
                                    column += ch.len_utf16() as u32;
                                    continue;
                                }
                            }

                            // Line break found.
                            // `chunk_byte_offset` is now the offset of *end* of the line break.
                            line_byte_offset += chunk_byte_offset;

                            // Record column offsets.
                            // `columns.clone().into_boxed_slice()` does perform an allocation,
                            // but only one - `Vec::clone` produces a `Vec` with `capacity == len`,
                            // so `Vec::into_boxed_slice` just drops the `capacity` field,
                            // and does not need to reallocate again.
                            // `columns` is reused for next line, and will grow adaptively depending on
                            // how heavily the file uses unicode chars.
                            column_offsets.push(ColumnOffsets {
                                byte_offset_to_first: byte_offset_from_line_start,
                                columns: columns.clone().into_boxed_slice(),
                            });
                            columns.clear();

                            // Return to outer loop for next line
                            continue 'lines;
                        }

                        // EOF.
                        // One last column entry for EOF position.
                        columns.push(column);

                        // Record column offsets
                        column_offsets.push(ColumnOffsets {
                            byte_offset_to_first: byte_offset_from_line_start,
                            columns: columns.into_boxed_slice(),
                        });

                        break 'lines;
                    }
                }

                // Line break found.
                // `byte_offset_from_line_start` is now the length of line *including* line break.
                line_byte_offset += byte_offset_from_line_start;
                continue 'lines;
            }

            // EOF
            break;
        }

        LineOffsetTables { lines, column_offsets }
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn builder_ascii() {
        assert_mapping("", &[(0, 0, 0)]);
        assert_mapping("a", &[(0, 0, 0), (1, 0, 1)]);
        assert_mapping("\n", &[(0, 0, 0), (1, 1, 0)]);
        assert_mapping("a\n", &[(0, 0, 0), (1, 0, 1), (2, 1, 0)]);
        assert_mapping("\na", &[(0, 0, 0), (1, 1, 0), (2, 1, 1)]);
        assert_mapping(
            "ab\ncd\n\nef",
            &[
                (0, 0, 0),
                (1, 0, 1),
                (2, 0, 2),
                (3, 1, 0),
                (4, 1, 1),
                (5, 1, 2),
                (6, 2, 0),
                (7, 3, 0),
                (8, 3, 1),
                (9, 3, 2),
            ],
        );

        assert_mapping("\r", &[(0, 0, 0), (1, 1, 0)]);
        assert_mapping("\r\r", &[(0, 0, 0), (1, 1, 0), (2, 2, 0)]);
        assert_mapping("a\ra", &[(0, 0, 0), (1, 0, 1), (2, 1, 0), (3, 1, 1)]);

        assert_mapping("\r\n", &[(0, 0, 0), (1, 0, 1), (2, 1, 0)]);
        assert_mapping("\r\n\r\n", &[(0, 0, 0), (1, 0, 1), (2, 1, 0), (3, 1, 1), (4, 2, 0)]);
        assert_mapping("a\r\na", &[(0, 0, 0), (1, 0, 1), (2, 0, 2), (3, 1, 0), (4, 1, 1)]);
    }

    #[test]
    fn builder_unicode() {
        assert_mapping("Ö", &[(0, 0, 0), (2, 0, 1)]);
        assert_mapping("ÖÖ", &[(0, 0, 0), (2, 0, 1), (4, 0, 2)]);
        assert_mapping("Ö\n", &[(0, 0, 0), (2, 0, 1), (3, 1, 0)]);
        assert_mapping("ÖÖ\n", &[(0, 0, 0), (2, 0, 1), (4, 0, 2), (5, 1, 0)]);
        assert_mapping("\nÖ", &[(0, 0, 0), (1, 1, 0), (3, 1, 1)]);
        assert_mapping("\nÖÖ", &[(0, 0, 0), (1, 1, 0), (3, 1, 1), (5, 1, 2)]);
        assert_mapping("Ö\nÖ", &[(0, 0, 0), (2, 0, 1), (3, 1, 0), (5, 1, 1)]);
        assert_mapping("\nÖÖ\n", &[(0, 0, 0), (1, 1, 0), (3, 1, 1), (5, 1, 2), (6, 2, 0)]);
        assert_mapping("Ö\ra", &[(0, 0, 0), (2, 0, 1), (3, 1, 0), (4, 1, 1)]);
        assert_mapping("Ö\r\na", &[(0, 0, 0), (2, 0, 1), (3, 0, 2), (4, 1, 0), (5, 1, 1)]);
    }
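
    // Illustrative check of the table layout described on `LineOffsetTables`:
    // line 0 of `"aÖc\nde"` contains a non-ASCII char so it gets a `ColumnOffsets`,
    // line 1 is pure ASCII so it does not.
    #[test]
    fn line_offset_tables_unicode() {
        let tables = SourcemapBuilder::generate_line_offset_tables("aÖc\nde");

        assert_eq!(tables.lines.len(), 2);
        assert_eq!(tables.lines[0].byte_offset_to_start_of_line, 0);
        assert_eq!(tables.lines[1].byte_offset_to_start_of_line, 5);
        assert!(tables.lines[1].column_offsets_id.is_none());

        // `columns` has one entry per byte from the first non-ASCII char ('Ö', at byte 1 of
        // the line) to the end of the line: bytes 1-2 ('Ö') -> column 1, byte 3 ('c') -> column 2,
        // byte 4 ('\n') -> column 3.
        let id = tables.lines[0].column_offsets_id.unwrap();
        let offsets = &tables.column_offsets[id];
        assert_eq!(offsets.byte_offset_to_first, 1);
        assert_eq!(&*offsets.columns, &[1u32, 1, 2, 3][..]);
    }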

    #[test]
    fn builder_with_unordered_position() {
        assert_mapping("\na\nb", &[(4, 2, 1), (0, 0, 0), (1, 1, 0), (2, 1, 1), (3, 2, 0)]);
    }

    fn assert_mapping(source: &str, mappings: &[(u32, u32, u32)]) {
        let mut builder = SourcemapBuilder::new(Path::new("x.js"), source);
        for (position, expected_line, expected_col) in mappings.iter().copied() {
            let (line, col) = builder.search_original_line_and_column(position);
            assert_eq!(
                (line, col),
                (expected_line, expected_col),
                "Incorrect mapping for '{source}' - position {position} = line {line}, column {col}"
            );
        }
    }

    #[test]
    fn add_source_mapping() {
        fn create_mappings(source: &str, line: u32, column: u32) {
            let mut builder = SourcemapBuilder::new(Path::new("x.js"), source);
            let output: Vec<u8> = source.as_bytes().into();
            for (i, _ch) in source.char_indices() {
                #[expect(clippy::cast_possible_truncation)]
                builder.add_source_mapping(&output, i as u32, None);
                assert!(
                    builder.generated_line == line && builder.generated_column == column,
                    "Incorrect generated mapping for '{source}' ({:?}) starting at {i} - line {}, column {}",
                    source.as_bytes(),
                    builder.generated_line,
                    builder.generated_column
                );
                assert_eq!(builder.last_generated_update, source.len());
            }
        }

        create_mappings("", 0, 0);
        create_mappings("abc", 0, 3);
        create_mappings("\n", 1, 0);
        create_mappings("\n\n\n", 3, 0);
        create_mappings("\r", 1, 0);
        create_mappings("\r\r\r", 3, 0);
        create_mappings("\r\n", 1, 0);
        create_mappings("\r\n\r\n\r\n", 3, 0);
        create_mappings("\nabc", 1, 3);
        create_mappings("abc\n", 1, 0);
        create_mappings("\rabc", 1, 3);
        create_mappings("abc\r", 1, 0);
        create_mappings("\r\nabc", 1, 3);
        create_mappings("abc\r\n", 1, 0);
        create_mappings("ÖÖ\nÖ\nÖÖÖ", 2, 3);
    }

    #[test]
    fn add_source_mapping_for_name() {
        let output = b"ac";
        let mut builder = SourcemapBuilder::new(Path::new("x.js"), "ab");
        builder.add_source_mapping_for_name(output, Span::new(0, 1), "a");
        builder.add_source_mapping_for_name(output, Span::new(1, 2), "c");
        let sm = builder.into_sourcemap();
        // The name `a` did not change, so no name is stored on the token.
        assert_eq!(
            sm.get_source_view_token(0_u32).as_ref().and_then(|token| token.get_name()),
            None
        );
        // The name `b` was changed to `c`, so the original name `b` is stored on the token.
        assert_eq!(
            sm.get_source_view_token(1_u32).as_ref().and_then(|token| token.get_name()),
            Some(&"b".into())
        );
    }

    #[test]
    fn add_source_mapping_for_unordered_position() {
        let output = b"";
        let mut builder = SourcemapBuilder::new(Path::new("x.js"), "ab");
        builder.add_source_mapping(output, 1, None);
        builder.add_source_mapping(output, 0, None);
        let sm = builder.into_sourcemap();
        assert_eq!(sm.get_tokens().count(), 2);
    }

    static SOURCE: &str = "a\nbc\ndef";
    static MAPPINGS: &[(u32, u32)] = &[
        (0, 0), // 'a'
        (0, 1), // '\n'
        (1, 0), // 'b'
        (1, 1), // 'c'
        (1, 2), // '\n'
        (2, 0), // 'd'
        (2, 1), // 'e'
        (2, 2), // 'f'
        (2, 3), // EOF
    ];

    #[test]
    fn test_search_original_line_and_column_sequential() {
        let mut builder = SourcemapBuilder::new(Path::new("x.js"), SOURCE);

        #[expect(clippy::cast_possible_truncation)]
        for (pos, (expected_line, expected_col)) in MAPPINGS.iter().copied().enumerate() {
            let (line, col) = builder.search_original_line_and_column(pos as u32);
            assert_eq!((line, col), (expected_line, expected_col), "Mismatch at position {pos}");
        }
    }

    #[test]
    fn test_search_original_line_and_column_reverse_sequential() {
        let mut builder = SourcemapBuilder::new(Path::new("x.js"), SOURCE);

        #[expect(clippy::cast_possible_truncation)]
        for (pos, (expected_line, expected_col)) in MAPPINGS.iter().copied().enumerate().rev() {
            let (line, col) = builder.search_original_line_and_column(pos as u32);
            assert_eq!((line, col), (expected_line, expected_col), "Mismatch at position {pos}");
        }
    }

    #[test]
    fn test_search_original_line_and_column_non_sequential() {
        let mut builder = SourcemapBuilder::new(Path::new("x.js"), SOURCE);

        let indexes = [8, 0, 7, 1, 6, 2, 5, 3, 4];

        for pos in indexes {
            let (expected_line, expected_col) = MAPPINGS[pos as usize];
            let (line, col) = builder.search_original_line_and_column(pos);
            assert_eq!((line, col), (expected_line, expected_col), "Mismatch at position {pos}");
        }
    }
}