use std::cell::{Cell, OnceCell, RefCell};
use std::collections::HashMap;
use std::fmt;
use std::io;
use serde::Deserialize;
use srcmap_codec::{DecodeError, vlq_encode_unsigned};
use srcmap_scopes::ScopeInfo;
pub mod js_identifiers;
pub mod source_view;
pub mod utils;
pub use source_view::SourceView;
/// Sentinel stored in `Mapping::source` when a segment has no original source.
const NO_SOURCE: u32 = u32::MAX;
/// Sentinel stored in `Mapping::name` when a segment has no associated name.
const NO_NAME: u32 = u32::MAX;
/// One decoded `mappings` segment: a generated position plus its optional
/// original position and name.
#[derive(Debug, Clone, Copy)]
pub struct Mapping {
    // 0-based position in the generated file.
    pub generated_line: u32,
    pub generated_column: u32,
    // Index into the map's `sources`, or `NO_SOURCE` when unmapped.
    pub source: u32,
    pub original_line: u32,
    pub original_column: u32,
    // Index into the map's `names`, or `NO_NAME` when absent.
    pub name: u32,
    // True when this segment was flagged via `rangeMappings`: it covers a
    // column span, and lookups extrapolate column deltas past its start.
    pub is_range_mapping: bool,
}
/// Result of a generated-to-original position lookup.
#[derive(Debug, Clone)]
pub struct OriginalLocation {
    pub source: u32,
    pub line: u32,
    pub column: u32,
    pub name: Option<u32>,
}
/// Result of an original-to-generated position lookup.
#[derive(Debug, Clone)]
pub struct GeneratedLocation {
    pub line: u32,
    pub column: u32,
}
/// Column-search bias for lookups: prefer the closest mapping at or before
/// the requested column (the default), or at or after it.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum Bias {
    #[default]
    GreatestLowerBound,
    LeastUpperBound,
}
/// Original-source span produced by [`SourceMap::map_range`].
#[derive(Debug, Clone)]
pub struct MappedRange {
    pub source: u32,
    pub original_start_line: u32,
    pub original_start_column: u32,
    pub original_end_line: u32,
    pub original_end_column: u32,
}
/// Errors produced while parsing or decoding a source map.
#[derive(Debug)]
pub enum ParseError {
    /// The JSON document itself failed to parse.
    Json(serde_json::Error),
    /// A VLQ payload (`mappings` / `rangeMappings`) failed to decode.
    Vlq(DecodeError),
    /// `version` was present but not 3.
    InvalidVersion(u32),
    /// The `scopes` field failed to decode.
    Scopes(srcmap_scopes::ScopesError),
    /// A nested section map was itself an indexed map.
    NestedIndexMap,
    /// Index-map sections were not in ascending (line, column) order.
    SectionsNotOrdered,
    /// A `data:` URL did not have a recognized shape.
    InvalidDataUrl,
}
impl fmt::Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Json(e) => write!(f, "JSON parse error: {e}"),
Self::Vlq(e) => write!(f, "VLQ decode error: {e}"),
Self::InvalidVersion(v) => write!(f, "unsupported source map version: {v}"),
Self::Scopes(e) => write!(f, "scopes decode error: {e}"),
Self::NestedIndexMap => write!(f, "section map must not be an indexed source map"),
Self::SectionsNotOrdered => {
write!(f, "sections must be in ascending (line, column) order")
}
Self::InvalidDataUrl => write!(f, "malformed data URL"),
}
}
}
impl std::error::Error for ParseError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match self {
Self::Json(e) => Some(e),
Self::Vlq(e) => Some(e),
Self::Scopes(e) => Some(e),
Self::InvalidVersion(_)
| Self::NestedIndexMap
| Self::SectionsNotOrdered
| Self::InvalidDataUrl => None,
}
}
}
impl From<serde_json::Error> for ParseError {
fn from(e: serde_json::Error) -> Self {
Self::Json(e)
}
}
impl From<DecodeError> for ParseError {
fn from(e: DecodeError) -> Self {
Self::Vlq(e)
}
}
impl From<srcmap_scopes::ScopesError> for ParseError {
fn from(e: srcmap_scopes::ScopesError) -> Self {
Self::Scopes(e)
}
}
/// Resolves raw `sources` entries against `source_root`.
///
/// Each present entry is prefixed with `source_root` by plain string
/// concatenation (no separator is inserted); `None` entries become the
/// empty string.
pub fn resolve_sources(raw_sources: &[Option<String>], source_root: &str) -> Vec<String> {
    let mut resolved = Vec::with_capacity(raw_sources.len());
    for entry in raw_sources {
        let value = match entry {
            None => String::new(),
            Some(path) if source_root.is_empty() => path.clone(),
            Some(path) => {
                let mut joined = String::with_capacity(source_root.len() + path.len());
                joined.push_str(source_root);
                joined.push_str(path);
                joined
            }
        };
        resolved.push(value);
    }
    resolved
}
/// Builds the reverse lookup table from resolved source path to index.
/// When a path occurs more than once, the later (larger) index wins,
/// matching collect-from-pairs semantics.
fn build_source_map(sources: &[String]) -> HashMap<String, u32> {
    let mut lookup = HashMap::with_capacity(sources.len());
    for (index, source) in sources.iter().enumerate() {
        lookup.insert(source.clone(), index as u32);
    }
    lookup
}
/// Borrowing deserialization target for a full version-3 source map,
/// including index-map `sections` and unknown extension fields.
#[derive(Deserialize)]
struct RawSourceMap<'a> {
    version: u32,
    #[serde(default)]
    file: Option<String>,
    #[serde(default, rename = "sourceRoot")]
    source_root: Option<String>,
    #[serde(default)]
    sources: Vec<Option<String>>,
    #[serde(default, rename = "sourcesContent")]
    sources_content: Option<Vec<Option<String>>>,
    #[serde(default)]
    names: Vec<String>,
    // Borrowed: the VLQ payload is decoded without copying the string.
    #[serde(default, borrow)]
    mappings: &'a str,
    #[serde(default, rename = "ignoreList")]
    ignore_list: Option<Vec<u32>>,
    // Legacy alias, used only when `ignoreList` is absent.
    #[serde(default, rename = "x_google_ignoreList")]
    x_google_ignore_list: Option<Vec<u32>>,
    #[serde(default, rename = "debugId", alias = "debug_id")]
    debug_id: Option<String>,
    #[serde(default, borrow)]
    scopes: Option<&'a str>,
    #[serde(default, borrow, rename = "rangeMappings")]
    range_mappings: Option<&'a str>,
    // Present only for indexed source maps.
    #[serde(default)]
    sections: Option<Vec<RawSection>>,
    // All remaining fields; only `x_*` / `x-*` keys are retained later.
    #[serde(flatten)]
    extensions: HashMap<String, serde_json::Value>,
}
/// One section of an indexed map; the nested map is kept as raw JSON and
/// parsed recursively.
#[derive(Deserialize)]
struct RawSection {
    offset: RawOffset,
    map: Box<serde_json::value::RawValue>,
}
/// Generated-position offset at which a section's mappings start.
#[derive(Deserialize)]
struct RawOffset {
    line: u32,
    column: u32,
}
/// Lighter-weight deserialization target: like `RawSourceMap` but without
/// `sourcesContent` and without flattened extensions, for callers that do
/// not need them. `sections` is parsed only so its presence can be detected.
#[derive(Deserialize)]
pub struct RawSourceMapLite<'a> {
    pub version: u32,
    #[serde(default)]
    pub file: Option<String>,
    #[serde(default, rename = "sourceRoot")]
    pub source_root: Option<String>,
    #[serde(default)]
    pub sources: Vec<Option<String>>,
    #[serde(default)]
    pub names: Vec<String>,
    #[serde(default, borrow)]
    pub mappings: &'a str,
    #[serde(default, rename = "ignoreList")]
    pub ignore_list: Option<Vec<u32>>,
    // Legacy alias, used only when `ignoreList` is absent.
    #[serde(default, rename = "x_google_ignoreList")]
    pub x_google_ignore_list: Option<Vec<u32>>,
    #[serde(default, rename = "debugId", alias = "debug_id")]
    pub debug_id: Option<String>,
    #[serde(default, borrow)]
    pub scopes: Option<&'a str>,
    #[serde(default, borrow, rename = "rangeMappings")]
    pub range_mappings: Option<&'a str>,
    #[serde(default)]
    pub sections: Option<Vec<serde_json::Value>>,
}
/// A fully decoded version-3 source map (index maps are merged flat at
/// parse time).
#[derive(Debug, Clone)]
pub struct SourceMap {
    pub file: Option<String>,
    pub source_root: Option<String>,
    /// Source paths, already prefixed with `source_root` (see `resolve_sources`).
    pub sources: Vec<String>,
    pub sources_content: Vec<Option<String>>,
    pub names: Vec<String>,
    pub ignore_list: Vec<u32>,
    /// Retained vendor (`x_*` / `x-*`) extension fields, re-emitted by `to_json`.
    pub extensions: HashMap<String, serde_json::Value>,
    pub debug_id: Option<String>,
    pub scopes: Option<ScopeInfo>,
    // All segments, ordered by (generated_line, generated_column).
    mappings: Vec<Mapping>,
    // line_offsets[l]..line_offsets[l + 1] is the `mappings` range for
    // generated line `l`; length is line_count + 1.
    line_offsets: Vec<u32>,
    // Lazily built index of `mappings` sorted by original position, used
    // for original-to-generated lookups.
    reverse_index: OnceCell<Vec<u32>>,
    // Resolved source path -> index into `sources`.
    source_map: HashMap<String, u32>,
    // Cached `mappings.iter().any(|m| m.is_range_mapping)`.
    has_range_mappings: bool,
}
impl SourceMap {
/// Parses a version-3 source map from JSON, including indexed
/// (`sections`) maps, which are merged into a flat map.
pub fn from_json(json: &str) -> Result<Self, ParseError> {
    Self::from_json_inner(json, true)
}
/// Parses a version-3 source map from JSON without retaining
/// `sourcesContent` (left empty) or vendor extension fields.
///
/// Indexed (`sections`) maps are not supported on this path and are
/// rejected, matching [`LazySourceMap::from_json_no_content`].
pub fn from_json_no_content(json: &str) -> Result<Self, ParseError> {
    let raw: RawSourceMapLite<'_> = serde_json::from_str(json)?;
    if raw.version != 3 {
        return Err(ParseError::InvalidVersion(raw.version));
    }
    // Fix: previously an indexed map was silently accepted here and
    // produced a map with empty mappings; reject it explicitly, the same
    // way LazySourceMap::from_json_no_content does.
    if raw.sections.is_some() {
        return Err(ParseError::NestedIndexMap);
    }
    let source_root = raw.source_root.as_deref().unwrap_or("");
    let sources = resolve_sources(&raw.sources, source_root);
    let source_map = build_source_map(&sources);
    let (mut mappings, line_offsets) = decode_mappings(raw.mappings)?;
    // Range mappings are layered onto the already-decoded segments.
    if let Some(range_mappings_str) = raw.range_mappings
        && !range_mappings_str.is_empty()
    {
        decode_range_mappings(range_mappings_str, &mut mappings, &line_offsets)?;
    }
    let num_sources = sources.len();
    let scopes = match raw.scopes {
        Some(scopes_str) if !scopes_str.is_empty() => {
            Some(srcmap_scopes::decode_scopes(scopes_str, &raw.names, num_sources)?)
        }
        _ => None,
    };
    // `ignoreList` takes precedence over the legacy `x_google_ignoreList`.
    let ignore_list = match raw.ignore_list {
        Some(list) => list,
        None => raw.x_google_ignore_list.unwrap_or_default(),
    };
    let has_range_mappings = mappings.iter().any(|m| m.is_range_mapping);
    Ok(Self {
        file: raw.file,
        source_root: raw.source_root,
        sources,
        sources_content: Vec::new(),
        names: raw.names,
        ignore_list,
        extensions: HashMap::new(),
        debug_id: raw.debug_id,
        scopes,
        mappings,
        line_offsets,
        reverse_index: OnceCell::new(),
        source_map,
        has_range_mappings,
    })
}
/// Shared JSON entry point. `allow_sections` is false when parsing a
/// nested section map, which must not itself be indexed.
fn from_json_inner(json: &str, allow_sections: bool) -> Result<Self, ParseError> {
    let mut raw: RawSourceMap<'_> = serde_json::from_str(json)?;
    if raw.version != 3 {
        return Err(ParseError::InvalidVersion(raw.version));
    }
    // Take `sections` out so the rest of `raw` stays movable.
    match raw.sections.take() {
        Some(_) if !allow_sections => Err(ParseError::NestedIndexMap),
        Some(sections) => Self::from_sections(raw.file, sections),
        None => Self::from_regular(raw),
    }
}
/// Builds a `SourceMap` from a parsed, non-indexed raw map.
fn from_regular(raw: RawSourceMap<'_>) -> Result<Self, ParseError> {
    let source_root = raw.source_root.as_deref().unwrap_or("");
    let sources = resolve_sources(&raw.sources, source_root);
    let sources_content = raw.sources_content.unwrap_or_default();
    let source_map = build_source_map(&sources);
    let (mut mappings, line_offsets) = decode_mappings(raw.mappings)?;
    // Range mappings are layered onto the already-decoded segments.
    if let Some(range_mappings_str) = raw.range_mappings
        && !range_mappings_str.is_empty()
    {
        decode_range_mappings(range_mappings_str, &mut mappings, &line_offsets)?;
    }
    let num_sources = sources.len();
    let scopes = match raw.scopes {
        Some(scopes_str) if !scopes_str.is_empty() => {
            Some(srcmap_scopes::decode_scopes(scopes_str, &raw.names, num_sources)?)
        }
        _ => None,
    };
    // `ignoreList` takes precedence over the legacy `x_google_ignoreList`.
    let ignore_list = match raw.ignore_list {
        Some(list) => list,
        None => raw.x_google_ignore_list.unwrap_or_default(),
    };
    // Only vendor-prefixed extension fields are preserved for round-tripping.
    let extensions: HashMap<String, serde_json::Value> = raw
        .extensions
        .into_iter()
        .filter(|(k, _)| k.starts_with("x_") || k.starts_with("x-"))
        .collect();
    let has_range_mappings = mappings.iter().any(|m| m.is_range_mapping);
    Ok(Self {
        file: raw.file,
        source_root: raw.source_root,
        sources,
        sources_content,
        names: raw.names,
        ignore_list,
        extensions,
        debug_id: raw.debug_id,
        scopes,
        mappings,
        line_offsets,
        reverse_index: OnceCell::new(),
        source_map,
        has_range_mappings,
    })
}
/// Merges the sections of an indexed source map into one flat map.
///
/// Sources and names are deduplicated by value across sections; each
/// section's mappings are shifted by its `offset` and the merged list is
/// re-sorted by generated position.
fn from_sections(file: Option<String>, sections: Vec<RawSection>) -> Result<Self, ParseError> {
    let mut all_sources: Vec<String> = Vec::new();
    let mut all_sources_content: Vec<Option<String>> = Vec::new();
    let mut all_names: Vec<String> = Vec::new();
    let mut all_mappings: Vec<Mapping> = Vec::new();
    let mut all_ignore_list: Vec<u32> = Vec::new();
    let mut max_line: u32 = 0;
    // Value -> merged-index tables used for deduplication.
    let mut source_index_map: HashMap<String, u32> = HashMap::new();
    let mut name_index_map: HashMap<String, u32> = HashMap::new();
    // Section offsets must be strictly ascending by (line, column).
    for i in 1..sections.len() {
        let prev = &sections[i - 1].offset;
        let curr = &sections[i].offset;
        if (curr.line, curr.column) <= (prev.line, prev.column) {
            return Err(ParseError::SectionsNotOrdered);
        }
    }
    for section in &sections {
        // `false`: a section map must not itself be indexed.
        let sub = Self::from_json_inner(section.map.get(), false)?;
        let line_offset = section.offset.line;
        let col_offset = section.offset.column;
        // Per-section source index -> merged source index; contents follow
        // their source on first insertion.
        let source_remap: Vec<u32> = sub
            .sources
            .iter()
            .enumerate()
            .map(|(i, s)| {
                if let Some(&existing) = source_index_map.get(s) {
                    existing
                } else {
                    let idx = all_sources.len() as u32;
                    all_sources.push(s.clone());
                    let content = sub.sources_content.get(i).cloned().unwrap_or(None);
                    all_sources_content.push(content);
                    source_index_map.insert(s.clone(), idx);
                    idx
                }
            })
            .collect();
        // Per-section name index -> merged name index.
        let name_remap: Vec<u32> = sub
            .names
            .iter()
            .map(|n| {
                if let Some(&existing) = name_index_map.get(n) {
                    existing
                } else {
                    let idx = all_names.len() as u32;
                    all_names.push(n.clone());
                    name_index_map.insert(n.clone(), idx);
                    idx
                }
            })
            .collect();
        // Translate the section's ignore list into merged indices, deduped.
        for &idx in &sub.ignore_list {
            let global_idx = source_remap[idx as usize];
            if !all_ignore_list.contains(&global_idx) {
                all_ignore_list.push(global_idx);
            }
        }
        for m in &sub.mappings {
            let gen_line = m.generated_line + line_offset;
            // The column offset applies only to the section's first line.
            let gen_col = if m.generated_line == 0 {
                m.generated_column + col_offset
            } else {
                m.generated_column
            };
            all_mappings.push(Mapping {
                generated_line: gen_line,
                generated_column: gen_col,
                source: if m.source == NO_SOURCE {
                    NO_SOURCE
                } else {
                    source_remap[m.source as usize]
                },
                original_line: m.original_line,
                original_column: m.original_column,
                name: if m.name == NO_NAME { NO_NAME } else { name_remap[m.name as usize] },
                is_range_mapping: m.is_range_mapping,
            });
            if gen_line > max_line {
                max_line = gen_line;
            }
        }
    }
    all_mappings.sort_unstable_by(|a, b| {
        a.generated_line
            .cmp(&b.generated_line)
            .then(a.generated_column.cmp(&b.generated_column))
    });
    // Rebuild the per-line offset table over the merged mappings
    // (same layout as in `from_parts`).
    let line_count = if all_mappings.is_empty() { 0 } else { max_line as usize + 1 };
    let mut line_offsets: Vec<u32> = vec![0; line_count + 1];
    let mut current_line: usize = 0;
    for (i, m) in all_mappings.iter().enumerate() {
        while current_line < m.generated_line as usize {
            current_line += 1;
            if current_line < line_offsets.len() {
                line_offsets[current_line] = i as u32;
            }
        }
    }
    if !line_offsets.is_empty() {
        // Entries past the last mapped line all point at the end.
        let last = all_mappings.len() as u32;
        for offset in line_offsets.iter_mut().skip(current_line + 1) {
            *offset = last;
        }
    }
    let source_map = build_source_map(&all_sources);
    let has_range_mappings = all_mappings.iter().any(|m| m.is_range_mapping);
    Ok(Self {
        file,
        source_root: None,
        sources: all_sources,
        sources_content: all_sources_content,
        names: all_names,
        ignore_list: all_ignore_list,
        extensions: HashMap::new(),
        debug_id: None,
        scopes: None,
        mappings: all_mappings,
        line_offsets,
        reverse_index: OnceCell::new(),
        source_map,
        has_range_mappings,
    })
}
/// Maps a generated position to its original location using the default
/// greatest-lower-bound bias.
pub fn original_position_for(&self, line: u32, column: u32) -> Option<OriginalLocation> {
    self.original_position_for_with_bias(line, column, Bias::GreatestLowerBound)
}
/// Maps a generated `(line, column)` to an original location, resolving
/// columns that fall between mappings according to `bias`.
///
/// Returns `None` when the chosen mapping has no source, or when no
/// suitable mapping (including a trailing range mapping) exists.
pub fn original_position_for_with_bias(
    &self,
    line: u32,
    column: u32,
    bias: Bias,
) -> Option<OriginalLocation> {
    let line_idx = line as usize;
    // Past the line table: a trailing range mapping may still cover it.
    if line_idx + 1 >= self.line_offsets.len() {
        return self.range_mapping_fallback(line, column);
    }
    let start = self.line_offsets[line_idx] as usize;
    let end = self.line_offsets[line_idx + 1] as usize;
    // Empty line: fall back to the last mapping before it, if a range one.
    if start == end {
        return self.range_mapping_fallback(line, column);
    }
    let line_mappings = &self.mappings[start..end];
    let idx = match bias {
        Bias::GreatestLowerBound => {
            match line_mappings.binary_search_by_key(&column, |m| m.generated_column) {
                Ok(i) => i,
                // Every mapping on this line starts after `column`.
                Err(0) => return self.range_mapping_fallback(line, column),
                Err(i) => i - 1,
            }
        }
        Bias::LeastUpperBound => {
            match line_mappings.binary_search_by_key(&column, |m| m.generated_column) {
                Ok(i) => i,
                Err(i) => {
                    // No mapping at or after `column` on this line.
                    if i >= line_mappings.len() {
                        return None;
                    }
                    i
                }
            }
        }
    };
    let mapping = &line_mappings[idx];
    if mapping.source == NO_SOURCE {
        return None;
    }
    // Range mappings cover a column span: extrapolate the original column
    // by the same delta as the generated column.
    if mapping.is_range_mapping && column >= mapping.generated_column {
        let column_delta = column - mapping.generated_column;
        return Some(OriginalLocation {
            source: mapping.source,
            line: mapping.original_line,
            column: mapping.original_column + column_delta,
            name: if mapping.name == NO_NAME { None } else { Some(mapping.name) },
        });
    }
    Some(OriginalLocation {
        source: mapping.source,
        line: mapping.original_line,
        column: mapping.original_column,
        name: if mapping.name == NO_NAME { None } else { Some(mapping.name) },
    })
}
/// Fallback for positions with no mapping on their own line: if the last
/// mapping before the requested line is a range mapping, extrapolate
/// line/column deltas from it.
///
/// Callers only reach this when `line` is at or past the last mapping's
/// line, so `line - generated_line` cannot underflow.
fn range_mapping_fallback(&self, line: u32, column: u32) -> Option<OriginalLocation> {
    let line_idx = line as usize;
    // Index just past the candidate mapping: either the start of this
    // line's (empty) slice, or the end of the table.
    let search_end = if line_idx + 1 < self.line_offsets.len() {
        self.line_offsets[line_idx] as usize
    } else {
        self.mappings.len()
    };
    if search_end == 0 {
        return None;
    }
    let last_mapping = &self.mappings[search_end - 1];
    // Only range mappings extend past their own position.
    if !last_mapping.is_range_mapping || last_mapping.source == NO_SOURCE {
        return None;
    }
    let line_delta = line - last_mapping.generated_line;
    // The column delta only applies on the mapping's own line.
    let column_delta =
        if line_delta == 0 { column.saturating_sub(last_mapping.generated_column) } else { 0 };
    Some(OriginalLocation {
        source: last_mapping.source,
        line: last_mapping.original_line + line_delta,
        column: last_mapping.original_column + column_delta,
        name: if last_mapping.name == NO_NAME { None } else { Some(last_mapping.name) },
    })
}
/// Maps an original `(source, line, column)` to a generated position using
/// the default greatest-lower-bound bias.
pub fn generated_position_for(
    &self,
    source: &str,
    line: u32,
    column: u32,
) -> Option<GeneratedLocation> {
    self.generated_position_for_with_bias(source, line, column, Bias::GreatestLowerBound)
}
/// Maps an original `(source, line, column)` to a generated position.
///
/// Uses a lazily built index of mappings sorted by original position.
/// With `GreatestLowerBound`, an inexact hit falls back to the closest
/// earlier mapping on the same original line; with `LeastUpperBound`, to
/// the closest later one — and an exact hit returns the *last* mapping of
/// the run sharing that original position.
pub fn generated_position_for_with_bias(
    &self,
    source: &str,
    line: u32,
    column: u32,
    bias: Bias,
) -> Option<GeneratedLocation> {
    let &source_idx = self.source_map.get(source)?;
    let reverse_index = self.reverse_index.get_or_init(|| build_reverse_index(&self.mappings));
    // First reverse-index entry at or after the requested original position.
    let idx = reverse_index.partition_point(|&i| {
        let m = &self.mappings[i as usize];
        (m.source, m.original_line, m.original_column) < (source_idx, line, column)
    });
    match bias {
        Bias::GreatestLowerBound => {
            // An exact match wins outright.
            if idx < reverse_index.len() {
                let mapping = &self.mappings[reverse_index[idx] as usize];
                if mapping.source == source_idx
                    && mapping.original_line == line
                    && mapping.original_column == column
                {
                    return Some(GeneratedLocation {
                        line: mapping.generated_line,
                        column: mapping.generated_column,
                    });
                }
            }
            if idx == 0 {
                return None;
            }
            // Otherwise use the previous entry, but only within the same
            // source and original line.
            let mapping = &self.mappings[reverse_index[idx - 1] as usize];
            if mapping.source != source_idx || mapping.original_line != line {
                return None;
            }
            Some(GeneratedLocation {
                line: mapping.generated_line,
                column: mapping.generated_column,
            })
        }
        Bias::LeastUpperBound => {
            if idx >= reverse_index.len() {
                return None;
            }
            let mapping = &self.mappings[reverse_index[idx] as usize];
            if mapping.source != source_idx || mapping.original_line != line {
                return None;
            }
            if mapping.original_column == column {
                // Exact hit: advance to the last mapping that shares this
                // exact original position.
                let mut last_idx = idx;
                while last_idx + 1 < reverse_index.len() {
                    let next = &self.mappings[reverse_index[last_idx + 1] as usize];
                    if next.source != source_idx
                        || next.original_line != line
                        || next.original_column != column
                    {
                        break;
                    }
                    last_idx += 1;
                }
                let last_mapping = &self.mappings[reverse_index[last_idx] as usize];
                return Some(GeneratedLocation {
                    line: last_mapping.generated_line,
                    column: last_mapping.generated_column,
                });
            }
            Some(GeneratedLocation {
                line: mapping.generated_line,
                column: mapping.generated_column,
            })
        }
    }
}
/// Returns every generated position that maps *exactly* to
/// `(source, line, column)`, in reverse-index order; empty when the source
/// is unknown or nothing matches.
pub fn all_generated_positions_for(
    &self,
    source: &str,
    line: u32,
    column: u32,
) -> Vec<GeneratedLocation> {
    let Some(&source_idx) = self.source_map.get(source) else {
        return Vec::new();
    };
    let reverse_index = self.reverse_index.get_or_init(|| build_reverse_index(&self.mappings));
    // First reverse-index entry at or after the requested original position.
    let first = reverse_index.partition_point(|&i| {
        let m = &self.mappings[i as usize];
        (m.source, m.original_line, m.original_column) < (source_idx, line, column)
    });
    // Collect the contiguous run of exact matches starting there.
    reverse_index[first..]
        .iter()
        .map(|&ri| &self.mappings[ri as usize])
        .take_while(|m| {
            m.source == source_idx && m.original_line == line && m.original_column == column
        })
        .map(|m| GeneratedLocation { line: m.generated_line, column: m.generated_column })
        .collect()
}
/// Maps a generated range to an original-source range.
///
/// Both endpoints are resolved with the default bias; `None` when either
/// endpoint fails to resolve or the endpoints land in different sources.
pub fn map_range(
    &self,
    start_line: u32,
    start_column: u32,
    end_line: u32,
    end_column: u32,
) -> Option<MappedRange> {
    let start = self.original_position_for(start_line, start_column)?;
    let end = self.original_position_for(end_line, end_column)?;
    (start.source == end.source).then(|| MappedRange {
        source: start.source,
        original_start_line: start.line,
        original_start_column: start.column,
        original_end_line: end.line,
        original_end_column: end.column,
    })
}
/// Returns the resolved source path at `index`.
///
/// Panics when `index` is out of bounds; use [`SourceMap::get_source`] for
/// a checked lookup.
#[inline]
pub fn source(&self, index: u32) -> &str {
    &self.sources[index as usize]
}
/// Checked variant of [`SourceMap::source`].
#[inline]
pub fn get_source(&self, index: u32) -> Option<&str> {
    self.sources.get(index as usize).map(|s| s.as_str())
}
/// Returns the name at `index`.
///
/// Panics when `index` is out of bounds; use [`SourceMap::get_name`] for a
/// checked lookup.
#[inline]
pub fn name(&self, index: u32) -> &str {
    &self.names[index as usize]
}
/// Checked variant of [`SourceMap::name`].
#[inline]
pub fn get_name(&self, index: u32) -> Option<&str> {
    self.names.get(index as usize).map(|s| s.as_str())
}
/// Looks up a source's index by its resolved path.
#[inline]
pub fn source_index(&self, name: &str) -> Option<u32> {
    self.source_map.get(name).copied()
}
/// Total number of decoded mapping segments.
#[inline]
pub fn mapping_count(&self) -> usize {
    self.mappings.len()
}
/// Number of generated lines covered by the line-offset table.
#[inline]
pub fn line_count(&self) -> usize {
    self.line_offsets.len().saturating_sub(1)
}
/// Mappings for one generated line; empty when `line` is out of range.
#[inline]
pub fn mappings_for_line(&self, line: u32) -> &[Mapping] {
    let line_idx = line as usize;
    if line_idx + 1 >= self.line_offsets.len() {
        return &[];
    }
    let start = self.line_offsets[line_idx] as usize;
    let end = self.line_offsets[line_idx + 1] as usize;
    &self.mappings[start..end]
}
/// All mappings, ordered by generated position.
#[inline]
pub fn all_mappings(&self) -> &[Mapping] {
    &self.mappings
}
/// Serializes to JSON including `sourcesContent`.
pub fn to_json(&self) -> String {
    self.to_json_with_options(false)
}
/// Serializes the map to a compact version-3 JSON string.
///
/// `exclude_content` drops `sourcesContent`. When `source_root` is set, it
/// is stripped back off each source path so output round-trips the raw
/// `sources` entries. Encoding scopes may append to `names`, so a cloned
/// name table is used for the `names` array in that case.
pub fn to_json_with_options(&self, exclude_content: bool) -> String {
    let mappings = self.encode_mappings();
    // Scope encoding can grow the name table; work on a clone so `self`
    // stays untouched.
    let scopes_encoded = if let Some(ref scopes_info) = self.scopes {
        let mut names_clone = self.names.clone();
        let s = srcmap_scopes::encode_scopes(scopes_info, &mut names_clone);
        Some((s, names_clone))
    } else {
        None
    };
    let names_for_json = match &scopes_encoded {
        Some((_, expanded_names)) => expanded_names,
        None => &self.names,
    };
    let source_root_prefix = self.source_root.as_deref().unwrap_or("");
    let mut json = String::with_capacity(256 + mappings.len());
    json.push_str(r#"{"version":3"#);
    if let Some(ref file) = self.file {
        json.push_str(r#","file":"#);
        json_quote_into(&mut json, file);
    }
    if let Some(ref root) = self.source_root {
        json.push_str(r#","sourceRoot":"#);
        json_quote_into(&mut json, root);
    }
    json.push_str(r#","sources":["#);
    for (i, s) in self.sources.iter().enumerate() {
        if i > 0 {
            json.push(',');
        }
        // Undo the `source_root` prefix applied at parse time.
        let source_name = if !source_root_prefix.is_empty() {
            s.strip_prefix(source_root_prefix).unwrap_or(s)
        } else {
            s
        };
        json_quote_into(&mut json, source_name);
    }
    json.push(']');
    // Emit `sourcesContent` only when it carries at least one entry.
    if !exclude_content
        && !self.sources_content.is_empty()
        && self.sources_content.iter().any(|c| c.is_some())
    {
        json.push_str(r#","sourcesContent":["#);
        for (i, c) in self.sources_content.iter().enumerate() {
            if i > 0 {
                json.push(',');
            }
            match c {
                Some(content) => json_quote_into(&mut json, content),
                None => json.push_str("null"),
            }
        }
        json.push(']');
    }
    json.push_str(r#","names":["#);
    for (i, n) in names_for_json.iter().enumerate() {
        if i > 0 {
            json.push(',');
        }
        json_quote_into(&mut json, n);
    }
    json.push(']');
    json.push_str(r#","mappings":""#);
    json.push_str(&mappings);
    json.push('"');
    if let Some(range_mappings) = self.encode_range_mappings() {
        json.push_str(r#","rangeMappings":""#);
        json.push_str(&range_mappings);
        json.push('"');
    }
    if !self.ignore_list.is_empty() {
        use std::fmt::Write;
        json.push_str(r#","ignoreList":["#);
        for (i, &idx) in self.ignore_list.iter().enumerate() {
            if i > 0 {
                json.push(',');
            }
            let _ = write!(json, "{idx}");
        }
        json.push(']');
    }
    if let Some(ref id) = self.debug_id {
        json.push_str(r#","debugId":"#);
        json_quote_into(&mut json, id);
    }
    if let Some((ref s, _)) = scopes_encoded {
        json.push_str(r#","scopes":"#);
        json_quote_into(&mut json, s);
    }
    // Extension fields are emitted in sorted key order for determinism.
    let mut ext_keys: Vec<&String> = self.extensions.keys().collect();
    ext_keys.sort();
    for key in ext_keys {
        if let Some(val) = self.extensions.get(key) {
            json.push(',');
            json_quote_into(&mut json, key);
            json.push(':');
            json.push_str(&serde_json::to_string(val).unwrap_or_default());
        }
    }
    json.push('}');
    json
}
/// Assembles a map directly from already-decoded parts.
///
/// Assumes `mappings` is sorted by (generated_line, generated_column);
/// the line-offset table built here and later binary searches rely on it.
#[allow(
    clippy::too_many_arguments,
    reason = "constructor-style API keeps the hot path allocation-free"
)]
pub fn from_parts(
    file: Option<String>,
    source_root: Option<String>,
    sources: Vec<String>,
    sources_content: Vec<Option<String>>,
    names: Vec<String>,
    mappings: Vec<Mapping>,
    ignore_list: Vec<u32>,
    debug_id: Option<String>,
    scopes: Option<ScopeInfo>,
) -> Self {
    // line_offsets[l]..line_offsets[l + 1] delimits line `l`'s mappings.
    let line_count = mappings.last().map_or(0, |m| m.generated_line as usize + 1);
    let mut line_offsets: Vec<u32> = vec![0; line_count + 1];
    let mut current_line: usize = 0;
    for (i, m) in mappings.iter().enumerate() {
        while current_line < m.generated_line as usize {
            current_line += 1;
            if current_line < line_offsets.len() {
                line_offsets[current_line] = i as u32;
            }
        }
    }
    if !line_offsets.is_empty() {
        // Entries past the last mapped line all point at the end.
        let last = mappings.len() as u32;
        for offset in line_offsets.iter_mut().skip(current_line + 1) {
            *offset = last;
        }
    }
    let source_map = build_source_map(&sources);
    let has_range_mappings = mappings.iter().any(|m| m.is_range_mapping);
    Self {
        file,
        source_root,
        sources,
        sources_content,
        names,
        ignore_list,
        extensions: HashMap::new(),
        debug_id,
        scopes,
        mappings,
        line_offsets,
        reverse_index: OnceCell::new(),
        source_map,
        has_range_mappings,
    }
}
/// Builds a map from a raw VLQ `mappings` string plus metadata; thin
/// wrapper over [`SourceMap::from_vlq_with_range_mappings`] with no
/// `rangeMappings`.
#[allow(clippy::too_many_arguments, reason = "WASM bindings pass parsed map parts directly")]
pub fn from_vlq(
    mappings_str: &str,
    sources: Vec<String>,
    names: Vec<String>,
    file: Option<String>,
    source_root: Option<String>,
    sources_content: Vec<Option<String>>,
    ignore_list: Vec<u32>,
    debug_id: Option<String>,
) -> Result<Self, ParseError> {
    Self::from_vlq_with_range_mappings(
        mappings_str,
        sources,
        names,
        file,
        source_root,
        sources_content,
        ignore_list,
        debug_id,
        None,
    )
}
/// Builds a map from raw VLQ strings, optionally layering a
/// `rangeMappings` payload onto the decoded segments.
#[allow(
    clippy::too_many_arguments,
    reason = "range mappings are optional but share the same low-level constructor shape"
)]
pub fn from_vlq_with_range_mappings(
    mappings_str: &str,
    sources: Vec<String>,
    names: Vec<String>,
    file: Option<String>,
    source_root: Option<String>,
    sources_content: Vec<Option<String>>,
    ignore_list: Vec<u32>,
    debug_id: Option<String>,
    range_mappings_str: Option<&str>,
) -> Result<Self, ParseError> {
    let (mut mappings, line_offsets) = decode_mappings(mappings_str)?;
    if let Some(rm_str) = range_mappings_str
        && !rm_str.is_empty()
    {
        decode_range_mappings(rm_str, &mut mappings, &line_offsets)?;
    }
    let source_map = build_source_map(&sources);
    let has_range_mappings = mappings.iter().any(|m| m.is_range_mapping);
    Ok(Self {
        file,
        source_root,
        sources,
        sources_content,
        names,
        ignore_list,
        extensions: HashMap::new(),
        debug_id,
        scopes: None,
        mappings,
        line_offsets,
        reverse_index: OnceCell::new(),
        source_map,
        has_range_mappings,
    })
}
/// Returns a builder for constructing a map incrementally.
pub fn builder() -> SourceMapBuilder {
    SourceMapBuilder::new()
}
/// Parses a map but only decodes mappings for the generated-line window
/// `start_line..end_line` (exact bound semantics are defined by
/// `decode_mappings_range`, not shown here).
///
/// NOTE(review): unlike `from_json`, the `rangeMappings` field is not
/// applied on this path, so partially decoded maps lose range-mapping
/// information — confirm this is intentional.
pub fn from_json_lines(json: &str, start_line: u32, end_line: u32) -> Result<Self, ParseError> {
    let raw: RawSourceMap<'_> = serde_json::from_str(json)?;
    if raw.version != 3 {
        return Err(ParseError::InvalidVersion(raw.version));
    }
    let source_root = raw.source_root.as_deref().unwrap_or("");
    let sources = resolve_sources(&raw.sources, source_root);
    let sources_content = raw.sources_content.unwrap_or_default();
    let source_map = build_source_map(&sources);
    let (mappings, line_offsets) = decode_mappings_range(raw.mappings, start_line, end_line)?;
    let num_sources = sources.len();
    let scopes = match raw.scopes {
        Some(scopes_str) if !scopes_str.is_empty() => {
            Some(srcmap_scopes::decode_scopes(scopes_str, &raw.names, num_sources)?)
        }
        _ => None,
    };
    // `ignoreList` takes precedence over the legacy `x_google_ignoreList`.
    let ignore_list = match raw.ignore_list {
        Some(list) => list,
        None => raw.x_google_ignore_list.unwrap_or_default(),
    };
    // Only vendor-prefixed extension fields are preserved for round-tripping.
    let extensions: HashMap<String, serde_json::Value> = raw
        .extensions
        .into_iter()
        .filter(|(k, _)| k.starts_with("x_") || k.starts_with("x-"))
        .collect();
    let has_range_mappings = mappings.iter().any(|m| m.is_range_mapping);
    Ok(Self {
        file: raw.file,
        source_root: raw.source_root,
        sources,
        sources_content,
        names: raw.names,
        ignore_list,
        extensions,
        debug_id: raw.debug_id,
        scopes,
        mappings,
        line_offsets,
        reverse_index: OnceCell::new(),
        source_map,
        has_range_mappings,
    })
}
/// Re-encodes all mappings into a version-3 VLQ `mappings` string.
///
/// Per the format, the generated-column delta resets at every `;` (new
/// generated line), while the source / original line / original column /
/// name deltas carry across lines.
pub fn encode_mappings(&self) -> String {
    if self.mappings.is_empty() {
        return String::new();
    }
    let mut out: Vec<u8> = Vec::with_capacity(self.mappings.len() * 6);
    // Running previous values for delta encoding.
    let mut prev_gen_col: i64 = 0;
    let mut prev_source: i64 = 0;
    let mut prev_orig_line: i64 = 0;
    let mut prev_orig_col: i64 = 0;
    let mut prev_name: i64 = 0;
    let mut prev_gen_line: u32 = 0;
    let mut first_in_line = true;
    for m in &self.mappings {
        // Emit one ';' per line advance; empty lines produce consecutive ';'.
        while prev_gen_line < m.generated_line {
            out.push(b';');
            prev_gen_line += 1;
            prev_gen_col = 0;
            first_in_line = true;
        }
        if !first_in_line {
            out.push(b',');
        }
        first_in_line = false;
        srcmap_codec::vlq_encode(&mut out, m.generated_column as i64 - prev_gen_col);
        prev_gen_col = m.generated_column as i64;
        // Segments without a source are 1-field; with a source they carry
        // 4 fields, plus a 5th when a name is present.
        if m.source != NO_SOURCE {
            srcmap_codec::vlq_encode(&mut out, m.source as i64 - prev_source);
            prev_source = m.source as i64;
            srcmap_codec::vlq_encode(&mut out, m.original_line as i64 - prev_orig_line);
            prev_orig_line = m.original_line as i64;
            srcmap_codec::vlq_encode(&mut out, m.original_column as i64 - prev_orig_col);
            prev_orig_col = m.original_column as i64;
            if m.name != NO_NAME {
                srcmap_codec::vlq_encode(&mut out, m.name as i64 - prev_name);
                prev_name = m.name as i64;
            }
        }
    }
    debug_assert!(out.is_ascii());
    // SAFETY: `out` only receives b';', b',' and VLQ-encoded base64
    // characters, all ASCII (checked by the debug assertion above), so it
    // is valid UTF-8.
    unsafe { String::from_utf8_unchecked(out) }
}
/// Re-encodes the range-mapping markers as a `rangeMappings` string, or
/// `None` when the map has none.
///
/// Each generated line contributes a `;`-separated group of unsigned VLQ
/// deltas over the indices (within that line's mapping slice) of segments
/// flagged `is_range_mapping`; trailing `;` separators are trimmed.
pub fn encode_range_mappings(&self) -> Option<String> {
    if !self.has_range_mappings {
        return None;
    }
    let line_count = self.line_offsets.len().saturating_sub(1);
    let mut out: Vec<u8> = Vec::new();
    for line_idx in 0..line_count {
        if line_idx > 0 {
            out.push(b';');
        }
        let start = self.line_offsets[line_idx] as usize;
        let end = self.line_offsets[line_idx + 1] as usize;
        // Delta base for indices within this line.
        let mut prev_offset: u64 = 0;
        let mut first_on_line = true;
        for (i, mapping) in self.mappings[start..end].iter().enumerate() {
            if mapping.is_range_mapping {
                if !first_on_line {
                    out.push(b',');
                }
                first_on_line = false;
                vlq_encode_unsigned(&mut out, i as u64 - prev_offset);
                prev_offset = i as u64;
            }
        }
    }
    // Drop empty trailing lines.
    while out.last() == Some(&b';') {
        out.pop();
    }
    if out.is_empty() {
        return None;
    }
    debug_assert!(out.is_ascii());
    // SAFETY: only ASCII separator and VLQ base64 bytes were pushed
    // (checked by the debug assertion above), so this is valid UTF-8.
    Some(unsafe { String::from_utf8_unchecked(out) })
}
/// True when any decoded segment is a range mapping (cached at build time).
#[inline]
pub fn has_range_mappings(&self) -> bool {
    self.has_range_mappings
}
/// Counts the segments flagged as range mappings (O(n) scan).
#[inline]
pub fn range_mapping_count(&self) -> usize {
    self.mappings.iter().filter(|m| m.is_range_mapping).count()
}
/// Parses a source map from a `data:application/json...` URL.
///
/// Accepts base64 payloads (`;base64,`, optionally preceded by a
/// `charset=utf-8` / `charset=UTF-8` parameter) and plain payloads (`,`
/// or `;charset=utf-8,` / `;charset=UTF-8,`); plain payloads containing
/// `%` are percent-decoded. Anything else is `ParseError::InvalidDataUrl`.
pub fn from_data_url(url: &str) -> Result<Self, ParseError> {
    let rest = url.strip_prefix("data:application/json").ok_or(ParseError::InvalidDataUrl)?;
    let json = if let Some(data) = rest
        .strip_prefix(";base64,")
        .or_else(|| rest.strip_prefix(";charset=utf-8;base64,"))
        .or_else(|| rest.strip_prefix(";charset=UTF-8;base64,"))
    {
        base64_decode(data).ok_or(ParseError::InvalidDataUrl)?
    } else if let Some(data) = rest
        .strip_prefix(',')
        // Fix: a plain (non-base64) payload may also carry a charset
        // parameter; previously only the base64 variants accepted one.
        .or_else(|| rest.strip_prefix(";charset=utf-8,"))
        .or_else(|| rest.strip_prefix(";charset=UTF-8,"))
    {
        if data.contains('%') { percent_decode(data) } else { data.to_string() }
    } else {
        return Err(ParseError::InvalidDataUrl);
    };
    Self::from_json(&json)
}
/// Writes the JSON serialization (including `sourcesContent`) to `writer`.
pub fn to_writer(&self, mut writer: impl io::Write) -> io::Result<()> {
    let json = self.to_json();
    writer.write_all(json.as_bytes())
}
/// Like [`SourceMap::to_writer`], optionally excluding `sourcesContent`.
pub fn to_writer_with_options(
    &self,
    mut writer: impl io::Write,
    exclude_content: bool,
) -> io::Result<()> {
    let json = self.to_json_with_options(exclude_content);
    writer.write_all(json.as_bytes())
}
/// Serializes the map into a `data:` URL (see `utils::to_data_url`).
pub fn to_data_url(&self) -> String {
    utils::to_data_url(&self.to_json())
}
pub fn set_file(&mut self, file: Option<String>) {
    self.file = file;
}
pub fn set_source_root(&mut self, source_root: Option<String>) {
    self.source_root = source_root;
}
pub fn set_debug_id(&mut self, debug_id: Option<String>) {
    self.debug_id = debug_id;
}
pub fn set_ignore_list(&mut self, ignore_list: Vec<u32>) {
    self.ignore_list = ignore_list;
}
/// Replaces the source list, re-resolving against the current
/// `source_root` and rebuilding the path-lookup table.
pub fn set_sources(&mut self, sources: Vec<Option<String>>) {
    let source_root = self.source_root.as_deref().unwrap_or("");
    self.sources = resolve_sources(&sources, source_root);
    self.source_map = build_source_map(&self.sources);
    // Conservatively drop the cached reverse index as well.
    self.reverse_index = OnceCell::new();
}
}
/// Running VLQ decoder state carried between mapping lines (the
/// generated-column delta resets per line and is not stored here).
#[derive(Debug, Clone, Copy, Default)]
struct VlqState {
    source_index: i64,
    original_line: i64,
    original_column: i64,
    name_index: i64,
}
/// Byte range of one generated line inside a raw `mappings` string, plus
/// the decoder state at that line's start (populated by
/// `prescan_mappings`, not visible in this file section).
#[derive(Debug, Clone)]
struct LineInfo {
    byte_offset: usize,
    byte_end: usize,
    state: VlqState,
}
/// A source map that keeps `mappings` as the raw VLQ string and decodes
/// lines on demand, caching each decoded line.
#[derive(Debug)]
pub struct LazySourceMap {
    pub file: Option<String>,
    pub source_root: Option<String>,
    pub sources: Vec<String>,
    pub sources_content: Vec<Option<String>>,
    pub names: Vec<String>,
    pub ignore_list: Vec<u32>,
    pub extensions: HashMap<String, serde_json::Value>,
    pub debug_id: Option<String>,
    pub scopes: Option<ScopeInfo>,
    // Raw VLQ `mappings` payload, decoded per line on demand.
    raw_mappings: String,
    // Per-line byte ranges into `raw_mappings` (see `LineInfo`).
    line_info: Vec<LineInfo>,
    // Cache of lines decoded so far, keyed by generated line number.
    decoded_lines: RefCell<HashMap<u32, Vec<Mapping>>>,
    // Resolved source path -> index into `sources`.
    source_map: HashMap<String, u32>,
    // Whether `line_info` came from the fast byte-only scan
    // (`fast_scan_lines`) instead of the full VLQ prescan.
    fast_scan: bool,
    // NOTE(review): the watermark/state pair looks like support for
    // incremental sequential decoding — confirm against the decode methods
    // (not visible in this file section).
    decode_watermark: Cell<u32>,
    decode_state: Cell<VlqState>,
}
impl LazySourceMap {
/// Shared private constructor: wires parsed metadata together with the raw
/// mappings payload and fresh (empty) decode caches.
#[allow(
    clippy::too_many_arguments,
    reason = "private constructor centralizes shared LazySourceMap setup"
)]
fn new_inner(
    file: Option<String>,
    source_root: Option<String>,
    sources: Vec<String>,
    sources_content: Vec<Option<String>>,
    names: Vec<String>,
    ignore_list: Vec<u32>,
    extensions: HashMap<String, serde_json::Value>,
    debug_id: Option<String>,
    scopes: Option<ScopeInfo>,
    raw_mappings: String,
    line_info: Vec<LineInfo>,
    source_map: HashMap<String, u32>,
    fast_scan: bool,
) -> Self {
    Self {
        file,
        source_root,
        sources,
        sources_content,
        names,
        ignore_list,
        extensions,
        debug_id,
        scopes,
        raw_mappings,
        line_info,
        decoded_lines: RefCell::new(HashMap::new()),
        source_map,
        fast_scan,
        // Nothing decoded yet.
        decode_watermark: Cell::new(0),
        decode_state: Cell::new(VlqState::default()),
    }
}
/// Parses a version-3 map lazily: metadata is decoded eagerly, but the
/// `mappings` string is only pre-scanned into per-line byte ranges.
pub fn from_json(json: &str) -> Result<Self, ParseError> {
    let raw: RawSourceMap<'_> = serde_json::from_str(json)?;
    if raw.version != 3 {
        return Err(ParseError::InvalidVersion(raw.version));
    }
    let source_root = raw.source_root.as_deref().unwrap_or("");
    let sources = resolve_sources(&raw.sources, source_root);
    let sources_content = raw.sources_content.unwrap_or_default();
    let source_map = build_source_map(&sources);
    let raw_mappings = raw.mappings.to_string();
    let line_info = prescan_mappings(&raw_mappings)?;
    let num_sources = sources.len();
    let scopes = match raw.scopes {
        Some(scopes_str) if !scopes_str.is_empty() => {
            Some(srcmap_scopes::decode_scopes(scopes_str, &raw.names, num_sources)?)
        }
        _ => None,
    };
    // `ignoreList` takes precedence over the legacy `x_google_ignoreList`.
    let ignore_list = match raw.ignore_list {
        Some(list) => list,
        None => raw.x_google_ignore_list.unwrap_or_default(),
    };
    // Only vendor-prefixed extension fields are preserved.
    let extensions: HashMap<String, serde_json::Value> = raw
        .extensions
        .into_iter()
        .filter(|(k, _)| k.starts_with("x_") || k.starts_with("x-"))
        .collect();
    Ok(Self::new_inner(
        raw.file,
        raw.source_root,
        sources,
        sources_content,
        raw.names,
        ignore_list,
        extensions,
        raw.debug_id,
        scopes,
        raw_mappings,
        line_info,
        source_map,
        false,
    ))
}
/// Like [`LazySourceMap::from_json`] but without retaining
/// `sourcesContent` or extension fields; rejects indexed (`sections`) maps.
pub fn from_json_no_content(json: &str) -> Result<Self, ParseError> {
    let raw: RawSourceMapLite<'_> = serde_json::from_str(json)?;
    if raw.version != 3 {
        return Err(ParseError::InvalidVersion(raw.version));
    }
    // Lazy decoding cannot merge index-map sections.
    if raw.sections.is_some() {
        return Err(ParseError::NestedIndexMap);
    }
    let source_root = raw.source_root.as_deref().unwrap_or("");
    let sources = resolve_sources(&raw.sources, source_root);
    let source_map = build_source_map(&sources);
    let raw_mappings = raw.mappings.to_string();
    let line_info = prescan_mappings(&raw_mappings)?;
    let num_sources = sources.len();
    let scopes = match raw.scopes {
        Some(scopes_str) if !scopes_str.is_empty() => {
            Some(srcmap_scopes::decode_scopes(scopes_str, &raw.names, num_sources)?)
        }
        _ => None,
    };
    // `ignoreList` takes precedence over the legacy `x_google_ignoreList`.
    let ignore_list = match raw.ignore_list {
        Some(list) => list,
        None => raw.x_google_ignore_list.unwrap_or_default(),
    };
    Ok(Self::new_inner(
        raw.file,
        raw.source_root,
        sources,
        Vec::new(),
        raw.names,
        ignore_list,
        HashMap::new(),
        raw.debug_id,
        scopes,
        raw_mappings,
        line_info,
        source_map,
        false,
    ))
}
/// Builds a lazily-decoded map directly from already-separated parts:
/// a raw VLQ `mappings` string plus resolved sources, names, and metadata.
/// No `sourcesContent`, extensions, or scopes are attached.
///
/// # Errors
/// Fails if the VLQ pre-scan finds malformed mappings.
pub fn from_vlq(
    mappings: &str,
    sources: Vec<String>,
    names: Vec<String>,
    file: Option<String>,
    source_root: Option<String>,
    ignore_list: Vec<u32>,
    debug_id: Option<String>,
) -> Result<Self, ParseError> {
    let owned_mappings = mappings.to_string();
    let lines = prescan_mappings(&owned_mappings)?;
    let lookup = build_source_map(&sources);
    Ok(Self::new_inner(
        file,
        source_root,
        sources,
        Vec::new(),
        names,
        ignore_list,
        HashMap::new(),
        debug_id,
        None,
        owned_mappings,
        lines,
        lookup,
        false,
    ))
}
/// Fastest parsing path: skips `sourcesContent`, extensions, scopes, and —
/// crucially — the validating VLQ pre-scan. Line boundaries come from a
/// plain byte scan; per-line VLQ state is reconstructed on demand (see the
/// `fast_scan` handling in `decode_line`).
///
/// # Errors
/// Rejects non-version-3 maps and indexed maps; VLQ errors surface later,
/// when a line is actually decoded.
pub fn from_json_fast(json: &str) -> Result<Self, ParseError> {
    let raw: RawSourceMapLite<'_> = serde_json::from_str(json)?;
    if raw.version != 3 {
        return Err(ParseError::InvalidVersion(raw.version));
    }
    if raw.sections.is_some() {
        return Err(ParseError::NestedIndexMap);
    }
    let source_root = raw.source_root.as_deref().unwrap_or("");
    let sources = resolve_sources(&raw.sources, source_root);
    let source_map = build_source_map(&sources);
    let raw_mappings = raw.mappings.to_string();
    // No VLQ walk here: every LineInfo carries a zeroed state placeholder.
    let line_info = fast_scan_lines(&raw_mappings);
    let ignore_list = match raw.ignore_list {
        Some(list) => list,
        None => raw.x_google_ignore_list.unwrap_or_default(),
    };
    // Note: scopes are intentionally not decoded on the fast path.
    Ok(Self::new_inner(
        raw.file,
        raw.source_root,
        sources,
        Vec::new(),
        raw.names,
        ignore_list,
        HashMap::new(),
        raw.debug_id,
        None,
        raw_mappings,
        line_info,
        source_map,
        true,
    ))
}
/// Decodes every segment of one generated line, starting from the VLQ delta
/// `state` in effect at the beginning of that line.
///
/// Returns the decoded mappings plus the state carried out of the line (fed
/// into the next line). The source/original/name fields are cumulative
/// deltas across the whole `mappings` string; only the generated column
/// resets at each `;`.
fn decode_line_with_state(
    &self,
    line: u32,
    mut state: VlqState,
) -> Result<(Vec<Mapping>, VlqState), DecodeError> {
    let line_idx = line as usize;
    // Lines past the end of the map simply have no mappings.
    if line_idx >= self.line_info.len() {
        return Ok((Vec::new(), state));
    }
    let info = &self.line_info[line_idx];
    let bytes = self.raw_mappings.as_bytes();
    let end = info.byte_end;
    let mut mappings = Vec::new();
    let mut source_index = state.source_index;
    let mut original_line = state.original_line;
    let mut original_column = state.original_column;
    let mut name_index = state.name_index;
    // Generated column deltas restart at zero on every line.
    let mut generated_column: i64 = 0;
    let mut pos = info.byte_offset;
    while pos < end {
        let byte = bytes[pos];
        if byte == b',' {
            pos += 1;
            continue;
        }
        generated_column += vlq_fast(bytes, &mut pos)?;
        // A segment is either 1 field (no source) or 4-5 fields.
        if pos < end && bytes[pos] != b',' && bytes[pos] != b';' {
            source_index += vlq_fast(bytes, &mut pos)?;
            // Segments with exactly 2 or 3 fields are malformed.
            if pos >= end || bytes[pos] == b',' || bytes[pos] == b';' {
                return Err(DecodeError::InvalidSegmentLength { fields: 2, offset: pos });
            }
            original_line += vlq_fast(bytes, &mut pos)?;
            if pos >= end || bytes[pos] == b',' || bytes[pos] == b';' {
                return Err(DecodeError::InvalidSegmentLength { fields: 3, offset: pos });
            }
            original_column += vlq_fast(bytes, &mut pos)?;
            // Optional 5th field: name index delta.
            let name = if pos < end && bytes[pos] != b',' && bytes[pos] != b';' {
                name_index += vlq_fast(bytes, &mut pos)?;
                name_index as u32
            } else {
                NO_NAME
            };
            mappings.push(Mapping {
                generated_line: line,
                generated_column: generated_column as u32,
                source: source_index as u32,
                original_line: original_line as u32,
                original_column: original_column as u32,
                name,
                is_range_mapping: false,
            });
        } else {
            // 1-field segment: a generated position with no original location.
            mappings.push(Mapping {
                generated_line: line,
                generated_column: generated_column as u32,
                source: NO_SOURCE,
                original_line: 0,
                original_column: 0,
                name: NO_NAME,
                is_range_mapping: false,
            });
        }
    }
    // Hand the accumulated deltas to the caller for the next line.
    state.source_index = source_index;
    state.original_line = original_line;
    state.original_column = original_column;
    state.name_index = name_index;
    Ok((mappings, state))
}
/// Decodes (and memoizes) all mappings of one generated line.
///
/// Pre-scanned maps read the line's entry state straight from `line_info`.
/// Fast-scanned maps have no per-line state, so this walks forward from the
/// decode watermark (the first line whose state is not yet known), decoding
/// or skipping each line to rebuild the VLQ carry state incrementally.
pub fn decode_line(&self, line: u32) -> Result<Vec<Mapping>, DecodeError> {
    if let Some(cached) = self.decoded_lines.borrow().get(&line) {
        return Ok(cached.clone());
    }
    let line_idx = line as usize;
    if line_idx >= self.line_info.len() {
        return Ok(Vec::new());
    }
    if self.fast_scan {
        let watermark = self.decode_watermark.get();
        // At or past the watermark we can resume from the saved state;
        // behind it we must replay from line 0 with a zeroed state.
        let start = if line >= watermark { watermark } else { 0 };
        let mut state = if line >= watermark {
            self.decode_state.get()
        } else {
            VlqState { source_index: 0, original_line: 0, original_column: 0, name_index: 0 }
        };
        for l in start..=line {
            let info = &self.line_info[l as usize];
            if self.decoded_lines.borrow().contains_key(&l) {
                // Already decoded: advance the state without re-materializing.
                let bytes = self.raw_mappings.as_bytes();
                state = walk_vlq_state(bytes, info.byte_offset, info.byte_end, state)?;
            } else {
                let (mappings, new_state) = self.decode_line_with_state(l, state)?;
                state = new_state;
                self.decoded_lines.borrow_mut().insert(l, mappings);
            }
        }
        // Only move the watermark forward; never regress it.
        if line + 1 > self.decode_watermark.get() {
            self.decode_watermark.set(line + 1);
            self.decode_state.set(state);
        }
        let cached = self.decoded_lines.borrow().get(&line).cloned();
        return Ok(cached.unwrap_or_default());
    }
    // Pre-scanned path: the line's entry state was recorded up front.
    let state = self.line_info[line_idx].state;
    let (mappings, _) = self.decode_line_with_state(line, state)?;
    self.decoded_lines.borrow_mut().insert(line, mappings.clone());
    Ok(mappings)
}
/// Maps a generated (line, column) back to its original location.
///
/// Uses greatest-lower-bound semantics: the chosen segment is the last one
/// whose start column is `<= column`. Returns `None` when the line has no
/// mappings, the column precedes every segment, or the covering segment
/// carries no source reference.
pub fn original_position_for(&self, line: u32, column: u32) -> Option<OriginalLocation> {
    let segments = self.decode_line(line).ok()?;
    if segments.is_empty() {
        return None;
    }
    let idx = match segments.binary_search_by_key(&column, |m| m.generated_column) {
        Ok(i) => i,
        // Err(i) is the insertion point; the covering segment sits just
        // before it. checked_sub turns "before the first segment" into None.
        Err(i) => i.checked_sub(1)?,
    };
    let seg = &segments[idx];
    if seg.source == NO_SOURCE {
        return None;
    }
    Some(OriginalLocation {
        source: seg.source,
        line: seg.original_line,
        column: seg.original_column,
        name: (seg.name != NO_NAME).then_some(seg.name),
    })
}
/// Number of generated lines covered by the `mappings` string.
#[inline]
pub fn line_count(&self) -> usize {
    self.line_info.len()
}
/// Resolved source path at `index`. Panics when out of bounds; see
/// [`Self::get_source`] for the checked variant.
#[inline]
pub fn source(&self, index: u32) -> &str {
    &self.sources[index as usize]
}
/// Checked variant of [`Self::source`].
#[inline]
pub fn get_source(&self, index: u32) -> Option<&str> {
    self.sources.get(index as usize).map(|s| s.as_str())
}
/// Name at `index`. Panics when out of bounds; see [`Self::get_name`].
#[inline]
pub fn name(&self, index: u32) -> &str {
    &self.names[index as usize]
}
/// Checked variant of [`Self::name`].
#[inline]
pub fn get_name(&self, index: u32) -> Option<&str> {
    self.names.get(index as usize).map(|s| s.as_str())
}
/// Reverse lookup: resolved source path -> source index.
#[inline]
pub fn source_index(&self, name: &str) -> Option<u32> {
    self.source_map.get(name).copied()
}
/// All mappings of one generated line; empty on decode error or for lines
/// past the end of the map.
pub fn mappings_for_line(&self, line: u32) -> Vec<Mapping> {
    self.decode_line(line).unwrap_or_default()
}
/// Converts this lazy map into a fully-decoded [`SourceMap`] by eagerly
/// decoding every mapping segment.
///
/// # Errors
/// Propagates VLQ decoding failures from the full `mappings` walk.
pub fn into_sourcemap(self) -> Result<SourceMap, ParseError> {
    let (mappings, line_offsets) = decode_mappings(&self.raw_mappings)?;
    let has_range_mappings = mappings.iter().any(|m| m.is_range_mapping);
    Ok(SourceMap {
        file: self.file,
        source_root: self.source_root,
        // `self` is consumed, so the sources vector can be moved instead of
        // cloned (the previous `.clone()` was a redundant allocation).
        sources: self.sources,
        sources_content: self.sources_content,
        names: self.names,
        ignore_list: self.ignore_list,
        extensions: self.extensions,
        debug_id: self.debug_id,
        scopes: self.scopes,
        mappings,
        line_offsets,
        reverse_index: OnceCell::new(),
        source_map: self.source_map,
        has_range_mappings,
    })
}
}
/// Splits `input` into per-line byte ranges and records, for every line, the
/// cumulative VLQ delta state in effect at its start.
///
/// This lets any single line be decoded later without re-walking everything
/// before it. Segment shape is validated as a side effect, so a map that
/// pre-scans cleanly will also decode cleanly.
fn prescan_mappings(input: &str) -> Result<Vec<LineInfo>, DecodeError> {
    if input.is_empty() {
        return Ok(Vec::new());
    }
    let bytes = input.as_bytes();
    let total = bytes.len();
    // One line per ';' separator, plus the final line.
    let expected = bytes.iter().filter(|&&b| b == b';').count() + 1;
    let mut result = Vec::with_capacity(expected);
    let mut carry = VlqState::default();
    let mut cursor = 0usize;
    loop {
        let begin = cursor;
        let entry_state = carry;
        while cursor < total && bytes[cursor] != b';' {
            cursor += 1;
        }
        // Advance the delta state across this line so the next line sees
        // the correct carried-over values.
        carry = walk_vlq_state(bytes, begin, cursor, carry)?;
        result.push(LineInfo { byte_offset: begin, byte_end: cursor, state: entry_state });
        if cursor == total {
            break;
        }
        cursor += 1;
    }
    Ok(result)
}
/// Advances the cumulative VLQ delta state across `bytes[start..end]`
/// (one mappings line) without materializing any `Mapping` values.
///
/// Performs the same segment-shape validation as full decoding. The
/// generated column delta is read and discarded: it resets per line and is
/// not part of the carried state.
fn walk_vlq_state(
    bytes: &[u8],
    start: usize,
    end: usize,
    mut state: VlqState,
) -> Result<VlqState, DecodeError> {
    let mut pos = start;
    while pos < end {
        let byte = bytes[pos];
        if byte == b',' {
            pos += 1;
            continue;
        }
        // Generated column delta: skipped, not carried across lines.
        vlq_fast(bytes, &mut pos)?;
        if pos < end && bytes[pos] != b',' && bytes[pos] != b';' {
            state.source_index += vlq_fast(bytes, &mut pos)?;
            // Segments with exactly 2 or 3 fields are malformed.
            if pos >= end || bytes[pos] == b',' || bytes[pos] == b';' {
                return Err(DecodeError::InvalidSegmentLength { fields: 2, offset: pos });
            }
            state.original_line += vlq_fast(bytes, &mut pos)?;
            if pos >= end || bytes[pos] == b',' || bytes[pos] == b';' {
                return Err(DecodeError::InvalidSegmentLength { fields: 3, offset: pos });
            }
            state.original_column += vlq_fast(bytes, &mut pos)?;
            // Optional name index delta (5th field).
            if pos < end && bytes[pos] != b',' && bytes[pos] != b';' {
                state.name_index += vlq_fast(bytes, &mut pos)?;
            }
        }
    }
    Ok(state)
}
/// Finds per-line byte ranges with a single byte scan, skipping all VLQ
/// validation. Every `LineInfo` gets a zeroed placeholder state; the real
/// state is reconstructed lazily by `decode_line` on the fast-scan path.
fn fast_scan_lines(input: &str) -> Vec<LineInfo> {
    if input.is_empty() {
        return Vec::new();
    }
    let zero_state =
        VlqState { source_index: 0, original_line: 0, original_column: 0, name_index: 0 };
    let bytes = input.as_bytes();
    let mut line_info = Vec::new();
    let mut line_start = 0usize;
    // Each ';' closes the current line; the remainder forms the last line.
    for (i, &b) in bytes.iter().enumerate() {
        if b == b';' {
            line_info.push(LineInfo { byte_offset: line_start, byte_end: i, state: zero_state });
            line_start = i + 1;
        }
    }
    line_info.push(LineInfo { byte_offset: line_start, byte_end: bytes.len(), state: zero_state });
    line_info
}
/// Result of scanning generated code for a `sourceMappingURL` comment.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SourceMappingUrl {
    /// The map was embedded in a data URL; payload is the decoded JSON text.
    Inline(String),
    /// A URL (relative or absolute) pointing at an external map file.
    External(String),
}
pub fn parse_source_mapping_url(source: &str) -> Option<SourceMappingUrl> {
for line in source.lines().rev() {
let trimmed = line.trim();
let url = if let Some(rest) = trimmed.strip_prefix("//# sourceMappingURL=") {
rest.trim()
} else if let Some(rest) = trimmed.strip_prefix("//@ sourceMappingURL=") {
rest.trim()
} else if let Some(rest) = trimmed.strip_prefix("/*# sourceMappingURL=") {
rest.trim_end_matches("*/").trim()
} else if let Some(rest) = trimmed.strip_prefix("/*@ sourceMappingURL=") {
rest.trim_end_matches("*/").trim()
} else {
continue;
};
if url.is_empty() {
continue;
}
if let Some(base64_data) = url
.strip_prefix("data:application/json;base64,")
.or_else(|| url.strip_prefix("data:application/json;charset=utf-8;base64,"))
.or_else(|| url.strip_prefix("data:application/json;charset=UTF-8;base64,"))
{
let decoded = base64_decode(base64_data);
if let Some(json) = decoded {
return Some(SourceMappingUrl::Inline(json));
}
}
return Some(SourceMappingUrl::External(url.to_string()));
}
None
}
/// Decodes `%XX` percent-escapes in `input`.
///
/// A '%' not followed by two hex digits is copied through literally. If the
/// decoded bytes are not valid UTF-8, the original string is returned
/// unchanged rather than failing.
fn percent_decode(input: &str) -> String {
    let bytes = input.as_bytes();
    let mut out = Vec::with_capacity(bytes.len());
    let mut i = 0;
    while i < bytes.len() {
        // Try to read "%XY"; both hex digits must exist and be valid.
        let decoded = if bytes[i] == b'%' && i + 2 < bytes.len() {
            match (hex_val(bytes[i + 1]), hex_val(bytes[i + 2])) {
                (Some(hi), Some(lo)) => Some((hi << 4) | lo),
                _ => None,
            }
        } else {
            None
        };
        match decoded {
            Some(b) => {
                out.push(b);
                i += 3;
            }
            None => {
                out.push(bytes[i]);
                i += 1;
            }
        }
    }
    String::from_utf8(out).unwrap_or_else(|_| input.to_string())
}
/// Value of one ASCII hex digit (either case), or `None` for other bytes.
fn hex_val(b: u8) -> Option<u8> {
    (b as char).to_digit(16).map(|d| d as u8)
}
/// Decodes a base64 string into UTF-8 text.
///
/// Accepts both the standard alphabet (`+`, `/`) and the URL-safe alphabet
/// (`-`, `_`, RFC 4648 §5), with or without `=` padding; ASCII whitespace is
/// ignored. Returns `None` on any byte outside the alphabet or when the
/// decoded bytes are not valid UTF-8.
fn base64_decode(input: &str) -> Option<String> {
    let input = input.trim();
    let bytes: Vec<u8> = input.bytes().filter(|b| !b.is_ascii_whitespace()).collect();
    let mut output = Vec::with_capacity(bytes.len() * 3 / 4);
    for chunk in bytes.chunks(4) {
        let mut buf = [0u8; 4];
        let mut len = 0;
        for &b in chunk {
            if b == b'=' {
                // Padding terminates the group.
                break;
            }
            let val = match b {
                b'A'..=b'Z' => b - b'A',
                b'a'..=b'z' => b - b'a' + 26,
                b'0'..=b'9' => b - b'0' + 52,
                // Standard and URL-safe spellings of values 62/63.
                b'+' | b'-' => 62,
                b'/' | b'_' => 63,
                _ => return None,
            };
            buf[len] = val;
            len += 1;
        }
        // Each 4-char group packs up to 3 output bytes; partial groups
        // (2 or 3 chars) yield 1 or 2 bytes.
        if len >= 2 {
            output.push((buf[0] << 2) | (buf[1] >> 4));
        }
        if len >= 3 {
            output.push((buf[1] << 4) | (buf[2] >> 2));
        }
        if len >= 4 {
            output.push((buf[2] << 6) | buf[3]);
        }
    }
    String::from_utf8(output).ok()
}
/// Runs structural consistency checks over a fully-decoded source map and
/// returns human-readable warnings (an empty vec means the map looks sound).
///
/// Checks, in order: mapping ordering, source/name index bounds, ignoreList
/// bounds, and unreferenced sources.
pub fn validate_deep(sm: &SourceMap) -> Vec<String> {
    let mut warnings = Vec::new();
    let mappings = sm.all_mappings();
    // 1. Generated positions must be non-decreasing by (line, column).
    let mut prev = (0u32, 0u32);
    for m in mappings {
        let pos = (m.generated_line, m.generated_column);
        if pos < prev {
            warnings.push(format!(
                "mappings out of order at {}:{}",
                m.generated_line, m.generated_column
            ));
        }
        prev = pos;
    }
    // 2. Every source/name index must fall inside its table.
    for m in mappings {
        if m.source != NO_SOURCE && m.source as usize >= sm.sources.len() {
            warnings.push(format!(
                "source index {} out of bounds (max {})",
                m.source,
                sm.sources.len()
            ));
        }
        if m.name != NO_NAME && m.name as usize >= sm.names.len() {
            warnings.push(format!("name index {} out of bounds (max {})", m.name, sm.names.len()));
        }
    }
    // 3. ignoreList entries must reference existing sources.
    for &idx in &sm.ignore_list {
        if idx as usize >= sm.sources.len() {
            warnings.push(format!(
                "ignoreList index {} out of bounds (max {})",
                idx,
                sm.sources.len()
            ));
        }
    }
    // 4. Flag sources that no mapping ever points at.
    let referenced: std::collections::HashSet<u32> =
        mappings.iter().filter(|m| m.source != NO_SOURCE).map(|m| m.source).collect();
    for (i, source) in sm.sources.iter().enumerate() {
        if !referenced.contains(&(i as u32)) {
            warnings.push(format!("source \"{source}\" (index {i}) is unreferenced"));
        }
    }
    warnings
}
/// Appends `s` to `out` as a double-quoted JSON string literal.
///
/// Escapes `"`, `\`, and the common whitespace controls with short escapes;
/// all other C0 control characters become `\u00XX`. Everything else
/// (including multi-byte UTF-8) is emitted verbatim.
fn json_quote_into(out: &mut String, s: &str) {
    use std::fmt::Write;
    out.push('"');
    for c in s.chars() {
        match c {
            '"' => out.push_str("\\\""),
            '\\' => out.push_str("\\\\"),
            '\n' => out.push_str("\\n"),
            '\r' => out.push_str("\\r"),
            '\t' => out.push_str("\\t"),
            // Remaining control characters get the generic \u escape.
            c if (c as u32) < 0x20 => {
                let _ = write!(out, "\\u{:04x}", c as u32);
            }
            c => out.push(c),
        }
    }
    out.push('"');
}
/// Reverse base64 lookup table: maps an ASCII byte to its 6-bit digit value,
/// with 0xFF marking bytes outside the base64 alphabet. Built at
/// compile time (const evaluation requires `while`, not `for`).
const B64: [u8; 128] = {
    let mut table = [0xFFu8; 128];
    let chars = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    let mut i = 0u8;
    while i < 64 {
        table[chars[i as usize] as usize] = i;
        i += 1;
    }
    table
};
/// Decodes one *signed* base64-VLQ value starting at `*pos`, advancing
/// `*pos` past the consumed characters.
///
/// Fast path: a single non-continuation character decodes without looping.
/// Errors on EOF, non-base64 bytes, and values wider than the ~60-bit cap.
#[inline(always)]
fn vlq_fast(bytes: &[u8], pos: &mut usize) -> Result<i64, DecodeError> {
    let p = *pos;
    if p >= bytes.len() {
        return Err(DecodeError::UnexpectedEof { offset: p });
    }
    let b0 = bytes[p];
    // Bytes >= 128 would index past the 128-entry B64 table.
    if b0 >= 128 {
        return Err(DecodeError::InvalidBase64 { byte: b0, offset: p });
    }
    let d0 = B64[b0 as usize];
    if d0 == 0xFF {
        return Err(DecodeError::InvalidBase64 { byte: b0, offset: p });
    }
    // Bit 5 is the continuation flag; absent means single-char value.
    if (d0 & 0x20) == 0 {
        *pos = p + 1;
        // Bit 0 is the sign flag; the remaining bits are the magnitude.
        let val = (d0 >> 1) as i64;
        return Ok(if (d0 & 1) != 0 { -val } else { val });
    }
    // Multi-character value: 5 data bits per char, least-significant first.
    let mut result: u64 = (d0 & 0x1F) as u64;
    let mut shift: u32 = 5;
    let mut i = p + 1;
    loop {
        if i >= bytes.len() {
            return Err(DecodeError::UnexpectedEof { offset: i });
        }
        let b = bytes[i];
        if b >= 128 {
            return Err(DecodeError::InvalidBase64 { byte: b, offset: i });
        }
        let d = B64[b as usize];
        if d == 0xFF {
            return Err(DecodeError::InvalidBase64 { byte: b, offset: i });
        }
        i += 1;
        // Cap the shift so the 5-bit payload can never overflow the u64.
        if shift >= 60 {
            return Err(DecodeError::VlqOverflow { offset: p });
        }
        result += ((d & 0x1F) as u64) << shift;
        shift += 5;
        if (d & 0x20) == 0 {
            break;
        }
    }
    *pos = i;
    // The lowest bit is the sign; shift it out to recover the magnitude.
    let value = if (result & 1) == 1 { -((result >> 1) as i64) } else { (result >> 1) as i64 };
    Ok(value)
}
/// Decodes one *unsigned* base64-VLQ value (no sign bit in the low bit),
/// as used by range-mapping index deltas. Advances `*pos` past the value.
#[inline(always)]
fn vlq_unsigned_fast(bytes: &[u8], pos: &mut usize) -> Result<u64, DecodeError> {
    let p = *pos;
    if p >= bytes.len() {
        return Err(DecodeError::UnexpectedEof { offset: p });
    }
    let b0 = bytes[p];
    // Bytes >= 128 would index past the 128-entry B64 table.
    if b0 >= 128 {
        return Err(DecodeError::InvalidBase64 { byte: b0, offset: p });
    }
    let d0 = B64[b0 as usize];
    if d0 == 0xFF {
        return Err(DecodeError::InvalidBase64 { byte: b0, offset: p });
    }
    // No continuation bit set: the whole digit is the value.
    if (d0 & 0x20) == 0 {
        *pos = p + 1;
        return Ok(d0 as u64);
    }
    // Multi-character value: 5 data bits per char, least-significant first.
    let mut result: u64 = (d0 & 0x1F) as u64;
    let mut shift: u32 = 5;
    let mut i = p + 1;
    loop {
        if i >= bytes.len() {
            return Err(DecodeError::UnexpectedEof { offset: i });
        }
        let b = bytes[i];
        if b >= 128 {
            return Err(DecodeError::InvalidBase64 { byte: b, offset: i });
        }
        let d = B64[b as usize];
        if d == 0xFF {
            return Err(DecodeError::InvalidBase64 { byte: b, offset: i });
        }
        i += 1;
        // Cap the shift so the 5-bit payload can never overflow the u64.
        if shift >= 60 {
            return Err(DecodeError::VlqOverflow { offset: p });
        }
        result |= ((d & 0x1F) as u64) << shift;
        shift += 5;
        if (d & 0x20) == 0 {
            break;
        }
    }
    *pos = i;
    Ok(result)
}
/// Applies a range-mappings VLQ string (`;`-separated lines of unsigned
/// index deltas) by flagging the referenced entries of `mappings`.
///
/// `line_offsets` gives, per generated line, the index of that line's first
/// mapping; the VLQ values are cumulative offsets within the line.
/// Out-of-range indices are ignored rather than treated as errors.
/// NOTE(review): appears to implement the proposed "rangeMappings"
/// source-map extension — confirm exact semantics against the producer.
fn decode_range_mappings(
    input: &str,
    mappings: &mut [Mapping],
    line_offsets: &[u32],
) -> Result<(), DecodeError> {
    let bytes = input.as_bytes();
    let len = bytes.len();
    let mut pos: usize = 0;
    let mut generated_line: usize = 0;
    while pos < len {
        // First mapping index of this line; stop once past the lines the
        // offsets table knows about.
        let line_start = if generated_line + 1 < line_offsets.len() {
            line_offsets[generated_line] as usize
        } else {
            break;
        };
        // One-past-the-end index (the final line extends to mappings.len(),
        // which equals the table's trailing sentinel entry).
        let line_end = if generated_line + 2 < line_offsets.len() {
            line_offsets[generated_line + 1] as usize
        } else {
            mappings.len()
        };
        let mut mapping_index: u64 = 0;
        while pos < len {
            let byte = bytes[pos];
            if byte == b';' {
                pos += 1;
                break;
            }
            if byte == b',' {
                pos += 1;
                continue;
            }
            // Deltas accumulate within the line.
            let offset = vlq_unsigned_fast(bytes, &mut pos)?;
            mapping_index += offset;
            let abs_idx = line_start + mapping_index as usize;
            if abs_idx < line_end {
                mappings[abs_idx].is_range_mapping = true;
            }
        }
        generated_line += 1;
    }
    Ok(())
}
/// Cumulative VLQ delta accumulators shared across an entire `mappings`
/// decode. Unlike the generated column, these never reset at `;`.
#[derive(Default)]
struct MappingsDecodeState {
    source_index: i64,
    original_line: i64,
    original_column: i64,
    name_index: i64,
}
/// Decodes a single 1-, 4-, or 5-field VLQ segment at `*pos`, updating the
/// shared delta `state` and the per-line `generated_column` accumulator.
///
/// # Errors
/// `InvalidSegmentLength` when a segment stops after exactly 2 or 3 fields,
/// which the format forbids; VLQ errors propagate from `vlq_fast`.
fn decode_mapping_segment(
    bytes: &[u8],
    pos: &mut usize,
    generated_line: u32,
    generated_column: &mut i64,
    state: &mut MappingsDecodeState,
) -> Result<Mapping, DecodeError> {
    // Field 1 (always present): generated column delta.
    *generated_column += vlq_fast(bytes, pos)?;
    if *pos < bytes.len() && bytes[*pos] != b',' && bytes[*pos] != b';' {
        state.source_index += vlq_fast(bytes, pos)?;
        if *pos >= bytes.len() || bytes[*pos] == b',' || bytes[*pos] == b';' {
            return Err(DecodeError::InvalidSegmentLength { fields: 2, offset: *pos });
        }
        state.original_line += vlq_fast(bytes, pos)?;
        if *pos >= bytes.len() || bytes[*pos] == b',' || bytes[*pos] == b';' {
            return Err(DecodeError::InvalidSegmentLength { fields: 3, offset: *pos });
        }
        state.original_column += vlq_fast(bytes, pos)?;
        // Optional trailing name index delta (5th field).
        let name = if *pos < bytes.len() && bytes[*pos] != b',' && bytes[*pos] != b';' {
            state.name_index += vlq_fast(bytes, pos)?;
            state.name_index as u32
        } else {
            NO_NAME
        };
        Ok(Mapping {
            generated_line,
            generated_column: *generated_column as u32,
            source: state.source_index as u32,
            original_line: state.original_line as u32,
            original_column: state.original_column as u32,
            name,
            is_range_mapping: false,
        })
    } else {
        // 1-field segment: a generated position with no original location.
        Ok(Mapping {
            generated_line,
            generated_column: *generated_column as u32,
            source: NO_SOURCE,
            original_line: 0,
            original_column: 0,
            name: NO_NAME,
            is_range_mapping: false,
        })
    }
}
/// Expands the sparse `(line, first-mapping-index)` pairs collected by a
/// range decode into a dense offsets table of `end_line + 1` entries.
///
/// Lines at or before `start_line` get offset 0; lines with no recorded
/// start are back-filled with the next known line's offset (or `total` at
/// the tail), so `offsets[l]..offsets[l + 1]` always brackets line `l`.
fn build_range_line_offsets(
    start_line: u32,
    end_line: u32,
    line_starts: &[(u32, u32)],
    total: u32,
) -> Vec<u32> {
    let start = start_line as usize;
    let end = end_line as usize;
    // `total` doubles as the "no start recorded yet" marker.
    let mut offsets = vec![total; end + 1];
    for slot in offsets.iter_mut().take(start + 1) {
        *slot = 0;
    }
    for &(line, first_idx) in line_starts {
        offsets[line as usize] = first_idx;
    }
    // Walk backwards, propagating each known offset over the gaps above it.
    let mut fill = total;
    for i in (start..end).rev() {
        if offsets[i] == total {
            offsets[i] = fill;
        } else {
            fill = offsets[i];
        }
    }
    offsets
}
/// Eagerly decodes an entire `mappings` string.
///
/// Returns the flat mapping list plus `line_offsets`, where
/// `line_offsets[l]..line_offsets[l + 1]` indexes line `l`'s mappings
/// (one extra trailing entry equals the total count).
fn decode_mappings(input: &str) -> Result<(Vec<Mapping>, Vec<u32>), DecodeError> {
    if input.is_empty() {
        return Ok((Vec::new(), vec![0]));
    }
    let bytes = input.as_bytes();
    let len = bytes.len();
    // Count separators up front so both vectors allocate exactly once.
    let mut semicolons = 0usize;
    let mut commas = 0usize;
    for &b in bytes {
        semicolons += (b == b';') as usize;
        commas += (b == b',') as usize;
    }
    let line_count = semicolons + 1;
    let approx_segments = commas + line_count;
    let mut mappings: Vec<Mapping> = Vec::with_capacity(approx_segments);
    let mut line_offsets: Vec<u32> = Vec::with_capacity(line_count + 1);
    let mut state = MappingsDecodeState::default();
    let mut generated_line: u32 = 0;
    let mut pos: usize = 0;
    loop {
        line_offsets.push(mappings.len() as u32);
        // Generated column deltas restart on every line.
        let mut generated_column: i64 = 0;
        let mut saw_semicolon = false;
        while pos < len {
            let byte = bytes[pos];
            if byte == b';' {
                pos += 1;
                saw_semicolon = true;
                break;
            }
            if byte == b',' {
                pos += 1;
                continue;
            }
            mappings.push(decode_mapping_segment(
                bytes,
                &mut pos,
                generated_line,
                &mut generated_column,
                &mut state,
            )?);
        }
        // Input exhausted without a ';' means this was the last line.
        if !saw_semicolon {
            break;
        }
        generated_line += 1;
    }
    line_offsets.push(mappings.len() as u32);
    Ok((mappings, line_offsets))
}
/// Decodes only the mappings whose generated line lies in
/// `start_line..end_line`, while still decoding (and discarding) earlier
/// lines so the cumulative VLQ state stays correct.
///
/// Returns the in-range mappings plus a line-offsets table shaped like the
/// one from `decode_mappings` (indices relative to the returned vec).
fn decode_mappings_range(
    input: &str,
    start_line: u32,
    end_line: u32,
) -> Result<(Vec<Mapping>, Vec<u32>), DecodeError> {
    // Clamp the requested end to the number of lines actually present.
    let actual_lines = if input.is_empty() {
        0u32
    } else {
        input.as_bytes().iter().filter(|&&b| b == b';').count() as u32 + 1
    };
    let end_line = end_line.min(actual_lines);
    if input.is_empty() || start_line >= end_line {
        return Ok((Vec::new(), vec![0; end_line as usize + 1]));
    }
    let bytes = input.as_bytes();
    let len = bytes.len();
    let mut mappings: Vec<Mapping> = Vec::new();
    let mut state = MappingsDecodeState::default();
    let mut generated_line: u32 = 0;
    let mut pos: usize = 0;
    // Sparse (line, first-mapping-index) pairs for in-range lines only.
    let mut line_starts: Vec<(u32, u32)> =
        Vec::with_capacity((end_line - start_line).min(actual_lines) as usize);
    loop {
        let in_range = generated_line >= start_line && generated_line < end_line;
        if in_range {
            line_starts.push((generated_line, mappings.len() as u32));
        }
        let mut generated_column: i64 = 0;
        let mut saw_semicolon = false;
        while pos < len {
            let byte = bytes[pos];
            if byte == b';' {
                pos += 1;
                saw_semicolon = true;
                break;
            }
            if byte == b',' {
                pos += 1;
                continue;
            }
            // Out-of-range segments are decoded (to advance `state`) but dropped.
            let mapping = decode_mapping_segment(
                bytes,
                &mut pos,
                generated_line,
                &mut generated_column,
                &mut state,
            )?;
            if in_range {
                mappings.push(mapping);
            }
        }
        if !saw_semicolon {
            break;
        }
        generated_line += 1;
        // Nothing at or past end_line can be in range; stop early.
        if generated_line >= end_line {
            break;
        }
    }
    let total = mappings.len() as u32;
    Ok((mappings, build_range_line_offsets(start_line, end_line, &line_starts, total)))
}
/// Builds an index for original->generated lookups: the indices of all
/// source-bearing mappings, sorted by (source, original position, generated
/// position).
fn build_reverse_index(mappings: &[Mapping]) -> Vec<u32> {
    let mut indices: Vec<u32> = (0..mappings.len() as u32)
        .filter(|&i| mappings[i as usize].source != NO_SOURCE)
        .collect();
    // Tuple ordering is lexicographic, matching the chained-cmp order.
    indices.sort_unstable_by_key(|&i| {
        let m = &mappings[i as usize];
        (m.source, m.original_line, m.original_column, m.generated_line, m.generated_column)
    });
    indices
}
/// Streaming decoder over a raw VLQ `mappings` string: yields one
/// `Result<Mapping, DecodeError>` per segment without materializing the
/// full mapping list.
pub struct MappingsIter<'a> {
    bytes: &'a [u8],
    len: usize,
    // Cursor into `bytes`.
    pos: usize,
    // Cumulative delta accumulators (never reset between lines).
    source_index: i64,
    original_line: i64,
    original_column: i64,
    name_index: i64,
    generated_line: u32,
    // Resets to 0 at every ';'.
    generated_column: i64,
    // Latched at end of input or on the first error.
    done: bool,
}
impl<'a> MappingsIter<'a> {
    /// Creates an iterator positioned at the start of `vlq` with all delta
    /// accumulators zeroed.
    pub fn new(vlq: &'a str) -> Self {
        let bytes = vlq.as_bytes();
        Self {
            bytes,
            len: bytes.len(),
            pos: 0,
            source_index: 0,
            original_line: 0,
            original_column: 0,
            name_index: 0,
            generated_line: 0,
            generated_column: 0,
            done: false,
        }
    }
}
impl Iterator for MappingsIter<'_> {
    type Item = Result<Mapping, DecodeError>;
    /// Decodes the next segment. Errors are terminal: the iterator yields
    /// the error once and then only `None`.
    fn next(&mut self) -> Option<Self::Item> {
        if self.done {
            return None;
        }
        loop {
            if self.pos >= self.len {
                self.done = true;
                return None;
            }
            let byte = self.bytes[self.pos];
            if byte == b';' {
                // Line separator: advance the line, reset the column delta.
                self.pos += 1;
                self.generated_line += 1;
                self.generated_column = 0;
                continue;
            }
            if byte == b',' {
                self.pos += 1;
                continue;
            }
            // Field 1 (always present): generated column delta.
            match vlq_fast(self.bytes, &mut self.pos) {
                Ok(delta) => self.generated_column += delta,
                Err(e) => {
                    self.done = true;
                    return Some(Err(e));
                }
            }
            if self.pos < self.len && self.bytes[self.pos] != b',' && self.bytes[self.pos] != b';' {
                // Field 2: source index delta.
                match vlq_fast(self.bytes, &mut self.pos) {
                    Ok(delta) => self.source_index += delta,
                    Err(e) => {
                        self.done = true;
                        return Some(Err(e));
                    }
                }
                // A segment may not stop after exactly 2 fields.
                if self.pos >= self.len
                    || self.bytes[self.pos] == b','
                    || self.bytes[self.pos] == b';'
                {
                    self.done = true;
                    return Some(Err(DecodeError::InvalidSegmentLength {
                        fields: 2,
                        offset: self.pos,
                    }));
                }
                // Field 3: original line delta.
                match vlq_fast(self.bytes, &mut self.pos) {
                    Ok(delta) => self.original_line += delta,
                    Err(e) => {
                        self.done = true;
                        return Some(Err(e));
                    }
                }
                // ...nor after exactly 3 fields.
                if self.pos >= self.len
                    || self.bytes[self.pos] == b','
                    || self.bytes[self.pos] == b';'
                {
                    self.done = true;
                    return Some(Err(DecodeError::InvalidSegmentLength {
                        fields: 3,
                        offset: self.pos,
                    }));
                }
                // Field 4: original column delta.
                match vlq_fast(self.bytes, &mut self.pos) {
                    Ok(delta) => self.original_column += delta,
                    Err(e) => {
                        self.done = true;
                        return Some(Err(e));
                    }
                }
                // Optional field 5: name index delta.
                let name = if self.pos < self.len
                    && self.bytes[self.pos] != b','
                    && self.bytes[self.pos] != b';'
                {
                    match vlq_fast(self.bytes, &mut self.pos) {
                        Ok(delta) => {
                            self.name_index += delta;
                            self.name_index as u32
                        }
                        Err(e) => {
                            self.done = true;
                            return Some(Err(e));
                        }
                    }
                } else {
                    NO_NAME
                };
                return Some(Ok(Mapping {
                    generated_line: self.generated_line,
                    generated_column: self.generated_column as u32,
                    source: self.source_index as u32,
                    original_line: self.original_line as u32,
                    original_column: self.original_column as u32,
                    name,
                    is_range_mapping: false,
                }));
            } else {
                // 1-field segment: a generated position with no original
                // location attached.
                return Some(Ok(Mapping {
                    generated_line: self.generated_line,
                    generated_column: self.generated_column as u32,
                    source: NO_SOURCE,
                    original_line: 0,
                    original_column: 0,
                    name: NO_NAME,
                    is_range_mapping: false,
                }));
            }
        }
    }
}
/// Fluent builder for assembling a [`SourceMap`] from already-decoded parts.
/// Every setter consumes and returns the builder; finish with `build`.
#[must_use]
pub struct SourceMapBuilder {
    file: Option<String>,
    source_root: Option<String>,
    sources: Vec<String>,
    sources_content: Vec<Option<String>>,
    names: Vec<String>,
    mappings: Vec<Mapping>,
    ignore_list: Vec<u32>,
    debug_id: Option<String>,
    scopes: Option<ScopeInfo>,
}
impl SourceMapBuilder {
    /// Starts a builder with every field empty or unset.
    pub fn new() -> Self {
        Self {
            file: None,
            source_root: None,
            sources: Vec::new(),
            sources_content: Vec::new(),
            names: Vec::new(),
            mappings: Vec::new(),
            ignore_list: Vec::new(),
            debug_id: None,
            scopes: None,
        }
    }
    /// Sets the generated `file` name.
    pub fn file(mut self, file: impl Into<String>) -> Self {
        self.file = Some(file.into());
        self
    }
    /// Sets the `sourceRoot` prefix.
    pub fn source_root(mut self, root: impl Into<String>) -> Self {
        self.source_root = Some(root.into());
        self
    }
    /// Replaces the source path list.
    pub fn sources(mut self, sources: impl IntoIterator<Item = impl Into<String>>) -> Self {
        let collected: Vec<String> = sources.into_iter().map(|s| s.into()).collect();
        self.sources = collected;
        self
    }
    /// Replaces the per-source content list (`None` = content unavailable).
    pub fn sources_content(
        mut self,
        content: impl IntoIterator<Item = Option<impl Into<String>>>,
    ) -> Self {
        let collected: Vec<Option<String>> =
            content.into_iter().map(|entry| entry.map(|c| c.into())).collect();
        self.sources_content = collected;
        self
    }
    /// Replaces the names table.
    pub fn names(mut self, names: impl IntoIterator<Item = impl Into<String>>) -> Self {
        let collected: Vec<String> = names.into_iter().map(|n| n.into()).collect();
        self.names = collected;
        self
    }
    /// Replaces the decoded mapping list.
    pub fn mappings(mut self, mappings: impl IntoIterator<Item = Mapping>) -> Self {
        self.mappings = mappings.into_iter().collect();
        self
    }
    /// Replaces the `ignoreList` source indices.
    pub fn ignore_list(mut self, list: impl IntoIterator<Item = u32>) -> Self {
        self.ignore_list = list.into_iter().collect();
        self
    }
    /// Sets the debug id.
    pub fn debug_id(mut self, id: impl Into<String>) -> Self {
        self.debug_id = Some(id.into());
        self
    }
    /// Attaches decoded scopes information.
    pub fn scopes(mut self, scopes: ScopeInfo) -> Self {
        self.scopes = Some(scopes);
        self
    }
    /// Consumes the builder and assembles the final [`SourceMap`].
    pub fn build(self) -> SourceMap {
        SourceMap::from_parts(
            self.file,
            self.source_root,
            self.sources,
            self.sources_content,
            self.names,
            self.mappings,
            self.ignore_list,
            self.debug_id,
            self.scopes,
        )
    }
}
impl Default for SourceMapBuilder {
    /// Equivalent to [`SourceMapBuilder::new`].
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
use super::*;
// Minimal 3-line fixture: one source, one name, one segment per line.
fn simple_map() -> &'static str {
    r#"{"version":3,"sources":["input.js"],"names":["hello"],"mappings":"AAAA;AACA,EAAA;AACA"}"#
}
// Basic field extraction plus line/mapping counting.
#[test]
fn parse_basic() {
    let sm = SourceMap::from_json(simple_map()).unwrap();
    assert_eq!(sm.sources, vec!["input.js"]);
    assert_eq!(sm.names, vec!["hello"]);
    assert_eq!(sm.line_count(), 3);
    assert!(sm.mapping_count() > 0);
}
// Serialize -> reparse must preserve counts and every forward lookup.
#[test]
fn to_json_roundtrip() {
    let json = simple_map();
    let sm = SourceMap::from_json(json).unwrap();
    let output = sm.to_json();
    let sm2 = SourceMap::from_json(&output).unwrap();
    assert_eq!(sm2.sources, sm.sources);
    assert_eq!(sm2.names, sm.names);
    assert_eq!(sm2.mapping_count(), sm.mapping_count());
    assert_eq!(sm2.line_count(), sm.line_count());
    for m in sm.all_mappings() {
        let loc1 = sm.original_position_for(m.generated_line, m.generated_column);
        let loc2 = sm2.original_position_for(m.generated_line, m.generated_column);
        match (loc1, loc2) {
            (Some(a), Some(b)) => {
                assert_eq!(a.source, b.source);
                assert_eq!(a.line, b.line);
                assert_eq!(a.column, b.column);
                assert_eq!(a.name, b.name);
            }
            (None, None) => {}
            _ => panic!("lookup mismatch at ({}, {})", m.generated_line, m.generated_column),
        }
    }
}
// Roundtrip on a generated map: spot-check lookups across lines/columns.
#[test]
fn to_json_roundtrip_large() {
    let json = generate_test_sourcemap(50, 10, 3);
    let sm = SourceMap::from_json(&json).unwrap();
    let output = sm.to_json();
    let sm2 = SourceMap::from_json(&output).unwrap();
    assert_eq!(sm2.mapping_count(), sm.mapping_count());
    for line in (0..sm.line_count() as u32).step_by(5) {
        for col in [0u32, 10, 20, 50] {
            let a = sm.original_position_for(line, col);
            let b = sm2.original_position_for(line, col);
            match (a, b) {
                (Some(a), Some(b)) => {
                    assert_eq!(a.source, b.source);
                    assert_eq!(a.line, b.line);
                    assert_eq!(a.column, b.column);
                }
                (None, None) => {}
                _ => panic!("mismatch at ({line}, {col})"),
            }
        }
    }
}
// Optional top-level fields must survive serialization.
#[test]
fn to_json_preserves_fields() {
    let json = r#"{"version":3,"file":"out.js","sourceRoot":"src/","sources":["app.ts"],"sourcesContent":["const x = 1;"],"names":["x"],"mappings":"AAAAA","ignoreList":[0]}"#;
    let sm = SourceMap::from_json(json).unwrap();
    let output = sm.to_json();
    assert!(output.contains(r#""file":"out.js""#));
    assert!(output.contains(r#""sourceRoot":"src/""#));
    assert!(output.contains(r#""sourcesContent":["const x = 1;"]"#));
    assert!(output.contains(r#""ignoreList":[0]"#));
    let sm2 = SourceMap::from_json(&output).unwrap();
    assert_eq!(sm2.file.as_deref(), Some("out.js"));
    assert_eq!(sm2.ignore_list, vec![0]);
}
// Lookup at an exact segment start column.
#[test]
fn original_position_for_exact_match() {
    let sm = SourceMap::from_json(simple_map()).unwrap();
    let loc = sm.original_position_for(0, 0).unwrap();
    assert_eq!(loc.source, 0);
    assert_eq!(loc.line, 0);
    assert_eq!(loc.column, 0);
}
// Columns between segment starts resolve to the preceding segment.
#[test]
fn original_position_for_column_within_segment() {
    let sm = SourceMap::from_json(simple_map()).unwrap();
    let loc = sm.original_position_for(1, 5);
    assert!(loc.is_some());
}
// Lines beyond the map produce no location.
#[test]
fn original_position_for_nonexistent_line() {
    let sm = SourceMap::from_json(simple_map()).unwrap();
    assert!(sm.original_position_for(999, 0).is_none());
}
// Smoke test: a column before the first segment must not panic.
#[test]
fn original_position_for_before_first_mapping() {
    let sm = SourceMap::from_json(simple_map()).unwrap();
    let loc = sm.original_position_for(1, 0);
    let _ = loc;
}
// Reverse (original -> generated) lookup on the fixture.
#[test]
fn generated_position_for_basic() {
    let sm = SourceMap::from_json(simple_map()).unwrap();
    let loc = sm.generated_position_for("input.js", 0, 0).unwrap();
    assert_eq!(loc.line, 0);
    assert_eq!(loc.column, 0);
}
// Unknown source names yield no generated position.
#[test]
fn generated_position_for_unknown_source() {
    let sm = SourceMap::from_json(simple_map()).unwrap();
    assert!(sm.generated_position_for("nonexistent.js", 0, 0).is_none());
}
// Only version 3 is accepted.
#[test]
fn parse_invalid_version() {
    let json = r#"{"version":2,"sources":[],"names":[],"mappings":""}"#;
    let err = SourceMap::from_json(json).unwrap_err();
    assert!(matches!(err, ParseError::InvalidVersion(2)));
}
// An empty mappings string is valid and contains nothing.
#[test]
fn parse_empty_mappings() {
    let json = r#"{"version":3,"sources":[],"names":[],"mappings":""}"#;
    let sm = SourceMap::from_json(json).unwrap();
    assert_eq!(sm.mapping_count(), 0);
    assert!(sm.original_position_for(0, 0).is_none());
}
// sourceRoot is prefixed onto every source path.
#[test]
fn parse_with_source_root() {
    let json = r#"{"version":3,"sourceRoot":"src/","sources":["foo.js"],"names":[],"mappings":"AAAA"}"#;
    let sm = SourceMap::from_json(json).unwrap();
    assert_eq!(sm.sources, vec!["src/foo.js"]);
}
// sourcesContent entries are carried through verbatim.
#[test]
fn parse_with_sources_content() {
    let json = r#"{"version":3,"sources":["a.js"],"sourcesContent":["var x = 1;"],"names":[],"mappings":"AAAA"}"#;
    let sm = SourceMap::from_json(json).unwrap();
    assert_eq!(sm.sources_content, vec![Some("var x = 1;".to_string())]);
}
// Per-line access: populated for real lines, empty past the end.
#[test]
fn mappings_for_line() {
    let sm = SourceMap::from_json(simple_map()).unwrap();
    let line0 = sm.mappings_for_line(0);
    assert!(!line0.is_empty());
    let empty = sm.mappings_for_line(999);
    assert!(empty.is_empty());
}
// Forward lookups across a larger generated map.
#[test]
fn large_sourcemap_lookup() {
    let json = generate_test_sourcemap(500, 20, 5);
    let sm = SourceMap::from_json(&json).unwrap();
    for line in [0, 10, 100, 250, 499] {
        let mappings = sm.mappings_for_line(line);
        if let Some(m) = mappings.first() {
            let loc = sm.original_position_for(line, m.generated_column);
            assert!(loc.is_some(), "lookup failed for line {line}");
        }
    }
}
#[test]
fn reverse_lookup_roundtrip() {
let json = generate_test_sourcemap(100, 10, 3);
let sm = SourceMap::from_json(&json).unwrap();
let mapping = &sm.mappings[50];
if mapping.source != NO_SOURCE {
let source_name = sm.source(mapping.source);
let result = sm.generated_position_for(
source_name,
mapping.original_line,
mapping.original_column,
);
assert!(result.is_some(), "reverse lookup failed");
}
}
// all_generated_positions_for returns every generated position for an original one.
#[test]
fn all_generated_positions_for_basic() {
let sm = SourceMap::from_json(simple_map()).unwrap();
let results = sm.all_generated_positions_for("input.js", 0, 0);
assert!(!results.is_empty(), "should find at least one position");
assert_eq!(results[0].line, 0);
assert_eq!(results[0].column, 0);
}
// Unknown source name yields an empty result set, not an error.
#[test]
fn all_generated_positions_for_unknown_source() {
let sm = SourceMap::from_json(simple_map()).unwrap();
let results = sm.all_generated_positions_for("nonexistent.js", 0, 0);
assert!(results.is_empty());
}
// A known source but unmapped original position yields an empty result set.
#[test]
fn all_generated_positions_for_no_match() {
let sm = SourceMap::from_json(simple_map()).unwrap();
let results = sm.all_generated_positions_for("input.js", 999, 999);
assert!(results.is_empty());
}
// encode_mappings output re-parses to a map with the same mapping count.
#[test]
fn encode_mappings_roundtrip() {
let json = generate_test_sourcemap(50, 10, 3);
let sm = SourceMap::from_json(&json).unwrap();
let encoded = sm.encode_mappings();
let json2 = format!(
r#"{{"version":3,"sources":{sources},"names":{names},"mappings":"{mappings}"}}"#,
sources = serde_json::to_string(&sm.sources).unwrap(),
names = serde_json::to_string(&sm.names).unwrap(),
mappings = encoded,
);
let sm2 = SourceMap::from_json(&json2).unwrap();
assert_eq!(sm2.mapping_count(), sm.mapping_count());
}
// An indexed map ("sections") is flattened: sources/names are merged and
// each section's mappings are shifted by its (line, column) offset.
#[test]
fn indexed_source_map() {
let json = r#"{
"version": 3,
"file": "bundle.js",
"sections": [
{
"offset": {"line": 0, "column": 0},
"map": {
"version": 3,
"sources": ["a.js"],
"names": ["foo"],
"mappings": "AAAAA"
}
},
{
"offset": {"line": 10, "column": 0},
"map": {
"version": 3,
"sources": ["b.js"],
"names": ["bar"],
"mappings": "AAAAA"
}
}
]
}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.sources.len(), 2);
assert!(sm.sources.contains(&"a.js".to_string()));
assert!(sm.sources.contains(&"b.js".to_string()));
assert_eq!(sm.names.len(), 2);
assert!(sm.names.contains(&"foo".to_string()));
assert!(sm.names.contains(&"bar".to_string()));
// Generated line 0 resolves through the first section, line 10 through the second.
let loc = sm.original_position_for(0, 0).unwrap();
assert_eq!(sm.source(loc.source), "a.js");
assert_eq!(loc.line, 0);
assert_eq!(loc.column, 0);
let loc = sm.original_position_for(10, 0).unwrap();
assert_eq!(sm.source(loc.source), "b.js");
assert_eq!(loc.line, 0);
assert_eq!(loc.column, 0);
}
// The same source path appearing in two sections is deduplicated into one entry.
#[test]
fn indexed_source_map_shared_sources() {
let json = r#"{
"version": 3,
"sections": [
{
"offset": {"line": 0, "column": 0},
"map": {
"version": 3,
"sources": ["shared.js"],
"names": [],
"mappings": "AAAA"
}
},
{
"offset": {"line": 5, "column": 0},
"map": {
"version": 3,
"sources": ["shared.js"],
"names": [],
"mappings": "AACA"
}
}
]
}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.sources.len(), 1);
assert_eq!(sm.sources[0], "shared.js");
// Mappings from both sections point at the single deduplicated source index.
let loc0 = sm.original_position_for(0, 0).unwrap();
let loc5 = sm.original_position_for(5, 0).unwrap();
assert_eq!(loc0.source, loc5.source);
}
// "ignoreList" indices are parsed and exposed as-is.
#[test]
fn parse_ignore_list() {
let json = r#"{"version":3,"sources":["app.js","node_modules/lib.js"],"names":[],"mappings":"AAAA;ACAA","ignoreList":[1]}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.ignore_list, vec![1]);
}
fn build_sourcemap_json(
sources: &[&str],
names: &[&str],
mappings_data: &[Vec<Vec<i64>>],
) -> String {
let converted: Vec<Vec<srcmap_codec::Segment>> = mappings_data
.iter()
.map(|line| {
line.iter().map(|seg| srcmap_codec::Segment::from(seg.as_slice())).collect()
})
.collect();
let encoded = srcmap_codec::encode(&converted);
format!(
r#"{{"version":3,"sources":[{}],"names":[{}],"mappings":"{}"}}"#,
sources.iter().map(|s| format!("\"{s}\"")).collect::<Vec<_>>().join(","),
names.iter().map(|n| format!("\"{n}\"")).collect::<Vec<_>>().join(","),
encoded,
)
}
// Each ';' in "mappings" starts a new generated line; empty runs give empty lines.
#[test]
fn decode_multiple_consecutive_semicolons() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA;;;AACA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.line_count(), 4);
assert!(sm.mappings_for_line(1).is_empty());
assert!(sm.mappings_for_line(2).is_empty());
assert!(!sm.mappings_for_line(0).is_empty());
assert!(!sm.mappings_for_line(3).is_empty());
}
// Trailing semicolons still count as (empty) lines.
#[test]
fn decode_trailing_semicolons() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA;;"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.line_count(), 3);
assert!(!sm.mappings_for_line(0).is_empty());
assert!(sm.mappings_for_line(1).is_empty());
assert!(sm.mappings_for_line(2).is_empty());
}
// A leading comma (empty segment) is tolerated and skipped.
#[test]
fn decode_leading_comma() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":",AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.mapping_count(), 1);
let m = &sm.all_mappings()[0];
assert_eq!(m.generated_line, 0);
assert_eq!(m.generated_column, 0);
}
// 1-field segments carry only a generated column: source stays NO_SOURCE and
// they do not resolve to an original position.
#[test]
fn decode_single_field_segments() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"A,C"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.mapping_count(), 2);
for m in sm.all_mappings() {
assert_eq!(m.source, NO_SOURCE);
}
assert_eq!(sm.all_mappings()[0].generated_column, 0);
assert_eq!(sm.all_mappings()[1].generated_column, 1);
assert!(sm.original_position_for(0, 0).is_none());
assert!(sm.original_position_for(0, 1).is_none());
}
// 5-field segments attach a name index, surfaced via OriginalLocation::name.
#[test]
fn decode_five_field_segments_with_names() {
let mappings_data = vec![vec![vec![0_i64, 0, 0, 0, 0], vec![10, 0, 0, 5, 1]]];
let json = build_sourcemap_json(&["app.js"], &["foo", "bar"], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
assert_eq!(sm.mapping_count(), 2);
assert_eq!(sm.all_mappings()[0].name, 0);
assert_eq!(sm.all_mappings()[1].name, 1);
let loc = sm.original_position_for(0, 0).unwrap();
assert_eq!(loc.name, Some(0));
assert_eq!(sm.name(0), "foo");
let loc = sm.original_position_for(0, 10).unwrap();
assert_eq!(loc.name, Some(1));
assert_eq!(sm.name(1), "bar");
}
// Multi-character VLQ values (hundreds/thousands) decode to the right fields.
#[test]
fn decode_large_vlq_values() {
let mappings_data = vec![vec![vec![500_i64, 0, 1000, 2000]]];
let json = build_sourcemap_json(&["big.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
assert_eq!(sm.mapping_count(), 1);
let m = &sm.all_mappings()[0];
assert_eq!(m.generated_column, 500);
assert_eq!(m.original_line, 1000);
assert_eq!(m.original_column, 2000);
let loc = sm.original_position_for(0, 500).unwrap();
assert_eq!(loc.line, 1000);
assert_eq!(loc.column, 2000);
}
// ";;;" is four empty lines and zero mappings.
#[test]
fn decode_only_semicolons() {
let json = r#"{"version":3,"sources":[],"names":[],"mappings":";;;"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.line_count(), 4);
assert_eq!(sm.mapping_count(), 0);
for line in 0..4 {
assert!(sm.mappings_for_line(line).is_empty());
}
}
// A 1-field and a 4-field segment can coexist on the same line.
#[test]
fn decode_mixed_single_and_four_field_segments() {
let mappings_data = vec![vec![srcmap_codec::Segment::four(5, 0, 0, 0)]];
let four_field_encoded = srcmap_codec::encode(&mappings_data);
let combined_mappings = format!("A,{four_field_encoded}");
let json = format!(
r#"{{"version":3,"sources":["x.js"],"names":[],"mappings":"{combined_mappings}"}}"#,
);
let sm = SourceMap::from_json(&json).unwrap();
assert_eq!(sm.mapping_count(), 2);
assert_eq!(sm.all_mappings()[0].source, NO_SOURCE);
assert_eq!(sm.all_mappings()[1].source, 0);
}
// Optional fields absent from the JSON parse to None / empty collections.
#[test]
fn parse_missing_optional_fields() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert!(sm.file.is_none());
assert!(sm.source_root.is_none());
assert!(sm.sources_content.is_empty());
assert!(sm.ignore_list.is_empty());
}
// "file" is parsed into the optional `file` field.
#[test]
fn parse_with_file_field() {
let json =
r#"{"version":3,"file":"output.js","sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.file.as_deref(), Some("output.js"));
}
// A null entry in "sources" becomes an empty string, preserving index positions.
#[test]
fn parse_null_entries_in_sources() {
let json = r#"{"version":3,"sources":["a.js",null,"c.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.sources.len(), 3);
assert_eq!(sm.sources[0], "a.js");
assert_eq!(sm.sources[1], "");
assert_eq!(sm.sources[2], "c.js");
}
// sourceRoot is NOT prepended to null (empty) source entries.
#[test]
fn parse_null_entries_in_sources_with_source_root() {
let json = r#"{"version":3,"sourceRoot":"lib/","sources":["a.js",null],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.sources[0], "lib/a.js");
assert_eq!(sm.sources[1], "");
}
// An empty "names" array parses to an empty names list.
#[test]
fn parse_empty_names_array() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert!(sm.names.is_empty());
}
// Non-JSON input is rejected with ParseError::Json.
#[test]
fn parse_invalid_json() {
let result = SourceMap::from_json("not valid json");
assert!(result.is_err());
assert!(matches!(result.unwrap_err(), ParseError::Json(_)));
}
// A map with no "version" key fails to parse.
#[test]
fn parse_json_missing_version() {
let result = SourceMap::from_json(r#"{"sources":[],"names":[],"mappings":""}"#);
assert!(result.is_err());
}
// Two sources can map to the same original (line, column); the source index
// still disambiguates them.
#[test]
fn parse_multiple_sources_overlapping_original_positions() {
let mappings_data = vec![vec![vec![0_i64, 0, 5, 10], vec![10, 1, 5, 10]]];
let json = build_sourcemap_json(&["a.js", "b.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
let loc0 = sm.original_position_for(0, 0).unwrap();
assert_eq!(loc0.source, 0);
assert_eq!(sm.source(loc0.source), "a.js");
let loc1 = sm.original_position_for(0, 10).unwrap();
assert_eq!(loc1.source, 1);
assert_eq!(sm.source(loc1.source), "b.js");
assert_eq!(loc0.line, loc1.line);
assert_eq!(loc0.column, loc1.column);
}
// Null entries in "sourcesContent" are kept as None at the matching index.
#[test]
fn parse_sources_content_with_null_entries() {
let json = r#"{"version":3,"sources":["a.js","b.js"],"sourcesContent":["content a",null],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.sources_content.len(), 2);
assert_eq!(sm.sources_content[0], Some("content a".to_string()));
assert_eq!(sm.sources_content[1], None);
}
// Fully empty sources/names/mappings parse to an empty but valid map.
#[test]
fn parse_empty_sources_and_names() {
let json = r#"{"version":3,"sources":[],"names":[],"mappings":""}"#;
let sm = SourceMap::from_json(json).unwrap();
assert!(sm.sources.is_empty());
assert!(sm.names.is_empty());
assert_eq!(sm.mapping_count(), 0);
}
// A generated column that exactly matches a segment returns that segment.
#[test]
fn lookup_exact_match() {
let mappings_data =
vec![vec![vec![0_i64, 0, 10, 20], vec![5, 0, 10, 25], vec![15, 0, 11, 0]]];
let json = build_sourcemap_json(&["src.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
let loc = sm.original_position_for(0, 5).unwrap();
assert_eq!(loc.line, 10);
assert_eq!(loc.column, 25);
}
// Columns before the first segment on a line have no mapping (default GLB bias).
#[test]
fn lookup_before_first_segment() {
let mappings_data = vec![vec![vec![5_i64, 0, 0, 0]]];
let json = build_sourcemap_json(&["src.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
assert!(sm.original_position_for(0, 0).is_none());
assert!(sm.original_position_for(0, 4).is_none());
}
// Columns between segments snap to the nearest segment to the left.
#[test]
fn lookup_between_segments() {
let mappings_data = vec![vec![vec![0_i64, 0, 1, 0], vec![10, 0, 2, 0], vec![20, 0, 3, 0]]];
let json = build_sourcemap_json(&["src.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
let loc = sm.original_position_for(0, 7).unwrap();
assert_eq!(loc.line, 1);
assert_eq!(loc.column, 0);
let loc = sm.original_position_for(0, 15).unwrap();
assert_eq!(loc.line, 2);
assert_eq!(loc.column, 0);
}
// Columns past the last segment snap back to the last segment on the line.
#[test]
fn lookup_after_last_segment() {
let mappings_data = vec![vec![vec![0_i64, 0, 0, 0], vec![10, 0, 1, 5]]];
let json = build_sourcemap_json(&["src.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
let loc = sm.original_position_for(0, 100).unwrap();
assert_eq!(loc.line, 1);
assert_eq!(loc.column, 5);
}
// Lookups on a line with no mappings return None; neighbors are unaffected.
#[test]
fn lookup_empty_lines_no_mappings() {
let mappings_data = vec![vec![vec![0_i64, 0, 0, 0]], vec![], vec![vec![0_i64, 0, 2, 0]]];
let json = build_sourcemap_json(&["src.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
assert!(sm.original_position_for(1, 0).is_none());
assert!(sm.original_position_for(1, 10).is_none());
assert!(sm.original_position_for(0, 0).is_some());
assert!(sm.original_position_for(2, 0).is_some());
}
// A single segment at column 0 covers every column on its line.
#[test]
fn lookup_line_with_single_mapping() {
let mappings_data = vec![vec![vec![0_i64, 0, 0, 0]]];
let json = build_sourcemap_json(&["src.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
let loc = sm.original_position_for(0, 0).unwrap();
assert_eq!(loc.line, 0);
assert_eq!(loc.column, 0);
let loc = sm.original_position_for(0, 50).unwrap();
assert_eq!(loc.line, 0);
assert_eq!(loc.column, 0);
}
// Columns split between two segments: below 8 resolves left, 8+ resolves right.
#[test]
fn lookup_column_0_vs_column_nonzero() {
let mappings_data = vec![vec![vec![0_i64, 0, 10, 0], vec![8, 0, 20, 5]]];
let json = build_sourcemap_json(&["src.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
let loc0 = sm.original_position_for(0, 0).unwrap();
assert_eq!(loc0.line, 10);
assert_eq!(loc0.column, 0);
let loc8 = sm.original_position_for(0, 8).unwrap();
assert_eq!(loc8.line, 20);
assert_eq!(loc8.column, 5);
let loc4 = sm.original_position_for(0, 4).unwrap();
assert_eq!(loc4.line, 10);
}
// Lines beyond the decoded line count return None (no snapping across lines).
#[test]
fn lookup_beyond_last_line() {
let mappings_data = vec![vec![vec![0_i64, 0, 0, 0]]];
let json = build_sourcemap_json(&["src.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
assert!(sm.original_position_for(1, 0).is_none());
assert!(sm.original_position_for(100, 0).is_none());
}
// A 1-field segment is counted as a mapping but yields no original position.
#[test]
fn lookup_single_field_returns_none() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"A"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.mapping_count(), 1);
assert!(sm.original_position_for(0, 0).is_none());
}
// An exact original (line, column) reverse-resolves to its generated position.
#[test]
fn reverse_lookup_exact_match() {
let mappings_data = vec![
vec![vec![0_i64, 0, 0, 0]],
vec![vec![4, 0, 1, 0], vec![10, 0, 1, 8]],
vec![vec![0, 0, 2, 0]],
];
let json = build_sourcemap_json(&["main.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
let loc = sm.generated_position_for("main.js", 1, 8).unwrap();
assert_eq!(loc.line, 1);
assert_eq!(loc.column, 10);
}
// An original line with no mapping at all reverse-resolves to None.
#[test]
fn reverse_lookup_no_match() {
let mappings_data = vec![vec![vec![0_i64, 0, 0, 0], vec![10, 0, 0, 10]]];
let json = build_sourcemap_json(&["main.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
assert!(sm.generated_position_for("main.js", 99, 0).is_none());
}
// A source path absent from the map reverse-resolves to None.
#[test]
fn reverse_lookup_unknown_source() {
let mappings_data = vec![vec![vec![0_i64, 0, 0, 0]]];
let json = build_sourcemap_json(&["main.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
assert!(sm.generated_position_for("unknown.js", 0, 0).is_none());
}
// When one original position maps to several generated positions, either
// candidate is an acceptable answer for the single-result API.
#[test]
fn reverse_lookup_multiple_mappings_same_original() {
let mappings_data = vec![vec![vec![0_i64, 0, 5, 10]], vec![vec![20, 0, 5, 10]]];
let json = build_sourcemap_json(&["src.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
let loc = sm.generated_position_for("src.js", 5, 10);
assert!(loc.is_some());
let loc = loc.unwrap();
assert!(
(loc.line == 0 && loc.column == 0) || (loc.line == 1 && loc.column == 20),
"Expected (0,0) or (1,20), got ({},{})",
loc.line,
loc.column
);
}
// Reverse lookup is filtered by source: same original coordinates in two
// sources resolve to their respective generated columns.
#[test]
fn reverse_lookup_with_multiple_sources() {
let mappings_data = vec![vec![vec![0_i64, 0, 0, 0], vec![10, 1, 0, 0]]];
let json = build_sourcemap_json(&["a.js", "b.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
let loc_a = sm.generated_position_for("a.js", 0, 0).unwrap();
assert_eq!(loc_a.line, 0);
assert_eq!(loc_a.column, 0);
let loc_b = sm.generated_position_for("b.js", 0, 0).unwrap();
assert_eq!(loc_b.line, 0);
assert_eq!(loc_b.column, 10);
}
// 1-field segments (no original position) are skipped during reverse lookup;
// only the 5-field segment at generated column 5 matches.
#[test]
fn reverse_lookup_skips_single_field_segments() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"A,KAAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let loc = sm.generated_position_for("a.js", 0, 0).unwrap();
assert_eq!(loc.line, 0);
assert_eq!(loc.column, 5);
}
// Each distinct original line reverse-resolves to its own generated line.
#[test]
fn reverse_lookup_finds_each_original_line() {
let mappings_data = vec![
vec![vec![0_i64, 0, 0, 0]],
vec![vec![0, 0, 1, 0]],
vec![vec![0, 0, 2, 0]],
vec![vec![0, 0, 3, 0]],
];
let json = build_sourcemap_json(&["x.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
for orig_line in 0..4 {
let loc = sm.generated_position_for("x.js", orig_line, 0).unwrap();
assert_eq!(loc.line, orig_line, "reverse lookup for orig line {orig_line}");
assert_eq!(loc.column, 0);
}
}
// Multiple "ignoreList" indices are parsed in order.
#[test]
fn parse_with_ignore_list_multiple() {
let json = r#"{"version":3,"sources":["app.js","node_modules/lib.js","vendor.js"],"names":[],"mappings":"AAAA","ignoreList":[1,2]}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.ignore_list, vec![1, 2]);
}
// An explicit empty "ignoreList" parses to an empty list.
#[test]
fn parse_with_empty_ignore_list() {
let json =
r#"{"version":3,"sources":["app.js"],"names":[],"mappings":"AAAA","ignoreList":[]}"#;
let sm = SourceMap::from_json(json).unwrap();
assert!(sm.ignore_list.is_empty());
}
// A missing "ignoreList" field defaults to empty.
#[test]
fn parse_without_ignore_list_field() {
let json = r#"{"version":3,"sources":["app.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert!(sm.ignore_list.is_empty());
}
// source_index resolves each known path to its index and unknown paths to None.
#[test]
fn source_index_lookup() {
let json = r#"{"version":3,"sources":["a.js","b.js","c.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.source_index("a.js"), Some(0));
assert_eq!(sm.source_index("b.js"), Some(1));
assert_eq!(sm.source_index("c.js"), Some(2));
assert_eq!(sm.source_index("d.js"), None);
}
// all_mappings flattens every segment across lines; count matches mapping_count.
#[test]
fn all_mappings_returns_complete_list() {
let mappings_data =
vec![vec![vec![0_i64, 0, 0, 0], vec![5, 0, 0, 5]], vec![vec![0, 0, 1, 0]]];
let json = build_sourcemap_json(&["x.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
assert_eq!(sm.all_mappings().len(), 3);
assert_eq!(sm.mapping_count(), 3);
}
// line_count includes trailing empty lines from the mappings string.
#[test]
fn line_count_matches_decoded_lines() {
let mappings_data =
vec![vec![vec![0_i64, 0, 0, 0]], vec![], vec![vec![0_i64, 0, 2, 0]], vec![], vec![]];
let json = build_sourcemap_json(&["x.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
assert_eq!(sm.line_count(), 5);
}
// ParseError Display output is human-readable and variant-specific.
#[test]
fn parse_error_display() {
let err = ParseError::InvalidVersion(5);
assert_eq!(format!("{err}"), "unsupported source map version: 5");
let json_err = SourceMap::from_json("{}").unwrap_err();
let display = format!("{json_err}");
assert!(display.contains("JSON parse error") || display.contains("missing field"));
}
// A 4-field segment has no name index, so OriginalLocation::name is None even
// when the map's names table is non-empty.
#[test]
fn original_position_name_none_for_four_field() {
let mappings_data = vec![vec![vec![0_i64, 0, 5, 10]]];
let json = build_sourcemap_json(&["a.js"], &["unused_name"], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
let loc = sm.original_position_for(0, 0).unwrap();
assert!(loc.name.is_none());
}
// Every 4/5-field mapping must round-trip exactly: forward lookup at its
// generated position returns its original position, and reverse lookup of
// that original position returns the same generated position.
#[test]
fn forward_and_reverse_roundtrip_comprehensive() {
let mappings_data = vec![
vec![vec![0_i64, 0, 0, 0], vec![10, 0, 0, 10], vec![20, 1, 5, 0]],
vec![vec![0, 0, 1, 0], vec![5, 1, 6, 3]],
vec![vec![0, 0, 2, 0]],
];
let json = build_sourcemap_json(&["a.js", "b.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
for m in sm.all_mappings() {
// 1-field mappings have no original side to round-trip.
if m.source == NO_SOURCE {
continue;
}
let source_name = sm.source(m.source);
let orig = sm.original_position_for(m.generated_line, m.generated_column).unwrap();
assert_eq!(orig.source, m.source);
assert_eq!(orig.line, m.original_line);
assert_eq!(orig.column, m.original_column);
let gen_loc =
sm.generated_position_for(source_name, m.original_line, m.original_column).unwrap();
assert_eq!(gen_loc.line, m.generated_line);
assert_eq!(gen_loc.column, m.generated_column);
}
}
// sourceRoot is prepended to every entry in "sources".
#[test]
fn source_root_with_multiple_sources() {
let json = r#"{"version":3,"sourceRoot":"lib/","sources":["a.js","b.js","c.js"],"names":[],"mappings":"AAAA,KACA,KACA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.sources, vec!["lib/a.js", "lib/b.js", "lib/c.js"]);
}
// An empty sourceRoot leaves sources unchanged.
#[test]
fn source_root_empty_string() {
let json =
r#"{"version":3,"sourceRoot":"","sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.sources, vec!["a.js"]);
}
// to_json re-emits the original sourceRoot field.
#[test]
fn source_root_preserved_in_to_json() {
let json =
r#"{"version":3,"sourceRoot":"src/","sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let output = sm.to_json();
assert!(output.contains(r#""sourceRoot":"src/""#));
}
// Reverse lookup matches against the root-prefixed path, not the raw entry.
#[test]
fn source_root_reverse_lookup_uses_prefixed_name() {
let json =
r#"{"version":3,"sourceRoot":"src/","sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert!(sm.generated_position_for("src/a.js", 0, 0).is_some());
assert!(sm.generated_position_for("a.js", 0, 0).is_none());
}
// With a trailing slash the join produces a normal path.
#[test]
fn source_root_with_trailing_slash() {
let json =
r#"{"version":3,"sourceRoot":"src/","sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.sources[0], "src/a.js");
}
// Documents current behavior: the root is prepended verbatim with no
// separator inserted ("src" + "a.js" -> "srca.js"), and that result survives
// a to_json round-trip.
#[test]
fn source_root_without_trailing_slash() {
let json =
r#"{"version":3,"sourceRoot":"src","sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.sources[0], "srca.js");
let output = sm.to_json();
let sm2 = SourceMap::from_json(&output).unwrap();
assert_eq!(sm2.sources[0], "srca.js");
}
// "{}" lacks required fields and must fail to parse.
#[test]
fn parse_empty_json_object() {
let result = SourceMap::from_json("{}");
assert!(result.is_err());
}
// Version 0 is rejected with InvalidVersion(0).
#[test]
fn parse_version_0() {
let json = r#"{"version":0,"sources":[],"names":[],"mappings":""}"#;
assert!(matches!(SourceMap::from_json(json).unwrap_err(), ParseError::InvalidVersion(0)));
}
// Version 4 is rejected with InvalidVersion(4).
#[test]
fn parse_version_4() {
let json = r#"{"version":4,"sources":[],"names":[],"mappings":""}"#;
assert!(matches!(SourceMap::from_json(json).unwrap_err(), ParseError::InvalidVersion(4)));
}
// Unknown top-level JSON fields are ignored rather than rejected.
#[test]
fn parse_extra_unknown_fields_ignored() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA","x_custom_field":true,"x_debug":{"foo":"bar"}}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.mapping_count(), 1);
}
// An invalid base64-VLQ character ('!') surfaces as ParseError::Vlq.
#[test]
fn parse_vlq_error_propagated() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AA!A"}"#;
let result = SourceMap::from_json(json);
assert!(result.is_err());
assert!(matches!(result.unwrap_err(), ParseError::Vlq(_)));
}
// A VLQ value cut off mid-sequence ("g" has its continuation bit set) fails.
#[test]
fn parse_truncated_vlq_error() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"g"}"#;
let result = SourceMap::from_json(json);
assert!(result.is_err());
}
// to_json output for a map using every optional field is parseable JSON.
#[test]
fn to_json_produces_valid_json() {
let json = r#"{"version":3,"file":"out.js","sourceRoot":"src/","sources":["a.ts","b.ts"],"sourcesContent":["const x = 1;\nconst y = \"hello\";",null],"names":["x","y"],"mappings":"AAAAA,KACAC;AACA","ignoreList":[1]}"#;
let sm = SourceMap::from_json(json).unwrap();
let output = sm.to_json();
let _: serde_json::Value = serde_json::from_str(&output).unwrap();
}
// Quotes, newlines, tabs, and backslashes in strings survive a JSON round-trip.
#[test]
fn to_json_escapes_special_chars() {
let json = r#"{"version":3,"sources":["path/with\"quotes.js"],"sourcesContent":["line1\nline2\ttab\\backslash"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let output = sm.to_json();
let _: serde_json::Value = serde_json::from_str(&output).unwrap();
let sm2 = SourceMap::from_json(&output).unwrap();
assert_eq!(sm2.sources_content[0].as_deref(), Some("line1\nline2\ttab\\backslash"));
}
// An empty map serializes and re-parses to an empty map.
#[test]
fn to_json_empty_map() {
let json = r#"{"version":3,"sources":[],"names":[],"mappings":""}"#;
let sm = SourceMap::from_json(json).unwrap();
let output = sm.to_json();
let sm2 = SourceMap::from_json(&output).unwrap();
assert_eq!(sm2.mapping_count(), 0);
assert!(sm2.sources.is_empty());
}
// Name indices on 5-field segments are preserved through a to_json round-trip.
#[test]
fn to_json_roundtrip_with_names() {
let mappings_data =
vec![vec![vec![0_i64, 0, 0, 0, 0], vec![10, 0, 0, 10, 1], vec![20, 0, 1, 0, 2]]];
let json = build_sourcemap_json(&["src.js"], &["foo", "bar", "baz"], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
let output = sm.to_json();
let sm2 = SourceMap::from_json(&output).unwrap();
for m in sm2.all_mappings() {
if m.source != NO_SOURCE && m.name != NO_NAME {
let loc = sm2.original_position_for(m.generated_line, m.generated_column).unwrap();
assert!(loc.name.is_some());
}
}
}
// A section's column offset shifts its mappings right; columns before the
// offset on the first generated line have no mapping.
#[test]
fn indexed_source_map_column_offset() {
let json = r#"{
"version": 3,
"sections": [
{
"offset": {"line": 0, "column": 10},
"map": {
"version": 3,
"sources": ["a.js"],
"names": [],
"mappings": "AAAA"
}
}
]
}"#;
let sm = SourceMap::from_json(json).unwrap();
let loc = sm.original_position_for(0, 10).unwrap();
assert_eq!(loc.line, 0);
assert_eq!(loc.column, 0);
assert!(sm.original_position_for(0, 0).is_none());
}
// The column offset applies only to the section's first line; subsequent
// lines keep their own columns.
#[test]
fn indexed_source_map_column_offset_only_first_line() {
let json = r#"{
"version": 3,
"sections": [
{
"offset": {"line": 0, "column": 20},
"map": {
"version": 3,
"sources": ["a.js"],
"names": [],
"mappings": "AAAA;AAAA"
}
}
]
}"#;
let sm = SourceMap::from_json(json).unwrap();
let loc = sm.original_position_for(0, 20).unwrap();
assert_eq!(loc.column, 0);
let loc = sm.original_position_for(1, 0).unwrap();
assert_eq!(loc.column, 0);
}
// A section whose map is completely empty contributes nothing; later
// sections still resolve.
#[test]
fn indexed_source_map_empty_section() {
let json = r#"{
"version": 3,
"sections": [
{
"offset": {"line": 0, "column": 0},
"map": {
"version": 3,
"sources": [],
"names": [],
"mappings": ""
}
},
{
"offset": {"line": 5, "column": 0},
"map": {
"version": 3,
"sources": ["b.js"],
"names": [],
"mappings": "AAAA"
}
}
]
}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.sources.len(), 1);
let loc = sm.original_position_for(5, 0).unwrap();
assert_eq!(sm.source(loc.source), "b.js");
}
// sourcesContent from each section is concatenated in section order.
#[test]
fn indexed_source_map_with_sources_content() {
let json = r#"{
"version": 3,
"sections": [
{
"offset": {"line": 0, "column": 0},
"map": {
"version": 3,
"sources": ["a.js"],
"sourcesContent": ["var a = 1;"],
"names": [],
"mappings": "AAAA"
}
},
{
"offset": {"line": 5, "column": 0},
"map": {
"version": 3,
"sources": ["b.js"],
"sourcesContent": ["var b = 2;"],
"names": [],
"mappings": "AAAA"
}
}
]
}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.sources_content.len(), 2);
assert_eq!(sm.sources_content[0], Some("var a = 1;".to_string()));
assert_eq!(sm.sources_content[1], Some("var b = 2;".to_string()));
}
// A section's ignoreList is carried into the flattened map.
#[test]
fn indexed_source_map_with_ignore_list() {
let json = r#"{
"version": 3,
"sections": [
{
"offset": {"line": 0, "column": 0},
"map": {
"version": 3,
"sources": ["app.js", "vendor.js"],
"names": [],
"mappings": "AAAA",
"ignoreList": [1]
}
}
]
}"#;
let sm = SourceMap::from_json(json).unwrap();
assert!(!sm.ignore_list.is_empty());
}
// A near-u32::MAX column still snaps back to the line's only segment.
#[test]
fn lookup_max_column_on_line() {
let mappings_data = vec![vec![vec![0_i64, 0, 0, 0]]];
let json = build_sourcemap_json(&["a.js"], &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
let loc = sm.original_position_for(0, u32::MAX - 1).unwrap();
assert_eq!(loc.line, 0);
assert_eq!(loc.column, 0);
}
// A u32::MAX line index is simply out of range, not a panic.
#[test]
fn mappings_for_line_beyond_end() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert!(sm.mappings_for_line(u32::MAX).is_empty());
}
// Non-ASCII source paths and names are stored and resolved intact.
#[test]
fn source_with_unicode_path() {
let json =
r#"{"version":3,"sources":["src/日本語.ts"],"names":["変数"],"mappings":"AAAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.sources[0], "src/日本語.ts");
assert_eq!(sm.names[0], "変数");
let loc = sm.original_position_for(0, 0).unwrap();
assert_eq!(sm.source(loc.source), "src/日本語.ts");
assert_eq!(sm.name(loc.name.unwrap()), "変数");
}
// Unicode sources and sourcesContent survive a to_json round-trip.
#[test]
fn to_json_roundtrip_unicode_sources() {
let json = r#"{"version":3,"sources":["src/日本語.ts"],"sourcesContent":["const 変数 = 1;"],"names":["変数"],"mappings":"AAAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let output = sm.to_json();
let _: serde_json::Value = serde_json::from_str(&output).unwrap();
let sm2 = SourceMap::from_json(&output).unwrap();
assert_eq!(sm2.sources[0], "src/日本語.ts");
assert_eq!(sm2.sources_content[0], Some("const 変数 = 1;".to_string()));
}
// With 100 sources, source_index still resolves each path to its own index.
#[test]
fn many_sources_lookup() {
let sources: Vec<String> = (0..100).map(|i| format!("src/file{i}.js")).collect();
let source_strs: Vec<&str> = sources.iter().map(|s| s.as_str()).collect();
let mappings_data = vec![
sources
.iter()
.enumerate()
.map(|(i, _)| vec![(i * 10) as i64, i as i64, 0, 0])
.collect::<Vec<_>>(),
];
let json = build_sourcemap_json(&source_strs, &[], &mappings_data);
let sm = SourceMap::from_json(&json).unwrap();
for (i, src) in sources.iter().enumerate() {
assert_eq!(sm.source_index(src), Some(i as u32));
}
}
// A cloned SourceMap is independent yet equivalent: same sources, counts,
// and working lookups.
#[test]
fn clone_sourcemap() {
let json = r#"{"version":3,"sources":["a.js"],"names":["x"],"mappings":"AAAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let sm2 = sm.clone();
assert_eq!(sm2.sources, sm.sources);
assert_eq!(sm2.mapping_count(), sm.mapping_count());
let loc = sm2.original_position_for(0, 0).unwrap();
assert_eq!(sm2.source(loc.source), "a.js");
}
// "debugId" (camelCase) is parsed into the optional debug_id field.
#[test]
fn parse_debug_id() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA","debugId":"85314830-023f-4cf1-a267-535f4e37bb17"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.debug_id.as_deref(), Some("85314830-023f-4cf1-a267-535f4e37bb17"));
}
// The snake_case alias "debug_id" is accepted as well.
#[test]
fn parse_debug_id_snake_case() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA","debug_id":"85314830-023f-4cf1-a267-535f4e37bb17"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.debug_id.as_deref(), Some("85314830-023f-4cf1-a267-535f4e37bb17"));
}
// Absent debug id parses to None.
#[test]
fn parse_no_debug_id() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.debug_id, None);
}
// to_json re-emits the debug id under the camelCase key and it re-parses equal.
#[test]
fn debug_id_roundtrip() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA","debugId":"85314830-023f-4cf1-a267-535f4e37bb17"}"#;
let sm = SourceMap::from_json(json).unwrap();
let output = sm.to_json();
assert!(output.contains(r#""debugId":"85314830-023f-4cf1-a267-535f4e37bb17""#));
let sm2 = SourceMap::from_json(&output).unwrap();
assert_eq!(sm.debug_id, sm2.debug_id);
}
// When no debug id is set, to_json omits the key entirely.
#[test]
fn debug_id_not_in_json_when_absent() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let output = sm.to_json();
assert!(!output.contains("debugId"));
}
fn generate_test_sourcemap(lines: usize, segs_per_line: usize, num_sources: usize) -> String {
let sources: Vec<String> = (0..num_sources).map(|i| format!("src/file{i}.js")).collect();
let names: Vec<String> = (0..20).map(|i| format!("var{i}")).collect();
let mut mappings_parts = Vec::with_capacity(lines);
let mut gen_col;
let mut src: i64 = 0;
let mut src_line: i64 = 0;
let mut src_col: i64;
let mut name: i64 = 0;
for _ in 0..lines {
gen_col = 0i64;
let mut line_parts = Vec::with_capacity(segs_per_line);
for s in 0..segs_per_line {
let gc_delta = 2 + (s as i64 * 3) % 20;
gen_col += gc_delta;
let src_delta = i64::from(s % 7 == 0);
src = (src + src_delta) % num_sources as i64;
src_line += 1;
src_col = (s as i64 * 5 + 1) % 30;
let has_name = s % 4 == 0;
if has_name {
name = (name + 1) % names.len() as i64;
}
let segment = if has_name {
srcmap_codec::Segment::five(gen_col, src, src_line, src_col, name)
} else {
srcmap_codec::Segment::four(gen_col, src, src_line, src_col)
};
line_parts.push(segment);
}
mappings_parts.push(line_parts);
}
let encoded = srcmap_codec::encode(&mappings_parts);
format!(
r#"{{"version":3,"sources":[{}],"names":[{}],"mappings":"{}"}}"#,
sources.iter().map(|s| format!("\"{s}\"")).collect::<Vec<_>>().join(","),
names.iter().map(|n| format!("\"{n}\"")).collect::<Vec<_>>().join(","),
encoded,
)
}
// Fixture for the bias tests: one source, three 4-field segments on line 0 at
// generated columns 0, 5, and 10 (each mapping to the same original columns),
// leaving gaps where GLB/LUB snapping behavior can be observed.
fn bias_map() -> &'static str {
r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"AAAA,KAAK,KAAK"}"#
}
// GLB lookup on a column that exactly hits a segment returns that segment.
#[test]
fn original_position_glb_exact_match() {
let sm = SourceMap::from_json(bias_map()).unwrap();
let loc = sm.original_position_for_with_bias(0, 5, Bias::GreatestLowerBound).unwrap();
assert_eq!(loc.column, 5);
}
// GLB lookup between segments (column 7) snaps left to the segment at 5.
#[test]
fn original_position_glb_snaps_left() {
let sm = SourceMap::from_json(bias_map()).unwrap();
let loc = sm.original_position_for_with_bias(0, 7, Bias::GreatestLowerBound).unwrap();
assert_eq!(loc.column, 5);
}
// LUB lookup on an exact segment column also returns that segment.
#[test]
fn original_position_lub_exact_match() {
let sm = SourceMap::from_json(bias_map()).unwrap();
let loc = sm.original_position_for_with_bias(0, 5, Bias::LeastUpperBound).unwrap();
assert_eq!(loc.column, 5);
}
// LUB lookup between segments (column 3) snaps right to the segment at 5.
#[test]
fn original_position_lub_snaps_right() {
let sm = SourceMap::from_json(bias_map()).unwrap();
let loc = sm.original_position_for_with_bias(0, 3, Bias::LeastUpperBound).unwrap();
assert_eq!(loc.column, 5);
}
// LUB at column 0 hits the first segment directly.
#[test]
fn original_position_lub_before_first() {
let sm = SourceMap::from_json(bias_map()).unwrap();
let loc = sm.original_position_for_with_bias(0, 0, Bias::LeastUpperBound).unwrap();
assert_eq!(loc.column, 0);
}
// LUB past the last segment (column 15 > 10) has no upper bound: None.
#[test]
fn original_position_lub_after_last() {
let sm = SourceMap::from_json(bias_map()).unwrap();
let loc = sm.original_position_for_with_bias(0, 15, Bias::LeastUpperBound);
assert!(loc.is_none());
}
// GLB at column 0 hits the first segment directly.
#[test]
fn original_position_glb_before_first() {
let sm = SourceMap::from_json(bias_map()).unwrap();
let loc = sm.original_position_for_with_bias(0, 0, Bias::GreatestLowerBound).unwrap();
assert_eq!(loc.column, 0);
}
// Reverse lookup with LUB: original column 3 snaps right to generated column 5.
#[test]
fn generated_position_lub() {
let sm = SourceMap::from_json(bias_map()).unwrap();
let loc =
sm.generated_position_for_with_bias("input.js", 0, 3, Bias::LeastUpperBound).unwrap();
assert_eq!(loc.column, 5);
}
// Reverse lookup with GLB: original column 7 snaps left to generated column 5.
#[test]
fn generated_position_glb() {
let sm = SourceMap::from_json(bias_map()).unwrap();
let loc = sm
.generated_position_for_with_bias("input.js", 0, 7, Bias::GreatestLowerBound)
.unwrap();
assert_eq!(loc.column, 5);
}
// `generated_position_for` without an explicit bias must behave like GLB.
#[test]
fn generated_position_for_default_bias_is_glb() {
let sm = SourceMap::from_json(bias_map()).unwrap();
let glb = sm.generated_position_for("input.js", 0, 7).unwrap();
let glb_explicit = sm
.generated_position_for_with_bias("input.js", 0, 7, Bias::GreatestLowerBound)
.unwrap();
assert_eq!(glb.line, glb_explicit.line);
assert_eq!(glb.column, glb_explicit.column);
}
// A generated range covering columns 0..10 on line 0 maps to the matching
// original range in the single source of the fixture.
#[test]
fn map_range_basic() {
let sm = SourceMap::from_json(bias_map()).unwrap();
let range = sm.map_range(0, 0, 0, 10).unwrap();
assert_eq!(range.source, 0);
assert_eq!(range.original_start_line, 0);
assert_eq!(range.original_start_column, 0);
assert_eq!(range.original_end_line, 0);
assert_eq!(range.original_end_column, 10);
}
// A range on a generated line with no mappings yields None.
#[test]
fn map_range_no_mapping() {
let sm = SourceMap::from_json(bias_map()).unwrap();
let range = sm.map_range(0, 0, 5, 0);
assert!(range.is_none());
}
// A range whose endpoints map into different sources cannot be collapsed
// into a single MappedRange, so the lookup yields None.
#[test]
fn map_range_different_sources() {
let json = r#"{"version":3,"sources":["a.js","b.js"],"names":[],"mappings":"AAAA;ACAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let range = sm.map_range(0, 0, 1, 0);
assert!(range.is_none());
}
// Unknown x_* extension fields must round-trip: stored in `extensions` on
// parse and re-emitted by `to_json`.
#[test]
fn extension_fields_preserved() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA","x_facebook_sources":[[{"names":["<global>"]}]],"x_google_linecount":42}"#;
let sm = SourceMap::from_json(json).unwrap();
assert!(sm.extensions.contains_key("x_facebook_sources"));
assert!(sm.extensions.contains_key("x_google_linecount"));
assert_eq!(sm.extensions.get("x_google_linecount"), Some(&serde_json::json!(42)));
let output = sm.to_json();
assert!(output.contains("x_facebook_sources"));
assert!(output.contains("x_google_linecount"));
}
// The legacy "x_google_ignoreList" key populates `ignore_list` when the
// standard "ignoreList" key is absent.
#[test]
fn x_google_ignorelist_fallback() {
let json = r#"{"version":3,"sources":["a.js","b.js"],"names":[],"mappings":"AAAA","x_google_ignoreList":[1]}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.ignore_list, vec![1]);
}
// When both keys are present, the standard "ignoreList" wins.
#[test]
fn ignorelist_takes_precedence_over_x_google() {
let json = r#"{"version":3,"sources":["a.js","b.js"],"names":[],"mappings":"AAAA","ignoreList":[0],"x_google_ignoreList":[1]}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.ignore_list, vec![0]);
}
// `//# sourceMappingURL=` with a plain path is an External reference.
#[test]
fn source_mapping_url_external() {
let source = "var a = 1;\n//# sourceMappingURL=app.js.map\n";
let result = parse_source_mapping_url(source).unwrap();
assert_eq!(result, SourceMappingUrl::External("app.js.map".to_string()));
}
// A base64 data URL decodes to the embedded JSON (Inline variant).
#[test]
fn source_mapping_url_inline() {
let json = r#"{"version":3,"sources":[],"names":[],"mappings":""}"#;
let b64 = base64_encode_simple(json);
let source =
format!("var a = 1;\n//# sourceMappingURL=data:application/json;base64,{b64}\n");
match parse_source_mapping_url(&source).unwrap() {
SourceMappingUrl::Inline(decoded) => {
assert_eq!(decoded, json);
}
SourceMappingUrl::External(_) => panic!("expected inline"),
}
}
// The deprecated `//@` directive form is still recognized.
#[test]
fn source_mapping_url_at_sign() {
let source = "var a = 1;\n//@ sourceMappingURL=old-style.map";
let result = parse_source_mapping_url(source).unwrap();
assert_eq!(result, SourceMappingUrl::External("old-style.map".to_string()));
}
// CSS-style `/*# ... */` comments are also recognized.
#[test]
fn source_mapping_url_css_comment() {
let source = "body { }\n/*# sourceMappingURL=styles.css.map */";
let result = parse_source_mapping_url(source).unwrap();
assert_eq!(result, SourceMappingUrl::External("styles.css.map".to_string()));
}
// Source text without any directive yields None.
#[test]
fn source_mapping_url_none() {
let source = "var a = 1;";
assert!(parse_source_mapping_url(source).is_none());
}
// `to_json_with_options(true)` strips sourcesContent; plain `to_json` keeps it.
#[test]
fn exclude_content_option() {
let json = r#"{"version":3,"sources":["a.js"],"sourcesContent":["var a;"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let with_content = sm.to_json();
assert!(with_content.contains("sourcesContent"));
let without_content = sm.to_json_with_options(true);
assert!(!without_content.contains("sourcesContent"));
}
// A well-formed map produces no deep-validation warnings.
#[test]
fn validate_deep_clean_map() {
let sm = SourceMap::from_json(simple_map()).unwrap();
let warnings = validate_deep(&sm);
assert!(warnings.is_empty(), "unexpected warnings: {warnings:?}");
}
// A source never referenced by any mapping is flagged by name in a warning.
#[test]
fn validate_deep_unreferenced_source() {
let json =
r#"{"version":3,"sources":["used.js","unused.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let warnings = validate_deep(&sm);
assert!(warnings.iter().any(|w| w.contains("unused.js")));
}
// Building a map directly from constituent parts: two mappings on two lines
// must be queryable via `original_position_for` exactly as parsed maps are.
#[test]
fn from_parts_basic() {
let mappings = vec![
Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 0,
name: NO_NAME,
is_range_mapping: false,
},
Mapping {
generated_line: 1,
generated_column: 4,
source: 0,
original_line: 1,
original_column: 2,
name: NO_NAME,
is_range_mapping: false,
},
];
let sm = SourceMap::from_parts(
Some("out.js".to_string()),
None,
vec!["input.js".to_string()],
vec![Some("var x = 1;".to_string())],
vec![],
mappings,
vec![],
None,
None,
);
assert_eq!(sm.line_count(), 2);
assert_eq!(sm.mapping_count(), 2);
let loc = sm.original_position_for(0, 0).unwrap();
assert_eq!(loc.source, 0);
assert_eq!(loc.line, 0);
assert_eq!(loc.column, 0);
let loc = sm.original_position_for(1, 4).unwrap();
assert_eq!(loc.line, 1);
assert_eq!(loc.column, 2);
}
// A map built from no parts at all is fully empty: no lines, no mappings,
// and lookups miss.
#[test]
fn from_parts_empty() {
    let sm = SourceMap::from_parts(
        None,
        None,
        Vec::new(),
        Vec::new(),
        Vec::new(),
        Vec::new(),
        Vec::new(),
        None,
        None,
    );
    assert!(sm.original_position_for(0, 0).is_none());
    assert_eq!(sm.mapping_count(), 0);
    assert_eq!(sm.line_count(), 0);
}
// A mapping that references name index 0 resolves through the names table.
#[test]
fn from_parts_with_names() {
let mappings = vec![Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 0,
name: 0,
is_range_mapping: false,
}];
let sm = SourceMap::from_parts(
None,
None,
vec!["input.js".to_string()],
vec![],
vec!["myVar".to_string()],
mappings,
vec![],
None,
None,
);
let loc = sm.original_position_for(0, 0).unwrap();
assert_eq!(loc.name, Some(0));
assert_eq!(sm.name(0), "myVar");
}
// Rebuilding a parsed map from its own parts must preserve every lookup:
// parse a generated map, reconstruct via `from_parts`, and compare all
// forward lookups at each known mapping position.
#[test]
fn from_parts_roundtrip_via_json() {
let json = generate_test_sourcemap(50, 10, 3);
let sm = SourceMap::from_json(&json).unwrap();
let sm2 = SourceMap::from_parts(
sm.file.clone(),
sm.source_root.clone(),
sm.sources.clone(),
sm.sources_content.clone(),
sm.names.clone(),
sm.all_mappings().to_vec(),
sm.ignore_list.clone(),
sm.debug_id.clone(),
None,
);
assert_eq!(sm2.mapping_count(), sm.mapping_count());
assert_eq!(sm2.line_count(), sm.line_count());
for m in sm.all_mappings() {
// Only positions with a real source can be compared meaningfully.
if m.source != NO_SOURCE {
let a = sm.original_position_for(m.generated_line, m.generated_column);
let b = sm2.original_position_for(m.generated_line, m.generated_column);
match (a, b) {
(Some(a), Some(b)) => {
assert_eq!(a.source, b.source);
assert_eq!(a.line, b.line);
assert_eq!(a.column, b.column);
}
(None, None) => {}
_ => panic!("mismatch at ({}, {})", m.generated_line, m.generated_column),
}
}
}
}
// Reverse (original -> generated) lookup works on a map built from parts.
#[test]
fn from_parts_reverse_lookup() {
let mappings = vec![
Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 10,
original_column: 5,
name: NO_NAME,
is_range_mapping: false,
},
Mapping {
generated_line: 1,
generated_column: 8,
source: 0,
original_line: 20,
original_column: 0,
name: NO_NAME,
is_range_mapping: false,
},
];
let sm = SourceMap::from_parts(
None,
None,
vec!["src.js".to_string()],
vec![],
vec![],
mappings,
vec![],
None,
None,
);
let loc = sm.generated_position_for("src.js", 10, 5).unwrap();
assert_eq!(loc.line, 0);
assert_eq!(loc.column, 0);
let loc = sm.generated_position_for("src.js", 20, 0).unwrap();
assert_eq!(loc.line, 1);
assert_eq!(loc.column, 8);
}
// Mappings on lines 0 and 5 only: line_count spans the gap (6 lines) and
// lookups on the unmapped lines in between miss.
#[test]
fn from_parts_sparse_lines() {
let mappings = vec![
Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 0,
name: NO_NAME,
is_range_mapping: false,
},
Mapping {
generated_line: 5,
generated_column: 0,
source: 0,
original_line: 5,
original_column: 0,
name: NO_NAME,
is_range_mapping: false,
},
];
let sm = SourceMap::from_parts(
None,
None,
vec!["src.js".to_string()],
vec![],
vec![],
mappings,
vec![],
None,
None,
);
assert_eq!(sm.line_count(), 6);
assert!(sm.original_position_for(0, 0).is_some());
assert!(sm.original_position_for(2, 0).is_none());
assert!(sm.original_position_for(5, 0).is_some());
}
// `from_json_lines(json, lo, hi)` must decode exactly the same per-line
// mappings as a full parse, restricted to the requested [lo, hi) line range.
#[test]
fn from_json_lines_basic() {
let json = generate_test_sourcemap(10, 5, 2);
let sm_full = SourceMap::from_json(&json).unwrap();
let sm_partial = SourceMap::from_json_lines(&json, 3, 7).unwrap();
for line in 3..7u32 {
let full_mappings = sm_full.mappings_for_line(line);
let partial_mappings = sm_partial.mappings_for_line(line);
assert_eq!(
full_mappings.len(),
partial_mappings.len(),
"line {line} mapping count mismatch"
);
for (a, b) in full_mappings.iter().zip(partial_mappings.iter()) {
assert_eq!(a.generated_column, b.generated_column);
assert_eq!(a.source, b.source);
assert_eq!(a.original_line, b.original_line);
assert_eq!(a.original_column, b.original_column);
assert_eq!(a.name, b.name);
}
}
}
// Range starting at line 0 (no preceding delta state to skip).
#[test]
fn from_json_lines_first_lines() {
let json = generate_test_sourcemap(10, 5, 2);
let sm_full = SourceMap::from_json(&json).unwrap();
let sm_partial = SourceMap::from_json_lines(&json, 0, 3).unwrap();
for line in 0..3u32 {
let full_mappings = sm_full.mappings_for_line(line);
let partial_mappings = sm_partial.mappings_for_line(line);
assert_eq!(full_mappings.len(), partial_mappings.len());
}
}
// Range ending at the last line of the map.
#[test]
fn from_json_lines_last_lines() {
let json = generate_test_sourcemap(10, 5, 2);
let sm_full = SourceMap::from_json(&json).unwrap();
let sm_partial = SourceMap::from_json_lines(&json, 7, 10).unwrap();
for line in 7..10u32 {
let full_mappings = sm_full.mappings_for_line(line);
let partial_mappings = sm_partial.mappings_for_line(line);
assert_eq!(full_mappings.len(), partial_mappings.len(), "line {line}");
}
}
// An empty [5, 5) range decodes no mappings at all.
#[test]
fn from_json_lines_empty_range() {
let json = generate_test_sourcemap(10, 5, 2);
let sm = SourceMap::from_json_lines(&json, 5, 5).unwrap();
assert_eq!(sm.mapping_count(), 0);
}
// An end bound past the map's last line is clamped, not an error.
#[test]
fn from_json_lines_beyond_end() {
let json = generate_test_sourcemap(5, 3, 1);
let sm = SourceMap::from_json_lines(&json, 3, 100).unwrap();
assert!(sm.mapping_count() > 0);
}
// A single-line range [5, 6) matches the full parse for that line.
#[test]
fn from_json_lines_single_line() {
let json = generate_test_sourcemap(10, 5, 2);
let sm_full = SourceMap::from_json(&json).unwrap();
let sm_partial = SourceMap::from_json_lines(&json, 5, 6).unwrap();
let full_mappings = sm_full.mappings_for_line(5);
let partial_mappings = sm_partial.mappings_for_line(5);
assert_eq!(full_mappings.len(), partial_mappings.len());
}
// LazySourceMap supports the same basic forward lookup as the eager map.
#[test]
fn lazy_basic_lookup() {
let json = r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"AAAA;AACA"}"#;
let sm = LazySourceMap::from_json(json).unwrap();
assert_eq!(sm.line_count(), 2);
assert_eq!(sm.sources, vec!["input.js"]);
let loc = sm.original_position_for(0, 0).unwrap();
assert_eq!(sm.source(loc.source), "input.js");
assert_eq!(loc.line, 0);
assert_eq!(loc.column, 0);
}
// Lazy and eager maps must agree on every lookup across a generated map.
#[test]
fn lazy_multiple_lines() {
let json = generate_test_sourcemap(20, 5, 3);
let sm_eager = SourceMap::from_json(&json).unwrap();
let sm_lazy = LazySourceMap::from_json(&json).unwrap();
assert_eq!(sm_lazy.line_count(), sm_eager.line_count());
for m in sm_eager.all_mappings() {
// Generated-only mappings have no original position to compare.
if m.source == NO_SOURCE {
continue;
}
let eager_loc =
sm_eager.original_position_for(m.generated_line, m.generated_column).unwrap();
let lazy_loc =
sm_lazy.original_position_for(m.generated_line, m.generated_column).unwrap();
assert_eq!(eager_loc.source, lazy_loc.source);
assert_eq!(eager_loc.line, lazy_loc.line);
assert_eq!(eager_loc.column, lazy_loc.column);
assert_eq!(eager_loc.name, lazy_loc.name);
}
}
// An empty mappings string yields a zero-line lazy map.
#[test]
fn lazy_empty_mappings() {
let json = r#"{"version":3,"sources":[],"names":[],"mappings":""}"#;
let sm = LazySourceMap::from_json(json).unwrap();
assert_eq!(sm.line_count(), 0);
assert!(sm.original_position_for(0, 0).is_none());
}
// Consecutive semicolons are empty lines: counted, but lookups on them miss.
#[test]
fn lazy_empty_lines() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA;;;AACA"}"#;
let sm = LazySourceMap::from_json(json).unwrap();
assert_eq!(sm.line_count(), 4);
assert!(sm.original_position_for(0, 0).is_some());
assert!(sm.original_position_for(1, 0).is_none());
assert!(sm.original_position_for(2, 0).is_none());
assert!(sm.original_position_for(3, 0).is_some());
}
// Decoding the same line twice returns consistent results (the second call
// should be served from the decoded-line cache, not re-decoded differently).
#[test]
fn lazy_decode_line_caching() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA,KACA;AACA"}"#;
let sm = LazySourceMap::from_json(json).unwrap();
let line0_a = sm.decode_line(0).unwrap();
let line0_b = sm.decode_line(0).unwrap();
assert_eq!(line0_a.len(), line0_b.len());
assert_eq!(line0_a[0].generated_column, line0_b[0].generated_column);
}
// Name indices decode correctly in the lazy path.
#[test]
fn lazy_with_names() {
let json = r#"{"version":3,"sources":["input.js"],"names":["foo","bar"],"mappings":"AAAAA,KACAC"}"#;
let sm = LazySourceMap::from_json(json).unwrap();
let loc = sm.original_position_for(0, 0).unwrap();
assert_eq!(loc.name, Some(0));
assert_eq!(sm.name(0), "foo");
let loc = sm.original_position_for(0, 5).unwrap();
assert_eq!(loc.name, Some(1));
assert_eq!(sm.name(1), "bar");
}
// Lines past the end of the map decode to an empty segment list, not an error.
#[test]
fn lazy_nonexistent_line() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = LazySourceMap::from_json(json).unwrap();
assert!(sm.original_position_for(99, 0).is_none());
let line = sm.decode_line(99).unwrap();
assert!(line.is_empty());
}
// Converting a lazy map into an eager SourceMap preserves every lookup.
#[test]
fn lazy_into_sourcemap() {
let json = generate_test_sourcemap(20, 5, 3);
let sm_eager = SourceMap::from_json(&json).unwrap();
let sm_lazy = LazySourceMap::from_json(&json).unwrap();
let sm_converted = sm_lazy.into_sourcemap().unwrap();
assert_eq!(sm_converted.mapping_count(), sm_eager.mapping_count());
assert_eq!(sm_converted.line_count(), sm_eager.line_count());
for m in sm_eager.all_mappings() {
let a = sm_eager.original_position_for(m.generated_line, m.generated_column);
let b = sm_converted.original_position_for(m.generated_line, m.generated_column);
match (a, b) {
(Some(a), Some(b)) => {
assert_eq!(a.source, b.source);
assert_eq!(a.line, b.line);
assert_eq!(a.column, b.column);
}
(None, None) => {}
_ => panic!("mismatch at ({}, {})", m.generated_line, m.generated_column),
}
}
}
// Source name -> index lookup on the lazy map.
#[test]
fn lazy_source_index_lookup() {
let json = r#"{"version":3,"sources":["a.js","b.js"],"names":[],"mappings":"AAAA;ACAA"}"#;
let sm = LazySourceMap::from_json(json).unwrap();
assert_eq!(sm.source_index("a.js"), Some(0));
assert_eq!(sm.source_index("b.js"), Some(1));
assert_eq!(sm.source_index("c.js"), None);
}
// Per-line mapping counts: 2 segments on line 0, 1 on line 1, none past end.
#[test]
fn lazy_mappings_for_line() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA,KACA;AACA"}"#;
let sm = LazySourceMap::from_json(json).unwrap();
let line0 = sm.mappings_for_line(0);
assert_eq!(line0.len(), 2);
let line1 = sm.mappings_for_line(1);
assert_eq!(line1.len(), 1);
let line99 = sm.mappings_for_line(99);
assert!(line99.is_empty());
}
// Decoding only a couple of lines out of a 100-line map must still agree
// with the eager parse for those lines.
#[test]
fn lazy_large_map_selective_decode() {
let json = generate_test_sourcemap(100, 10, 5);
let sm_eager = SourceMap::from_json(&json).unwrap();
let sm_lazy = LazySourceMap::from_json(&json).unwrap();
for line in [50, 75] {
let eager_mappings = sm_eager.mappings_for_line(line);
let lazy_mappings = sm_lazy.mappings_for_line(line);
assert_eq!(eager_mappings.len(), lazy_mappings.len(), "line {line} count mismatch");
for (a, b) in eager_mappings.iter().zip(lazy_mappings.iter()) {
assert_eq!(a.generated_column, b.generated_column);
assert_eq!(a.source, b.source);
assert_eq!(a.original_line, b.original_line);
assert_eq!(a.original_column, b.original_column);
assert_eq!(a.name, b.name);
}
}
}
// A one-field ("A") segment carries no source info; the next segment does.
#[test]
fn lazy_single_field_segments() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"A,KAAAA"}"#;
let sm = LazySourceMap::from_json(json).unwrap();
assert!(sm.original_position_for(0, 0).is_none());
let loc = sm.original_position_for(0, 5).unwrap();
assert_eq!(loc.source, 0);
}
// Display impls for the error variants include their descriptive prefixes.
#[test]
fn parse_error_display_vlq() {
let err = ParseError::Vlq(srcmap_codec::DecodeError::UnexpectedEof { offset: 3 });
assert!(err.to_string().contains("VLQ decode error"));
}
#[test]
fn parse_error_display_scopes() {
let err = ParseError::Scopes(srcmap_scopes::ScopesError::UnclosedScope);
assert!(err.to_string().contains("scopes decode error"));
}
// Two sections carrying the same source and name must be deduplicated when
// the indexed map is flattened.
#[test]
fn indexed_map_with_names_in_sections() {
let json = r#"{
"version": 3,
"sections": [
{
"offset": {"line": 0, "column": 0},
"map": {
"version": 3,
"sources": ["a.js"],
"names": ["foo"],
"mappings": "AAAAA"
}
},
{
"offset": {"line": 1, "column": 0},
"map": {
"version": 3,
"sources": ["a.js"],
"names": ["foo"],
"mappings": "AAAAA"
}
}
]
}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.sources.len(), 1);
assert_eq!(sm.names.len(), 1);
}
// A section's own ignoreList is propagated to the flattened map.
#[test]
fn indexed_map_with_ignore_list() {
let json = r#"{
"version": 3,
"sections": [
{
"offset": {"line": 0, "column": 0},
"map": {
"version": 3,
"sources": ["vendor.js"],
"names": [],
"mappings": "AAAA",
"ignoreList": [0]
}
}
]
}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.ignore_list, vec![0]);
}
// A generated-only ("A") segment inside a section still parses.
#[test]
fn indexed_map_with_generated_only_segment() {
let json = r#"{
"version": 3,
"sections": [
{
"offset": {"line": 0, "column": 0},
"map": {
"version": 3,
"sources": ["a.js"],
"names": [],
"mappings": "A,AAAA"
}
}
]
}"#;
let sm = SourceMap::from_json(json).unwrap();
assert!(sm.mapping_count() >= 1);
}
// A section with an empty mappings string contributes nothing.
#[test]
fn indexed_map_empty_mappings() {
let json = r#"{
"version": 3,
"sections": [
{
"offset": {"line": 0, "column": 0},
"map": {
"version": 3,
"sources": [],
"names": [],
"mappings": ""
}
}
]
}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.mapping_count(), 0);
}
// GLB reverse lookup at an exact original position returns that position.
#[test]
fn generated_position_glb_exact_match() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA,EAAE,OAAO"}"#;
let sm = SourceMap::from_json(json).unwrap();
let loc = sm.generated_position_for_with_bias("a.js", 0, 0, Bias::GreatestLowerBound);
assert!(loc.is_some());
assert_eq!(loc.unwrap().column, 0);
}
// GLB still finds something when there is no exact original-position match.
#[test]
fn generated_position_glb_no_exact_match() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA,EAAE"}"#;
let sm = SourceMap::from_json(json).unwrap();
let loc = sm.generated_position_for_with_bias("a.js", 0, 0, Bias::GreatestLowerBound);
assert!(loc.is_some());
}
// NOTE(review): this test only asserts when a result is returned — if the
// lookup yields None it passes vacuously. Consider pinning the expected
// Some/None outcome explicitly.
#[test]
fn generated_position_glb_wrong_source() {
let json = r#"{"version":3,"sources":["a.js","b.js"],"names":[],"mappings":"AAAA,KCCA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let loc = sm.generated_position_for_with_bias("b.js", 5, 0, Bias::GreatestLowerBound);
if let Some(l) = loc {
assert_eq!(l.line, 0);
}
}
// A source name not present in the map yields None under LUB.
#[test]
fn generated_position_lub_wrong_source() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let loc =
sm.generated_position_for_with_bias("nonexistent.js", 0, 0, Bias::LeastUpperBound);
assert!(loc.is_none());
}
// A parsed ignoreList is serialized back out by `to_json`.
#[test]
fn to_json_with_ignore_list() {
let json = r#"{"version":3,"sources":["vendor.js"],"names":[],"mappings":"AAAA","ignoreList":[0]}"#;
let sm = SourceMap::from_json(json).unwrap();
let output = sm.to_json();
assert!(output.contains("\"ignoreList\":[0]"));
}
// x_* extension fields survive a parse/serialize round trip.
#[test]
fn to_json_with_extensions() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA","x_custom":"test_value"}"#;
let sm = SourceMap::from_json(json).unwrap();
let output = sm.to_json();
assert!(output.contains("x_custom"));
assert!(output.contains("test_value"));
}
// Sources (and their content) are kept even when no mappings reference them.
#[test]
fn from_parts_empty_mappings() {
let sm = SourceMap::from_parts(
None,
None,
vec!["a.js".to_string()],
vec![Some("content".to_string())],
vec![],
vec![],
vec![],
None,
None,
);
assert_eq!(sm.mapping_count(), 0);
assert_eq!(sm.sources, vec!["a.js"]);
}
// `from_vlq` builds a map directly from a raw mappings string plus metadata.
#[test]
fn from_vlq_basic() {
let sm = SourceMap::from_vlq(
"AAAA;AACA",
vec!["a.js".to_string()],
vec![],
Some("out.js".to_string()),
None,
vec![Some("content".to_string())],
vec![],
None,
)
.unwrap();
assert_eq!(sm.file.as_deref(), Some("out.js"));
assert_eq!(sm.sources, vec!["a.js"]);
let loc = sm.original_position_for(0, 0).unwrap();
assert_eq!(sm.source(loc.source), "a.js");
assert_eq!(loc.line, 0);
}
// Lines inside the [1, 3) range are resolvable after a partial parse.
#[test]
fn from_json_lines_basic_coverage() {
let json =
r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA;AACA;AACA;AACA;AACA"}"#;
let sm = SourceMap::from_json_lines(json, 1, 3).unwrap();
assert!(sm.original_position_for(1, 0).is_some());
assert!(sm.original_position_for(2, 0).is_some());
}
// sourceRoot is prefixed onto source paths in the partial-parse path too.
#[test]
fn from_json_lines_with_source_root() {
let json = r#"{"version":3,"sourceRoot":"src/","sources":["a.js"],"names":[],"mappings":"AAAA;AACA"}"#;
let sm = SourceMap::from_json_lines(json, 0, 2).unwrap();
assert_eq!(sm.sources[0], "src/a.js");
}
// A null entry in "sources" is tolerated and still counted.
#[test]
fn from_json_lines_with_null_source() {
let json = r#"{"version":3,"sources":[null,"a.js"],"names":[],"mappings":"AAAA,KCCA"}"#;
let sm = SourceMap::from_json_lines(json, 0, 1).unwrap();
assert_eq!(sm.sources.len(), 2);
}
// Control characters, quotes, and backslashes in sources/sourcesContent must
// survive a serialize/re-parse round trip with correct JSON escaping.
#[test]
fn json_escaping_special_chars_sourcemap() {
let json = r#"{"version":3,"sources":["path/with\nnewline.js"],"sourcesContent":["line1\r\nline2\t\"quoted\"\\\u0001"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let output = sm.to_json();
let sm2 = SourceMap::from_json(&output).unwrap();
assert_eq!(sm.sources[0], sm2.sources[0]);
assert_eq!(sm.sources_content[0], sm2.sources_content[0]);
}
// `to_json_with_options(exclude_content)` toggles sourcesContent emission.
#[test]
fn to_json_exclude_content() {
let json = r#"{"version":3,"sources":["a.js"],"sourcesContent":["var a;"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let output = sm.to_json_with_options(true);
assert!(!output.contains("sourcesContent"));
let output_with = sm.to_json_with_options(false);
assert!(output_with.contains("sourcesContent"));
}
// A 5-field segment (with a name) re-encodes to the identical VLQ string.
#[test]
fn encode_mappings_with_name() {
let json = r#"{"version":3,"sources":["a.js"],"names":["foo"],"mappings":"AAAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let encoded = sm.encode_mappings();
assert_eq!(encoded, "AAAAA");
}
// Generated-only segments survive an encode/decode round trip (by count).
#[test]
fn encode_mappings_generated_only() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"A,AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let encoded = sm.encode_mappings();
let roundtrip = SourceMap::from_json(&format!(
r#"{{"version":3,"sources":["a.js"],"names":[],"mappings":"{}"}}"#,
encoded
))
.unwrap();
assert_eq!(roundtrip.mapping_count(), sm.mapping_count());
}
// A narrow range fully inside one source resolves to a single MappedRange.
#[test]
fn map_range_single_result() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA,EAAC,OAAO"}"#;
let sm = SourceMap::from_json(json).unwrap();
let result = sm.map_range(0, 0, 0, 1);
assert!(result.is_some());
let range = result.unwrap();
assert_eq!(range.source, 0);
}
// A "scopes" field encoded via srcmap_scopes is decoded during from_json.
#[test]
fn scopes_in_from_json() {
let info = srcmap_scopes::ScopeInfo {
scopes: vec![Some(srcmap_scopes::OriginalScope {
start: srcmap_scopes::Position { line: 0, column: 0 },
end: srcmap_scopes::Position { line: 5, column: 0 },
name: None,
kind: None,
is_stack_frame: false,
variables: vec![],
children: vec![],
})],
ranges: vec![],
};
let mut names = vec![];
let scopes_str = srcmap_scopes::encode_scopes(&info, &mut names);
let json = format!(
r#"{{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA","scopes":"{scopes_str}"}}"#
);
let sm = SourceMap::from_json(&json).unwrap();
assert!(sm.scopes.is_some());
}
// The partial-parse path decodes the "scopes" field like the full parse.
#[test]
fn from_json_lines_with_scopes() {
let info = srcmap_scopes::ScopeInfo {
scopes: vec![Some(srcmap_scopes::OriginalScope {
start: srcmap_scopes::Position { line: 0, column: 0 },
end: srcmap_scopes::Position { line: 5, column: 0 },
name: None,
kind: None,
is_stack_frame: false,
variables: vec![],
children: vec![],
})],
ranges: vec![],
};
let mut names = vec![];
let scopes_str = srcmap_scopes::encode_scopes(&info, &mut names);
let json = format!(
r#"{{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA;AACA","scopes":"{scopes_str}"}}"#
);
let sm = SourceMap::from_json_lines(&json, 0, 2).unwrap();
assert!(sm.scopes.is_some());
}
// Only x_-prefixed unknown keys are captured as extensions by the partial parse.
#[test]
fn from_json_lines_with_extensions() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA","x_custom":"val","not_x":"skip"}"#;
let sm = SourceMap::from_json_lines(json, 0, 1).unwrap();
assert!(sm.extensions.contains_key("x_custom"));
assert!(!sm.extensions.contains_key("not_x"));
}
// A non-3 version is rejected by the lazy parser with InvalidVersion.
#[test]
fn lazy_sourcemap_version_error() {
let json = r#"{"version":2,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let err = LazySourceMap::from_json(json).unwrap_err();
assert!(matches!(err, ParseError::InvalidVersion(2)));
}
// sourceRoot is prefixed onto source paths by the lazy parser too.
#[test]
fn lazy_sourcemap_with_source_root() {
let json =
r#"{"version":3,"sourceRoot":"src/","sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = LazySourceMap::from_json(json).unwrap();
assert_eq!(sm.sources[0], "src/a.js");
}
// ignoreList and x_-prefixed extension keys are preserved; others dropped.
#[test]
fn lazy_sourcemap_with_ignore_list_and_extensions() {
let json = r#"{"version":3,"sources":["v.js"],"names":[],"mappings":"AAAA","ignoreList":[0],"x_custom":"val","not_x":"skip"}"#;
let sm = LazySourceMap::from_json(json).unwrap();
assert_eq!(sm.ignore_list, vec![0]);
assert!(sm.extensions.contains_key("x_custom"));
assert!(!sm.extensions.contains_key("not_x"));
}
// The lazy parser decodes the "scopes" field as well.
#[test]
fn lazy_sourcemap_with_scopes() {
let info = srcmap_scopes::ScopeInfo {
scopes: vec![Some(srcmap_scopes::OriginalScope {
start: srcmap_scopes::Position { line: 0, column: 0 },
end: srcmap_scopes::Position { line: 5, column: 0 },
name: None,
kind: None,
is_stack_frame: false,
variables: vec![],
children: vec![],
})],
ranges: vec![],
};
let mut names = vec![];
let scopes_str = srcmap_scopes::encode_scopes(&info, &mut names);
let json = format!(
r#"{{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA","scopes":"{scopes_str}"}}"#
);
let sm = LazySourceMap::from_json(&json).unwrap();
assert!(sm.scopes.is_some());
}
// A null "sources" entry is tolerated by the lazy parser.
#[test]
fn lazy_sourcemap_null_source() {
let json = r#"{"version":3,"sources":[null,"a.js"],"names":[],"mappings":"AAAA,KCCA"}"#;
let sm = LazySourceMap::from_json(json).unwrap();
assert_eq!(sm.sources.len(), 2);
}
// Sections spanning several lines each: lookups work at both section offsets
// (line 0 for the first section, line 5 for the second).
#[test]
fn indexed_map_multi_line_section() {
let json = r#"{
"version": 3,
"sections": [
{
"offset": {"line": 0, "column": 0},
"map": {
"version": 3,
"sources": ["a.js"],
"names": [],
"mappings": "AAAA;AACA;AACA"
}
},
{
"offset": {"line": 5, "column": 0},
"map": {
"version": 3,
"sources": ["b.js"],
"names": [],
"mappings": "AAAA;AACA"
}
}
]
}"#;
let sm = SourceMap::from_json(json).unwrap();
assert!(sm.original_position_for(0, 0).is_some());
assert!(sm.original_position_for(5, 0).is_some());
}
// One combined pass over all sourceMappingURL directive forms: the four
// external comment styles, the two no-result inputs, and an inline data URL.
#[test]
fn source_mapping_url_extraction() {
    // Each classic comment form must resolve to an External reference.
    let external_cases = [
        ("var x = 1;\n//# sourceMappingURL=bundle.js.map", "bundle.js.map"),
        ("body { }\n/*# sourceMappingURL=style.css.map */", "style.css.map"),
        ("var x;\n//@ sourceMappingURL=old-style.map", "old-style.map"),
        ("body{}\n/*@ sourceMappingURL=old-css.map */", "old-css.map"),
    ];
    for (input, expected) in external_cases {
        let url = parse_source_mapping_url(input);
        assert!(matches!(url, Some(SourceMappingUrl::External(ref s)) if s == expected));
    }
    // No directive at all, or a directive with an empty URL, yields None.
    assert!(parse_source_mapping_url("var x = 1;").is_none());
    assert!(parse_source_mapping_url("//# sourceMappingURL=").is_none());
    // A base64 data URL is recognized as an inline map.
    let map_json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
    let encoded = base64_encode_simple(map_json);
    let input = format!("var x;\n//# sourceMappingURL=data:application/json;base64,{encoded}");
    assert!(matches!(parse_source_mapping_url(&input), Some(SourceMappingUrl::Inline(_))));
}
// A source with no referencing mapping triggers an "unreferenced" warning.
#[test]
fn validate_deep_unreferenced_coverage() {
let sm = SourceMap::from_parts(
None,
None,
vec!["used.js".to_string(), "unused.js".to_string()],
vec![None, None],
vec![],
vec![Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 0,
name: NO_NAME,
is_range_mapping: false,
}],
vec![],
None,
None,
);
let warnings = validate_deep(&sm);
assert!(warnings.iter().any(|w| w.contains("unreferenced")));
}
// The partial parse handles generated-only ("A") segments.
#[test]
fn from_json_lines_generated_only_segment() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"A,AAAA;AACA"}"#;
let sm = SourceMap::from_json_lines(json, 0, 2).unwrap();
assert!(sm.mapping_count() >= 2);
}
// Name indices decode correctly in the partial parse.
#[test]
fn from_json_lines_with_names() {
let json = r#"{"version":3,"sources":["a.js"],"names":["foo"],"mappings":"AAAAA;AACAA"}"#;
let sm = SourceMap::from_json_lines(json, 0, 2).unwrap();
let loc = sm.original_position_for(0, 0).unwrap();
assert_eq!(loc.name, Some(0));
}
// from_parts with mappings only on lines 0 and 5: the gap lines miss.
#[test]
fn from_parts_with_line_gap() {
let sm = SourceMap::from_parts(
None,
None,
vec!["a.js".to_string()],
vec![None],
vec![],
vec![
Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 0,
name: NO_NAME,
is_range_mapping: false,
},
Mapping {
generated_line: 5,
generated_column: 0,
source: 0,
original_line: 5,
original_column: 0,
name: NO_NAME,
is_range_mapping: false,
},
],
vec![],
None,
None,
);
assert!(sm.original_position_for(0, 0).is_some());
assert!(sm.original_position_for(5, 0).is_some());
assert!(sm.original_position_for(1, 0).is_none());
}
// A line mixing a generated-only segment with a named segment: the first
// carries NO_SOURCE, the second carries a real name index.
#[test]
fn lazy_decode_line_with_names_and_generated_only() {
let json = r#"{"version":3,"sources":["a.js"],"names":["fn"],"mappings":"A,AAAAC"}"#;
let sm = LazySourceMap::from_json(json).unwrap();
let line = sm.decode_line(0).unwrap();
assert!(line.len() >= 2);
assert_eq!(line[0].source, NO_SOURCE);
assert_ne!(line[1].name, NO_NAME);
}
// Reverse lookups against positions outside a source's mapped range miss;
// positions inside it hit.
#[test]
fn generated_position_glb_source_mismatch() {
let json = r#"{"version":3,"sources":["a.js","b.js"],"names":[],"mappings":"AAAA,KCCA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let loc = sm.generated_position_for_with_bias("a.js", 100, 0, Bias::LeastUpperBound);
assert!(loc.is_none());
let loc = sm.generated_position_for_with_bias("b.js", 0, 0, Bias::GreatestLowerBound);
assert!(loc.is_none());
let loc = sm.generated_position_for_with_bias("b.js", 1, 0, Bias::GreatestLowerBound);
assert!(loc.is_some());
let loc = sm.generated_position_for_with_bias("b.js", 99, 0, Bias::LeastUpperBound);
assert!(loc.is_none());
}
#[test]
fn from_json_invalid_scopes_error() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA","scopes":"!!invalid!!"}"#;
let err = SourceMap::from_json(json).unwrap_err();
assert!(matches!(err, ParseError::Scopes(_)));
}
#[test]
fn lazy_from_json_invalid_scopes_error() {
    // The lazy parser must reject a malformed "scopes" field the same way.
    let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA","scopes":"!!invalid!!"}"#;
    assert!(matches!(LazySourceMap::from_json(json), Err(ParseError::Scopes(_))));
}
#[test]
fn from_json_lines_invalid_scopes_error() {
    // Partial (line-range) decoding also validates the "scopes" field.
    let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA","scopes":"!!invalid!!"}"#;
    assert!(matches!(SourceMap::from_json_lines(json, 0, 1), Err(ParseError::Scopes(_))));
}
#[test]
fn from_json_lines_invalid_version() {
    // Only version 3 maps are accepted; the offending version is reported.
    let json = r#"{"version":2,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
    assert!(matches!(
        SourceMap::from_json_lines(json, 0, 1),
        Err(ParseError::InvalidVersion(2))
    ));
}
// Flattening an indexed map must remap each section's ignoreList indices
// into the merged sources table (sections here have overlapping sources).
#[test]
fn indexed_map_with_ignore_list_remapped() {
let json = r#"{
"version": 3,
"sections": [{
"offset": {"line": 0, "column": 0},
"map": {
"version": 3,
"sources": ["a.js", "b.js"],
"names": [],
"mappings": "AAAA;ACAA",
"ignoreList": [1]
}
}, {
"offset": {"line": 5, "column": 0},
"map": {
"version": 3,
"sources": ["b.js", "c.js"],
"names": [],
"mappings": "AAAA;ACAA",
"ignoreList": [0]
}
}]
}"#;
let sm = SourceMap::from_json(json).unwrap();
assert!(!sm.ignore_list.is_empty());
}
// A debugId set via from_parts must be serialized into the JSON output.
#[test]
fn to_json_with_debug_id() {
let sm = SourceMap::from_parts(
Some("out.js".to_string()),
None,
vec!["a.js".to_string()],
vec![None],
vec![],
vec![Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 0,
name: NO_NAME,
is_range_mapping: false,
}],
vec![],
Some("abc-123".to_string()),
None,
);
let json = sm.to_json();
assert!(json.contains(r#""debugId":"abc-123""#));
}
// Both ignoreList and unknown extension keys (x_*) must round-trip to JSON.
#[test]
fn to_json_with_ignore_list_and_extensions() {
let mut sm = SourceMap::from_parts(
None,
None,
vec!["a.js".to_string(), "b.js".to_string()],
vec![None, None],
vec![],
vec![Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 0,
name: NO_NAME,
is_range_mapping: false,
}],
vec![1],
None,
None,
);
sm.extensions.insert("x_test".to_string(), serde_json::json!(42));
let json = sm.to_json();
assert!(json.contains("\"ignoreList\":[1]"));
assert!(json.contains("\"x_test\":42"));
}
// from_vlq must accept every optional field at once (file, sourceRoot,
// sourcesContent, ignoreList, debugId) and still decode both lines.
#[test]
fn from_vlq_with_all_options() {
let sm = SourceMap::from_vlq(
"AAAA;AACA",
vec!["a.js".to_string()],
vec![],
Some("out.js".to_string()),
Some("src/".to_string()),
vec![Some("content".to_string())],
vec![0],
Some("debug-123".to_string()),
)
.unwrap();
assert_eq!(sm.source(0), "a.js");
assert!(sm.original_position_for(0, 0).is_some());
assert!(sm.original_position_for(1, 0).is_some());
}
#[test]
fn lazy_into_sourcemap_roundtrip() {
    // Eagerly materializing a LazySourceMap must preserve lookups and names.
    let json = r#"{"version":3,"sources":["a.js"],"names":["x"],"mappings":"AAAAA;AACAA"}"#;
    let sm = LazySourceMap::from_json(json).unwrap().into_sourcemap().unwrap();
    for line in 0..2 {
        assert!(sm.original_position_for(line, 0).is_some());
    }
    assert_eq!(sm.name(0), "x");
}
#[test]
fn lazy_original_position_for_no_match() {
    // "KAAA" starts at generated column 5, so a query at column 0 misses.
    let map = LazySourceMap::from_json(
        r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"KAAA"}"#,
    )
    .unwrap();
    assert!(map.original_position_for(0, 0).is_none());
}
#[test]
fn lazy_original_position_for_empty_line() {
    // The leading ';' leaves generated line 0 without any segments.
    let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":";AAAA"}"#;
    let map = LazySourceMap::from_json(json).unwrap();
    assert!(map.original_position_for(0, 0).is_none());
    assert!(map.original_position_for(1, 0).is_some());
}
#[test]
fn lazy_original_position_generated_only() {
    // Line 0 holds only a generated-only segment ("A"); such segments carry
    // no original position, so the lookup yields None. Line 1 resolves.
    let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"A;AAAA"}"#;
    let map = LazySourceMap::from_json(json).unwrap();
    assert!(map.original_position_for(0, 0).is_none());
    assert!(map.original_position_for(1, 0).is_some());
}
#[test]
fn from_json_lines_null_source() {
    // A null entry in "sources" must not break partial (line-range) decoding.
    let json = r#"{"version":3,"sources":[null,"a.js"],"names":[],"mappings":"ACAA"}"#;
    let map = SourceMap::from_json_lines(json, 0, 1).unwrap();
    assert!(map.mapping_count() >= 1);
}
#[test]
fn from_json_lines_with_source_root_prefix() {
    // sourceRoot is prepended to each source path during line-range decode.
    let json =
        r#"{"version":3,"sourceRoot":"lib/","sources":["b.js"],"names":[],"mappings":"AAAA"}"#;
    let map = SourceMap::from_json_lines(json, 0, 1).unwrap();
    assert_eq!(map.source(0), "lib/b.js");
}
#[test]
fn generated_position_for_glb_idx_zero() {
    // "AAKA" maps to original line 5 only, so a greatest-lower-bound query
    // for original line 0 finds nothing at or below it.
    let map = SourceMap::from_json(
        r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAKA"}"#,
    )
    .unwrap();
    let hit = map.generated_position_for_with_bias("a.js", 0, 0, Bias::GreatestLowerBound);
    assert!(hit.is_none());
}
#[test]
fn from_json_lines_with_ignore_list() {
    // The ignoreList field survives partial (line-range) decoding.
    let json = r#"{"version":3,"sources":["a.js","b.js"],"names":[],"mappings":"AAAA;ACAA","ignoreList":[1]}"#;
    let map = SourceMap::from_json_lines(json, 0, 2).unwrap();
    assert_eq!(map.ignore_list, vec![1]);
}
// validate_deep must flag mappings whose generated positions are not in
// ascending order (line 1 appears before line 0 here).
#[test]
fn validate_deep_out_of_order_mappings() {
let sm = SourceMap::from_parts(
None,
None,
vec!["a.js".to_string()],
vec![None],
vec![],
vec![
Mapping {
generated_line: 1,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 0,
name: NO_NAME,
is_range_mapping: false,
},
Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 0,
name: NO_NAME,
is_range_mapping: false,
},
],
vec![],
None,
None,
);
let warnings = validate_deep(&sm);
assert!(warnings.iter().any(|w| w.contains("out of order")));
}
// validate_deep must flag a mapping whose source index (5) exceeds the
// single-entry sources table.
#[test]
fn validate_deep_out_of_bounds_source() {
let sm = SourceMap::from_parts(
None,
None,
vec!["a.js".to_string()],
vec![None],
vec![],
vec![Mapping {
generated_line: 0,
generated_column: 0,
source: 5,
original_line: 0,
original_column: 0,
name: NO_NAME,
is_range_mapping: false,
}],
vec![],
None,
None,
);
let warnings = validate_deep(&sm);
assert!(warnings.iter().any(|w| w.contains("source index") && w.contains("out of bounds")));
}
// validate_deep must flag a name index (5) outside the single-entry names table.
#[test]
fn validate_deep_out_of_bounds_name() {
let sm = SourceMap::from_parts(
None,
None,
vec!["a.js".to_string()],
vec![None],
vec!["foo".to_string()],
vec![Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 0,
name: 5,
is_range_mapping: false,
}],
vec![],
None,
None,
);
let warnings = validate_deep(&sm);
assert!(warnings.iter().any(|w| w.contains("name index") && w.contains("out of bounds")));
}
// validate_deep must flag ignoreList entries (10) that reference
// nonexistent sources.
#[test]
fn validate_deep_out_of_bounds_ignore_list() {
let sm = SourceMap::from_parts(
None,
None,
vec!["a.js".to_string()],
vec![None],
vec![],
vec![Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 0,
name: NO_NAME,
is_range_mapping: false,
}],
vec![10],
None,
None,
);
let warnings = validate_deep(&sm);
assert!(warnings.iter().any(|w| w.contains("ignoreList") && w.contains("out of bounds")));
}
// A base64 data: sourceMappingURL comment must be decoded inline, yielding
// the embedded JSON rather than an external URL.
#[test]
fn source_mapping_url_inline_decoded() {
let map_json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let encoded = base64_encode_simple(map_json);
let input = format!("var x;\n//# sourceMappingURL=data:application/json;base64,{encoded}");
let url = parse_source_mapping_url(&input);
match url {
Some(SourceMappingUrl::Inline(json)) => {
// Sanity-check the decoded payload round-tripped through base64.
assert!(json.contains("version"));
assert!(json.contains("AAAA"));
}
_ => panic!("expected inline source map"),
}
}
#[test]
fn source_mapping_url_charset_variant() {
    // A charset parameter before ";base64" must still be treated as inline.
    let encoded = base64_encode_simple(r#"{"version":3}"#);
    let source =
        format!("x\n//# sourceMappingURL=data:application/json;charset=utf-8;base64,{encoded}");
    assert!(matches!(parse_source_mapping_url(&source), Some(SourceMappingUrl::Inline(_))));
}
#[test]
fn source_mapping_url_invalid_base64_falls_through_to_external() {
    // An undecodable base64 payload degrades to an external URL, not an error.
    let comment = "x\n//# sourceMappingURL=data:application/json;base64,!!!invalid!!!";
    assert!(matches!(parse_source_mapping_url(comment), Some(SourceMappingUrl::External(_))));
}
#[test]
fn from_json_lines_with_extensions_preserved() {
    // Unknown "x_*" keys are kept as extensions even on a partial decode.
    let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA","x_custom":99}"#;
    let map = SourceMap::from_json_lines(json, 0, 1).unwrap();
    assert!(map.extensions.contains_key("x_custom"));
}
/// Minimal RFC 4648 standard-alphabet base64 encoder used by the data-URL
/// tests (avoids pulling a base64 crate into dev-dependencies).
///
/// Pads the output with `=` so its length is always a multiple of four.
/// Returns the empty string for empty input.
fn base64_encode_simple(input: &str) -> String {
    const CHARS: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    let bytes = input.as_bytes();
    // Every 3-byte chunk produces exactly 4 output chars; reserve up front
    // instead of growing the String repeatedly.
    let mut result = String::with_capacity((bytes.len() + 2) / 3 * 4);
    for chunk in bytes.chunks(3) {
        // Pack up to three bytes into one 24-bit group; absent bytes are zero.
        let b0 = chunk[0] as u32;
        let b1 = chunk.get(1).copied().unwrap_or(0) as u32;
        let b2 = chunk.get(2).copied().unwrap_or(0) as u32;
        let n = (b0 << 16) | (b1 << 8) | b2;
        result.push(CHARS[((n >> 18) & 0x3F) as usize] as char);
        result.push(CHARS[((n >> 12) & 0x3F) as usize] as char);
        // The 3rd/4th output chars become '=' padding when the final chunk
        // holds fewer than 2/3 input bytes respectively.
        if chunk.len() > 1 {
            result.push(CHARS[((n >> 6) & 0x3F) as usize] as char);
        } else {
            result.push('=');
        }
        if chunk.len() > 2 {
            result.push(CHARS[(n & 0x3F) as usize] as char);
        } else {
            result.push('=');
        }
    }
    result
}
// The streaming MappingsIter must agree field-for-field with the batch
// decode_mappings decoder on the same VLQ input.
#[test]
fn mappings_iter_matches_decode() {
let vlq = "AAAA;AACA,EAAA;AACA";
let iter_mappings: Vec<Mapping> = MappingsIter::new(vlq).collect::<Result<_, _>>().unwrap();
let (decoded, _) = decode_mappings(vlq).unwrap();
assert_eq!(iter_mappings.len(), decoded.len());
for (a, b) in iter_mappings.iter().zip(decoded.iter()) {
assert_eq!(a.generated_line, b.generated_line);
assert_eq!(a.generated_column, b.generated_column);
assert_eq!(a.source, b.source);
assert_eq!(a.original_line, b.original_line);
assert_eq!(a.original_column, b.original_column);
assert_eq!(a.name, b.name);
}
}
#[test]
fn mappings_iter_empty() {
    // An empty mappings string yields an empty iterator without error.
    let decoded = MappingsIter::new("").collect::<Result<Vec<Mapping>, _>>().unwrap();
    assert_eq!(decoded.len(), 0);
}
#[test]
fn mappings_iter_generated_only() {
    // A one-field segment ("A") has no source; the sentinel NO_SOURCE marks it.
    let decoded: Vec<Mapping> = MappingsIter::new("A,AAAA").collect::<Result<_, _>>().unwrap();
    assert_eq!(decoded.len(), 2);
    assert_eq!(decoded[0].source, NO_SOURCE);
    assert_eq!(decoded[1].source, 0);
}
#[test]
fn mappings_iter_with_names() {
    // A five-field segment includes a name index (0 here).
    let decoded: Vec<Mapping> = MappingsIter::new("AAAAA").collect::<Result<_, _>>().unwrap();
    assert_eq!(decoded.len(), 1);
    assert_eq!(decoded[0].name, 0);
}
#[test]
fn mappings_iter_multiple_lines() {
    // Each ';' advances the generated line counter by one.
    let decoded: Vec<Mapping> =
        MappingsIter::new("AAAA;AACA;AACA").collect::<Result<_, _>>().unwrap();
    assert_eq!(decoded.len(), 3);
    for (expected_line, m) in decoded.iter().enumerate() {
        assert_eq!(m.generated_line, expected_line as u32);
    }
}
// rangeMappings "A,C" marks the segments at columns 0 and 2 (VLQ 0 and 1?
// — the flags below are what the decoder is expected to produce: first and
// third mappings are ranges, the middle one is not).
#[test]
fn range_mappings_basic_decode() {
let json = r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"AAAA,CAAC,GAAG","rangeMappings":"A,C"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert!(sm.all_mappings()[0].is_range_mapping);
assert!(!sm.all_mappings()[1].is_range_mapping);
assert!(sm.all_mappings()[2].is_range_mapping);
}
// Within a range mapping, lookups interpolate: original column advances
// with the queried generated column instead of snapping to the anchor.
#[test]
fn range_mapping_lookup_with_delta() {
let json = r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"AAAA,GAAG","rangeMappings":"A"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.original_position_for(0, 0).unwrap().column, 0);
assert_eq!(sm.original_position_for(0, 1).unwrap().column, 1);
assert_eq!(sm.original_position_for(0, 2).unwrap().column, 2);
assert_eq!(sm.original_position_for(0, 3).unwrap().column, 3);
}
// A trailing range mapping extends across later generated lines, advancing
// the original line in lockstep (column resets to 0 on the new lines).
#[test]
fn range_mapping_cross_line() {
let json = r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"AAAA","rangeMappings":"A"}"#;
let sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.original_position_for(1, 5).unwrap().line, 1);
assert_eq!(sm.original_position_for(1, 5).unwrap().column, 0);
assert_eq!(sm.original_position_for(2, 10).unwrap().line, 2);
}
#[test]
fn range_mapping_encode_roundtrip() {
    // Decoding then re-encoding rangeMappings must be lossless.
    let json = r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"AAAA,CAAC,GAAG","rangeMappings":"A,C"}"#;
    let sm = SourceMap::from_json(json).unwrap();
    let reencoded = sm.encode_range_mappings().unwrap();
    assert_eq!(reencoded, "A,C");
}
#[test]
fn no_range_mappings_test() {
    // Without a rangeMappings field the map reports none and encodes to None.
    let json = r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"AAAA"}"#;
    let sm = SourceMap::from_json(json).unwrap();
    assert!(!sm.has_range_mappings());
    assert!(sm.encode_range_mappings().is_none());
}
// rangeMappings uses ';' line separators parallel to "mappings": "A;A"
// marks the first segment of each generated line as a range.
#[test]
fn range_mappings_multi_line_test() {
let sm = SourceMap::from_json(r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"AAAA,CAAC;AAAA","rangeMappings":"A;A"}"#).unwrap();
assert!(sm.all_mappings()[0].is_range_mapping);
assert!(!sm.all_mappings()[1].is_range_mapping);
assert!(sm.all_mappings()[2].is_range_mapping);
}
#[test]
fn range_mappings_json_roundtrip() {
    // rangeMappings must survive a full serialize/deserialize cycle.
    let json = r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"AAAA,CAAC,GAAG","rangeMappings":"A,C"}"#;
    let serialized = SourceMap::from_json(json).unwrap().to_json();
    assert!(serialized.contains("rangeMappings"));
    let reparsed = SourceMap::from_json(&serialized).unwrap();
    assert_eq!(reparsed.range_mapping_count(), 2);
}
#[test]
fn range_mappings_absent_from_json_test() {
    // A map parsed without rangeMappings must not emit the key when serialized.
    let json = r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"AAAA"}"#;
    let output = SourceMap::from_json(json).unwrap().to_json();
    assert!(!output.contains("rangeMappings"));
}
// When the queried line has a mapping but the query column precedes it,
// a range mapping on an earlier line still provides a fallback answer.
#[test]
fn range_mapping_fallback_test() {
let sm = SourceMap::from_json(r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"AAAA;KACK","rangeMappings":"A"}"#).unwrap();
let loc = sm.original_position_for(1, 2).unwrap();
assert_eq!(loc.line, 1);
assert_eq!(loc.column, 0);
}
#[test]
fn range_mapping_no_fallback_non_range() {
    // A plain (non-range) mapping provides no cross-line fallback lookup.
    let json = r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"AAAA"}"#;
    let sm = SourceMap::from_json(json).unwrap();
    assert!(sm.original_position_for(1, 5).is_none());
}
// from_vlq_with_range_mappings must decode the optional rangeMappings
// string ("A" flags only the first segment) alongside the VLQ mappings.
#[test]
fn range_mapping_from_vlq_test() {
let sm = SourceMap::from_vlq_with_range_mappings(
"AAAA,CAAC",
vec!["input.js".into()],
vec![],
None,
None,
vec![],
vec![],
None,
Some("A"),
)
.unwrap();
assert!(sm.all_mappings()[0].is_range_mapping);
assert!(!sm.all_mappings()[1].is_range_mapping);
}
// Multi-line rangeMappings ("A;B" — first segment of line 0, second segment
// of line 1) must decode to the right flags and re-encode identically.
#[test]
fn range_mapping_encode_multi_line_test() {
let sm = SourceMap::from_json(r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"AAAA,CAAC;AAAA,CAAC","rangeMappings":"A;B"}"#).unwrap();
assert!(sm.all_mappings()[0].is_range_mapping);
assert!(!sm.all_mappings()[1].is_range_mapping);
assert!(!sm.all_mappings()[2].is_range_mapping);
assert!(sm.all_mappings()[3].is_range_mapping);
assert_eq!(sm.encode_range_mappings().unwrap(), "A;B");
}
// Range flags set directly on Mapping structs (not parsed from JSON) must
// drive interpolated lookups: within the range the column tracks the query,
// past the next plain mapping it snaps to that anchor.
#[test]
fn range_mapping_from_parts_test() {
let sm = SourceMap::from_parts(
None,
None,
vec!["input.js".into()],
vec![],
vec![],
vec![
Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 0,
name: NO_NAME,
is_range_mapping: true,
},
Mapping {
generated_line: 0,
generated_column: 5,
source: 0,
original_line: 0,
original_column: 5,
name: NO_NAME,
is_range_mapping: false,
},
],
vec![],
None,
None,
);
assert_eq!(sm.original_position_for(0, 2).unwrap().column, 2);
assert_eq!(sm.original_position_for(0, 6).unwrap().column, 5);
}
#[test]
fn range_mapping_indexed_test() {
    // rangeMappings inside a section of an indexed map are honored after
    // flattening, including the cross-line fallback behavior.
    let json = r#"{"version":3,"sections":[{"offset":{"line":0,"column":0},"map":{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA","rangeMappings":"A"}}]}"#;
    let sm = SourceMap::from_json(json).unwrap();
    assert!(sm.has_range_mappings());
    let loc = sm.original_position_for(1, 3).unwrap();
    assert_eq!(loc.line, 1);
}
#[test]
fn range_mapping_empty_string_test() {
    // An empty rangeMappings string is equivalent to the field being absent.
    let json = r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"AAAA","rangeMappings":""}"#;
    let sm = SourceMap::from_json(json).unwrap();
    assert!(!sm.has_range_mappings());
}
// Least-upper-bound lookup before a range mapping's start column ("KAAK"
// starts at generated column 5) must select that mapping without
// underflowing the column delta computation.
#[test]
fn range_mapping_lub_no_underflow() {
let json = r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"KAAK","rangeMappings":"A"}"#;
let sm = SourceMap::from_json(json).unwrap();
let loc = sm.original_position_for_with_bias(0, 2, Bias::LeastUpperBound);
assert!(loc.is_some());
let loc = loc.unwrap();
assert_eq!(loc.line, 0);
// No interpolation backwards: the anchor column (5) is returned as-is.
assert_eq!(loc.column, 5);
}
// End-to-end builder smoke test: every setter lands on the built map and the
// resulting map resolves its single mapping (with name) correctly.
#[test]
fn builder_basic() {
let sm = SourceMap::builder()
.file("output.js")
.sources(["input.ts"])
.sources_content([Some("let x = 1;")])
.names(["x"])
.mappings([Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 4,
name: 0,
is_range_mapping: false,
}])
.build();
assert_eq!(sm.file.as_deref(), Some("output.js"));
assert_eq!(sm.sources, vec!["input.ts"]);
assert_eq!(sm.sources_content, vec![Some("let x = 1;".to_string())]);
assert_eq!(sm.names, vec!["x"]);
assert_eq!(sm.mapping_count(), 1);
let loc = sm.original_position_for(0, 0).unwrap();
assert_eq!(sm.source(loc.source), "input.ts");
assert_eq!(loc.column, 4);
assert_eq!(sm.name(loc.name.unwrap()), "x");
}
#[test]
fn builder_empty() {
    // A default builder produces an empty but fully usable map.
    let sm = SourceMap::builder().build();
    assert!(sm.file.is_none());
    assert!(sm.sources.is_empty());
    assert!(sm.names.is_empty());
    assert_eq!(sm.mapping_count(), 0);
}
// Builder with three sources, one mapping per generated line; each lookup
// must resolve to its own source file.
#[test]
fn builder_multiple_sources() {
let sm = SourceMap::builder()
.sources(["a.ts", "b.ts", "c.ts"])
.sources_content([Some("// a"), Some("// b"), None])
.mappings([
Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 0,
name: u32::MAX,
is_range_mapping: false,
},
Mapping {
generated_line: 1,
generated_column: 0,
source: 1,
original_line: 0,
original_column: 0,
name: u32::MAX,
is_range_mapping: false,
},
Mapping {
generated_line: 2,
generated_column: 0,
source: 2,
original_line: 0,
original_column: 0,
name: u32::MAX,
is_range_mapping: false,
},
])
.build();
assert_eq!(sm.sources.len(), 3);
assert_eq!(sm.mapping_count(), 3);
assert_eq!(sm.line_count(), 3);
let loc0 = sm.original_position_for(0, 0).unwrap();
assert_eq!(sm.source(loc0.source), "a.ts");
let loc1 = sm.original_position_for(1, 0).unwrap();
assert_eq!(sm.source(loc1.source), "b.ts");
let loc2 = sm.original_position_for(2, 0).unwrap();
assert_eq!(sm.source(loc2.source), "c.ts");
}
#[test]
fn builder_with_iterators() {
    // Builder setters accept arbitrary iterators, not just array literals.
    let module_names: Vec<String> = (0..5).map(|i| format!("mod_{i}.ts")).collect();
    let sm = SourceMap::builder()
        .sources(module_names.iter().map(String::as_str))
        .mappings((0..5u32).map(|i| Mapping {
            generated_line: i,
            generated_column: 0,
            source: i,
            original_line: i,
            original_column: 0,
            name: u32::MAX,
            is_range_mapping: false,
        }))
        .build();
    assert_eq!(sm.sources.len(), 5);
    assert_eq!(sm.mapping_count(), 5);
    for i in 0..5u32 {
        let loc = sm.original_position_for(i, 0).unwrap();
        assert_eq!(sm.source(loc.source), format!("mod_{i}.ts"));
    }
}
#[test]
fn builder_ignore_list_and_debug_id() {
    // ignore_list and debug_id set via the builder land on the built map.
    let sm = SourceMap::builder()
        .sources(["app.ts", "node_modules/lib.js"])
        .ignore_list([1])
        .debug_id("85314830-023f-4cf1-a267-535f4e37bb17")
        .build();
    assert_eq!(sm.debug_id.as_deref(), Some("85314830-023f-4cf1-a267-535f4e37bb17"));
    assert_eq!(sm.ignore_list, vec![1]);
}
// Mappings flagged is_range_mapping via the builder must make the built map
// report has_range_mappings().
#[test]
fn builder_range_mappings() {
let sm = SourceMap::builder()
.sources(["input.ts"])
.mappings([
Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 0,
name: u32::MAX,
is_range_mapping: true,
},
Mapping {
generated_line: 0,
generated_column: 10,
source: 0,
original_line: 5,
original_column: 0,
name: u32::MAX,
is_range_mapping: false,
},
])
.build();
assert!(sm.has_range_mappings());
assert_eq!(sm.mapping_count(), 2);
}
// A builder-constructed map must survive a full JSON round trip: metadata,
// names, and every per-mapping lookup must agree before and after.
#[test]
fn builder_json_roundtrip() {
let sm = SourceMap::builder()
.file("out.js")
.source_root("/src/")
.sources(["a.ts", "b.ts"])
.sources_content([Some("// a"), Some("// b")])
.names(["foo", "bar"])
.mappings([
Mapping {
generated_line: 0,
generated_column: 0,
source: 0,
original_line: 0,
original_column: 0,
name: 0,
is_range_mapping: false,
},
Mapping {
generated_line: 1,
generated_column: 5,
source: 1,
original_line: 3,
original_column: 2,
name: 1,
is_range_mapping: false,
},
])
.build();
let json = sm.to_json();
let sm2 = SourceMap::from_json(&json).unwrap();
assert_eq!(sm2.file, sm.file);
// Parsing applies sourceRoot, so reparsed sources carry the "/src/" prefix.
assert_eq!(sm2.sources, vec!["/src/a.ts", "/src/b.ts"]);
assert_eq!(sm2.names, sm.names);
assert_eq!(sm2.mapping_count(), sm.mapping_count());
for m in sm.all_mappings() {
let a = sm.original_position_for(m.generated_line, m.generated_column);
let b = sm2.original_position_for(m.generated_line, m.generated_column);
match (a, b) {
(Some(a), Some(b)) => {
assert_eq!(a.source, b.source);
assert_eq!(a.line, b.line);
assert_eq!(a.column, b.column);
assert_eq!(a.name, b.name);
}
(None, None) => {}
_ => panic!("lookup mismatch"),
}
}
}
// Default (GLB) lookup at a column before the range mapping's start
// ("KAAK" starts at generated column 5) must return None rather than
// underflow — contrast with range_mapping_lub_no_underflow, which queries
// the same fixture with LeastUpperBound bias.
#[test]
fn range_mapping_fallback_column_underflow() {
let json = r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"KAAK","rangeMappings":"A"}"#;
let sm = SourceMap::from_json(json).unwrap();
let loc = sm.original_position_for(0, 2);
assert!(loc.is_none());
}
// Cross-line range fallback at column 0: the original line advances with
// the generated line while the anchor's original column (10) is kept.
#[test]
fn range_mapping_fallback_cross_line_column_zero() {
let json = r#"{"version":3,"sources":["input.js"],"names":[],"mappings":"UAAU","rangeMappings":"A"}"#;
let sm = SourceMap::from_json(json).unwrap();
let loc = sm.original_position_for(1, 0).unwrap();
assert_eq!(loc.line, 1);
assert_eq!(loc.column, 10);
}
#[test]
fn vlq_overflow_at_shift_60() {
    // Fourteen 'g' continuation digits push the VLQ shift past 60 bits; the
    // decoder must report an error instead of wrapping silently.
    let overflow_vlq = "ggggggggggggggA";
    let json =
        format!(r#"{{"version":3,"sources":["a.js"],"names":[],"mappings":"{overflow_vlq}"}}"#);
    assert!(matches!(SourceMap::from_json(&json), Err(ParseError::Vlq(_))));
}
// Both lazy constructors must reject indexed (sectioned) source maps with a
// dedicated NestedIndexMap error instead of silently mis-parsing them.
#[test]
fn lazy_sourcemap_rejects_indexed_maps() {
let json = r#"{"version":3,"sections":[{"offset":{"line":0,"column":0},"map":{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}}]}"#;
let result = LazySourceMap::from_json_fast(json);
assert!(result.is_err());
assert!(matches!(result.unwrap_err(), ParseError::NestedIndexMap));
let result = LazySourceMap::from_json_no_content(json);
assert!(result.is_err());
assert!(matches!(result.unwrap_err(), ParseError::NestedIndexMap));
}
#[test]
fn lazy_sourcemap_regular_map_still_works() {
    // from_json_fast accepts ordinary (non-indexed) maps and resolves lookups.
    let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA;AACA"}"#;
    let map = LazySourceMap::from_json_fast(json).unwrap();
    let loc = map.original_position_for(0, 0).unwrap();
    assert_eq!(loc.line, 0);
    assert_eq!(map.source(loc.source), "a.js");
}
#[test]
fn lazy_sourcemap_get_source_name_bounds() {
    // get_source/get_name are checked lookups: Some in bounds, None past the
    // end (including the u32::MAX sentinel).
    let json = r#"{"version":3,"sources":["a.js"],"names":["foo"],"mappings":"AAAAA"}"#;
    let map = LazySourceMap::from_json_fast(json).unwrap();
    for (idx, expected) in [(0, Some("a.js")), (1, None), (u32::MAX, None)] {
        assert_eq!(map.get_source(idx), expected);
    }
    for (idx, expected) in [(0, Some("foo")), (1, None), (u32::MAX, None)] {
        assert_eq!(map.get_name(idx), expected);
    }
}
// Lazy lookups must work in any order, including seeking backwards to a
// line that precedes previously decoded ones (exercises cursor reset).
#[test]
fn lazy_sourcemap_backward_seek() {
let json =
r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA;AACA;AACA;AACA;AACA"}"#;
let sm = LazySourceMap::from_json_fast(json).unwrap();
let loc3 = sm.original_position_for(3, 0).unwrap();
assert_eq!(loc3.line, 3);
let loc1 = sm.original_position_for(1, 0).unwrap();
assert_eq!(loc1.line, 1);
let loc4 = sm.original_position_for(4, 0).unwrap();
assert_eq!(loc4.line, 4);
let loc0 = sm.original_position_for(0, 0).unwrap();
assert_eq!(loc0.line, 0);
}
// The fast-scan and prescan lazy constructors must return identical lookup
// results across a grid of (line, column) queries.
#[test]
fn lazy_sourcemap_fast_scan_vs_prescan_consistency() {
let json = r#"{"version":3,"sources":["a.js","b.js"],"names":["x","y"],"mappings":"AAAAA,KACAC;ACAAD,KACAC"}"#;
let fast = LazySourceMap::from_json_fast(json).unwrap();
let prescan = LazySourceMap::from_json_no_content(json).unwrap();
for line in 0..2 {
for col in [0, 5, 10] {
let a = fast.original_position_for(line, col);
let b = prescan.original_position_for(line, col);
match (&a, &b) {
(Some(a), Some(b)) => {
assert_eq!(a.source, b.source, "line={line}, col={col}");
assert_eq!(a.line, b.line, "line={line}, col={col}");
assert_eq!(a.column, b.column, "line={line}, col={col}");
assert_eq!(a.name, b.name, "line={line}, col={col}");
}
(None, None) => {}
_ => panic!("mismatch at line={line}, col={col}: {a:?} vs {b:?}"),
}
}
}
}
#[test]
fn mappings_iter_rejects_two_field_segment() {
    // Segments may have 1, 4, or 5 fields; "AA" has two and must be rejected.
    let outcome: Result<Vec<Mapping>, _> = MappingsIter::new("AA").collect();
    assert!(matches!(outcome, Err(DecodeError::InvalidSegmentLength { fields: 2, .. })));
}
#[test]
fn mappings_iter_rejects_three_field_segment() {
    // A three-field segment ("AAA") is likewise invalid.
    let outcome: Result<Vec<Mapping>, _> = MappingsIter::new("AAA").collect();
    assert!(matches!(outcome, Err(DecodeError::InvalidSegmentLength { fields: 3, .. })));
}
#[test]
fn decode_mappings_range_caps_end_line() {
    // An end line far beyond the input must be clamped to the actual line
    // count rather than over-allocating the offsets table.
    let (decoded, offsets) = decode_mappings_range("AAAA;AACA", 0, 1_000_000).unwrap();
    assert_eq!(decoded.len(), 2);
    assert!(offsets.len() <= 3);
}
#[test]
fn decode_range_mappings_cross_line_bound_check() {
    // A range entry that matches no segment on its own line must not spill
    // over and flag the next line's mapping.
    let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA;AAAA","rangeMappings":"E"}"#;
    let sm = SourceMap::from_json(json).unwrap();
    assert!(!sm.all_mappings()[1].is_range_mapping);
}
#[test]
fn fast_scan_lines_empty() {
    // Scanning an empty mappings string yields no line records.
    assert!(fast_scan_lines("").is_empty());
}
#[test]
fn fast_scan_lines_no_semicolons() {
    // Without a ';' the single line spans the entire input.
    let lines = fast_scan_lines("AAAA,CAAC");
    assert_eq!(lines.len(), 1);
    let only = &lines[0];
    assert_eq!((only.byte_offset, only.byte_end), (0, 9));
}
#[test]
fn fast_scan_lines_only_semicolons() {
    // N semicolons delimit N+1 lines, each of them zero-width.
    let lines = fast_scan_lines(";;;");
    assert_eq!(lines.len(), 4);
    assert!(lines.iter().all(|info| info.byte_offset == info.byte_end));
}
// A base64 data URL must decode to a working map: sources preserved and the
// single mapping resolvable.
#[test]
fn from_data_url_base64() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let encoded = base64_encode_simple(json);
let url = format!("data:application/json;base64,{encoded}");
let sm = SourceMap::from_data_url(&url).unwrap();
assert_eq!(sm.sources, vec!["a.js"]);
let loc = sm.original_position_for(0, 0).unwrap();
assert_eq!(loc.line, 0);
assert_eq!(loc.column, 0);
}
#[test]
fn from_data_url_base64_charset_utf8() {
    // A charset parameter between the MIME type and ";base64" is accepted.
    let payload =
        base64_encode_simple(r#"{"version":3,"sources":["b.js"],"names":[],"mappings":"AAAA"}"#);
    let url = format!("data:application/json;charset=utf-8;base64,{payload}");
    let sm = SourceMap::from_data_url(&url).unwrap();
    assert_eq!(sm.sources, vec!["b.js"]);
}
#[test]
fn from_data_url_plain_json() {
    // Non-base64 data URLs carry the JSON verbatim after the comma.
    let body = r#"{"version":3,"sources":["c.js"],"names":[],"mappings":"AAAA"}"#;
    let sm = SourceMap::from_data_url(&format!("data:application/json,{body}")).unwrap();
    assert_eq!(sm.sources, vec!["c.js"]);
}
// Percent-encoded (non-base64) data URLs must be URL-decoded before JSON
// parsing; the payload here is a percent-encoded version-3 map for "d.js".
#[test]
fn from_data_url_percent_encoded() {
let url = "data:application/json,%7B%22version%22%3A3%2C%22sources%22%3A%5B%22d.js%22%5D%2C%22names%22%3A%5B%5D%2C%22mappings%22%3A%22AAAA%22%7D";
let sm = SourceMap::from_data_url(url).unwrap();
assert_eq!(sm.sources, vec!["d.js"]);
}
#[test]
fn from_data_url_invalid_prefix() {
    // Only application/json data URLs are accepted.
    assert!(SourceMap::from_data_url("data:text/plain;base64,abc").is_err());
}
#[test]
fn from_data_url_not_a_data_url() {
    // Plain http(s) URLs are rejected outright.
    assert!(SourceMap::from_data_url("https://example.com/foo.map").is_err());
}
#[test]
fn from_data_url_invalid_base64() {
    // Garbage after ";base64," is a parse error.
    assert!(SourceMap::from_data_url("data:application/json;base64,!!!invalid!!!").is_err());
}
// The utils::to_data_url helper and from_data_url must be inverses.
#[test]
fn from_data_url_roundtrip_with_to_data_url() {
use crate::utils::to_data_url;
let json = r#"{"version":3,"sources":["round.js"],"names":["x"],"mappings":"AACAA"}"#;
let url = to_data_url(json);
let sm = SourceMap::from_data_url(&url).unwrap();
assert_eq!(sm.sources, vec!["round.js"]);
assert_eq!(sm.names, vec!["x"]);
}
// to_writer must produce parseable JSON equivalent to the original map.
#[test]
fn to_writer_basic() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let mut buf = Vec::new();
sm.to_writer(&mut buf).unwrap();
let output = String::from_utf8(buf).unwrap();
assert!(output.contains("\"version\":3"));
assert!(output.contains("\"sources\":[\"a.js\"]"));
let sm2 = SourceMap::from_json(&output).unwrap();
assert_eq!(sm2.sources, sm.sources);
}
#[test]
fn to_writer_matches_to_json() {
    // Streaming serialization must be byte-identical to to_json().
    let json = r#"{"version":3,"sources":["a.js","b.js"],"names":["foo"],"mappings":"AACAA,GCAA","sourcesContent":["var foo;","var bar;"]}"#;
    let sm = SourceMap::from_json(json).unwrap();
    let mut sink = Vec::new();
    sm.to_writer(&mut sink).unwrap();
    assert_eq!(String::from_utf8(sink).unwrap(), sm.to_json());
}
#[test]
fn to_writer_with_options_excludes_content() {
    // The exclude-content flag strips sourcesContent from the output.
    let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA","sourcesContent":["var x;"]}"#;
    let sm = SourceMap::from_json(json).unwrap();
    let mut sink = Vec::new();
    sm.to_writer_with_options(&mut sink, true).unwrap();
    assert!(!String::from_utf8(sink).unwrap().contains("sourcesContent"));
}
// set_file must update both the field and the serialized JSON; passing None
// removes the key from the output entirely.
#[test]
fn set_file() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let mut sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.file, None);
sm.set_file(Some("output.js".to_string()));
assert_eq!(sm.file, Some("output.js".to_string()));
assert!(sm.to_json().contains(r#""file":"output.js""#));
sm.set_file(None);
assert_eq!(sm.file, None);
assert!(!sm.to_json().contains("file"));
}
#[test]
fn set_source_root() {
    // set_source_root updates the field and JSON output; None clears it.
    let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
    let mut sm = SourceMap::from_json(json).unwrap();
    assert!(sm.source_root.is_none());
    sm.set_source_root(Some("src/".to_string()));
    assert_eq!(sm.source_root.as_deref(), Some("src/"));
    assert!(sm.to_json().contains(r#""sourceRoot":"src/""#));
    sm.set_source_root(None);
    assert!(sm.source_root.is_none());
}
#[test]
fn set_debug_id() {
    // set_debug_id round-trips through the field and JSON; None removes the key.
    let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
    let mut sm = SourceMap::from_json(json).unwrap();
    assert!(sm.debug_id.is_none());
    sm.set_debug_id(Some("abc-123".to_string()));
    assert_eq!(sm.debug_id.as_deref(), Some("abc-123"));
    assert!(sm.to_json().contains(r#""debugId":"abc-123""#));
    sm.set_debug_id(None);
    assert!(sm.debug_id.is_none());
    assert!(!sm.to_json().contains("debugId"));
}
// set_ignore_list must reflect into JSON; an empty list removes the key.
#[test]
fn set_ignore_list() {
let json = r#"{"version":3,"sources":["a.js","b.js"],"names":[],"mappings":"AAAA"}"#;
let mut sm = SourceMap::from_json(json).unwrap();
assert!(sm.ignore_list.is_empty());
sm.set_ignore_list(vec![0, 1]);
assert_eq!(sm.ignore_list, vec![0, 1]);
assert!(sm.to_json().contains("\"ignoreList\":[0,1]"));
sm.set_ignore_list(vec![]);
assert!(sm.ignore_list.is_empty());
assert!(!sm.to_json().contains("ignoreList"));
}
// set_sources must replace the sources table and keep the by-name index
// (source_index) consistent with the new names.
#[test]
fn set_sources() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let mut sm = SourceMap::from_json(json).unwrap();
assert_eq!(sm.sources, vec!["a.js"]);
sm.set_sources(vec![Some("x.js".to_string()), Some("y.js".to_string())]);
assert_eq!(sm.sources, vec!["x.js", "y.js"]);
assert_eq!(sm.source_index("x.js"), Some(0));
assert_eq!(sm.source_index("y.js"), Some(1));
assert_eq!(sm.source_index("a.js"), None);
}
#[test]
fn set_sources_with_source_root() {
    // Replacement sources are re-joined against the existing sourceRoot.
    let json =
        r#"{"version":3,"sourceRoot":"src/","sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
    let mut map = SourceMap::from_json(json).unwrap();
    assert_eq!(map.sources, vec!["src/a.js"]);
    map.set_sources(vec![Some("b.js".to_string())]);
    assert_eq!(map.sources, vec!["src/b.js"]);
}
// to_data_url must emit a base64 application/json URL that from_data_url
// parses back into an equivalent map (identical JSON serialization).
#[test]
fn to_data_url_roundtrip() {
let json = r#"{"version":3,"sources":["a.js"],"names":[],"mappings":"AAAA"}"#;
let sm = SourceMap::from_json(json).unwrap();
let url = sm.to_data_url();
assert!(url.starts_with("data:application/json;base64,"));
let sm2 = SourceMap::from_data_url(&url).unwrap();
assert_eq!(sm.sources, sm2.sources);
assert_eq!(sm.to_json(), sm2.to_json());
}
}