// posthog_cli/sourcemaps/content.rs

1use anyhow::{anyhow, bail, Result};
2use magic_string::{GenerateDecodedMapOptions, MagicString};
3use posthog_symbol_data::{write_symbol_data, HermesMap};
4use serde::{Deserialize, Serialize};
5use serde_json::Value;
6use sourcemap::SourceMap;
7use std::{collections::BTreeMap, path::PathBuf};
8
9use crate::{
10    api::symbol_sets::SymbolSetUpload,
11    sourcemaps::constant::{CHUNKID_COMMENT_PREFIX, CHUNKID_PLACEHOLDER, CODE_SNIPPET_TEMPLATE},
12    utils::files::SourceFile,
13};
14
/// A parsed sourcemap. The fields PostHog cares about are lifted out; every
/// other key in the map is preserved untouched in `fields`.
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SourceMapContent {
    /// Release this map belongs to; omitted from serialized output when unset.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub release_id: Option<String>,
    /// Chunk identifier; also accepted under the `debugId` key on input.
    #[serde(skip_serializing_if = "Option::is_none", alias = "debugId")]
    pub chunk_id: Option<String>,
    /// All remaining sourcemap keys, captured verbatim via `#[serde(flatten)]`
    /// so re-serializing round-trips them. BTreeMap keeps key order stable.
    #[serde(flatten)]
    pub fields: BTreeMap<String, Value>,
}
24
/// A sourcemap file on disk, with its content parsed into [`SourceMapContent`].
pub struct SourceMapFile {
    pub inner: SourceFile<SourceMapContent>,
}
28
/// A minified source file on disk, kept as its raw string content.
pub struct MinifiedSourceFile {
    pub inner: SourceFile<String>,
}
32
33impl SourceMapFile {
34    pub fn load(path: &PathBuf) -> Result<Self> {
35        let inner = SourceFile::load(path)?;
36
37        Ok(Self { inner })
38    }
39
40    pub fn save(&self) -> Result<()> {
41        self.inner.save(None)
42    }
43
44    pub fn get_chunk_id(&self) -> Option<String> {
45        self.inner.content.chunk_id.clone()
46    }
47
48    pub fn get_release_id(&self) -> Option<String> {
49        self.inner.content.release_id.clone()
50    }
51
52    pub fn has_release_id(&self) -> bool {
53        self.get_release_id().is_some()
54    }
55
56    pub fn apply_adjustment(&mut self, adjustment: SourceMap) -> Result<()> {
57        let new_content = {
58            let content = serde_json::to_string(&self.inner.content)?.into_bytes();
59            let mut map = sourcemap::decode_slice(content.as_slice())
60                .map_err(|err| anyhow!("Failed to parse sourcemap: {err}"))?;
61
62            // This looks weird. The reason we do it, is that we want `original` below
63            // to be a &mut SourceMap. This is easy to do if it's a Regular, or Hermes
64            // map, but if it's an Index map (Regular is already a SourceMap, so just
65            // taking the &mut works, and Hermes maps impl DerefMut<Target = SourceMap>),
66            // but for index maps, we have to flatten first, and that necessitates a Clone.
67            // Doing that Clone in the match below and then trying to borrow a &mut to the
68            // result of the Clone causes us to try and borrow something we immediately drop,
69            // (the clone is done in the match arm scope, and then a ref to a local in that
70            // scope is returned to the outer scope), so instead, we do the clone here if
71            // we need to, and declare the index branch unreachable below.
72            if let sourcemap::DecodedMap::Index(indexed) = &mut map {
73                let replacement = indexed
74                    .flatten()
75                    .map_err(|err| anyhow!("Failed to flatten sourcemap: {err}"))?;
76
77                map = sourcemap::DecodedMap::Regular(replacement);
78            };
79
80            let original = match &mut map {
81                sourcemap::DecodedMap::Regular(m) => m,
82                sourcemap::DecodedMap::Hermes(m) => m,
83                sourcemap::DecodedMap::Index(_) => unreachable!(),
84            };
85
86            original.adjust_mappings(&adjustment);
87
88            let mut content = content;
89            content.clear();
90            original.to_writer(&mut content)?;
91            serde_json::from_slice(&content)?
92        };
93
94        let mut old_content = std::mem::replace(&mut self.inner.content, new_content);
95        self.inner.content.chunk_id = old_content.chunk_id.take();
96        self.inner.content.release_id = old_content.release_id.take();
97
98        Ok(())
99    }
100
101    pub fn set_chunk_id(&mut self, chunk_id: Option<String>) {
102        self.inner.content.chunk_id = chunk_id;
103    }
104
105    pub fn set_release_id(&mut self, release_id: Option<String>) {
106        self.inner.content.release_id = release_id;
107    }
108}
109
110impl MinifiedSourceFile {
111    pub fn load(path: &PathBuf) -> Result<Self> {
112        let inner = SourceFile::load(path)?;
113
114        Ok(Self { inner })
115    }
116
117    pub fn save(&self) -> Result<()> {
118        self.inner.save(None)
119    }
120
121    pub fn get_chunk_id(&self) -> Option<String> {
122        let patterns = ["//# chunkId="];
123        self.get_comment_value(&patterns)
124    }
125
126    pub fn set_chunk_id(&mut self, chunk_id: &str) -> Result<SourceMap> {
127        let (new_source_content, source_adjustment) = {
128            // Update source content with chunk ID
129            let source_content = &self.inner.content;
130            let mut magic_source = MagicString::new(source_content);
131            let code_snippet = CODE_SNIPPET_TEMPLATE.replace(CHUNKID_PLACEHOLDER, chunk_id);
132            magic_source
133                .prepend(&code_snippet)
134                .map_err(|err| anyhow!("Failed to prepend code snippet: {err}"))?;
135            let chunk_comment = CHUNKID_COMMENT_PREFIX.replace(CHUNKID_PLACEHOLDER, chunk_id);
136            magic_source
137                .append(&chunk_comment)
138                .map_err(|err| anyhow!("Failed to append chunk comment: {err}"))?;
139            let adjustment = magic_source
140                .generate_map(GenerateDecodedMapOptions {
141                    include_content: true,
142                    ..Default::default()
143                })
144                .map_err(|err| anyhow!("Failed to generate source map: {err}"))?;
145            let adjustment_sourcemap = SourceMap::from_slice(
146                adjustment
147                    .to_string()
148                    .map_err(|err| anyhow!("Failed to serialize source map: {err}"))?
149                    .as_bytes(),
150            )
151            .map_err(|err| anyhow!("Failed to parse adjustment sourcemap: {err}"))?;
152            (magic_source.to_string(), adjustment_sourcemap)
153        };
154
155        self.inner.content = new_source_content;
156        Ok(source_adjustment)
157    }
158
159    pub fn get_sourcemap_path(&self, prefix: &Option<String>) -> Result<Option<PathBuf>> {
160        let mut possible_paths = Vec::new();
161        if let Some(filename) = self.get_sourcemap_reference()? {
162            possible_paths.push(
163                self.inner
164                    .path
165                    .parent()
166                    .map(|p| p.join(&filename))
167                    .unwrap_or_else(|| PathBuf::from(&filename)),
168            );
169
170            if let Some(prefix) = prefix {
171                if let Some(filename) = filename.strip_prefix(prefix) {
172                    possible_paths.push(
173                        self.inner
174                            .path
175                            .parent()
176                            .map(|p| p.join(filename))
177                            .unwrap_or_else(|| PathBuf::from(&filename)),
178                    );
179                }
180
181                if let Some(filename) = filename.strip_prefix(&format!("{prefix}/")) {
182                    possible_paths.push(
183                        self.inner
184                            .path
185                            .parent()
186                            .map(|p| p.join(filename))
187                            .unwrap_or_else(|| PathBuf::from(&filename)),
188                    );
189                }
190            }
191        };
192
193        let mut guessed_path = self.inner.path.to_path_buf();
194        match guessed_path.extension() {
195            Some(ext) => guessed_path.set_extension(format!("{}.map", ext.to_string_lossy())),
196            None => guessed_path.set_extension("map"),
197        };
198        possible_paths.push(guessed_path);
199
200        for path in possible_paths.into_iter() {
201            if path.exists() {
202                return Ok(Some(path));
203            }
204        }
205
206        Ok(None)
207    }
208
209    pub fn get_sourcemap_reference(&self) -> Result<Option<String>> {
210        let patterns = ["//# sourceMappingURL=", "//@ sourceMappingURL="];
211        let Some(found) = self.get_comment_value(&patterns) else {
212            return Ok(None);
213        };
214        Ok(Some(urlencoding::decode(&found)?.into_owned()))
215    }
216
217    fn get_comment_value(&self, patterns: &[&str]) -> Option<String> {
218        for line in self.inner.content.lines().rev() {
219            if let Some(val) = patterns
220                // For each pattern passed
221                .iter()
222                // If the pattern matches
223                .filter(|p| line.starts_with(*p))
224                // And the line actually contains a key:value pair split by an equals
225                .filter_map(|_| line.split_once('=').map(|s| s.1.to_string())) // And the split_once returns a Some
226                // Return this value
227                .next()
228            {
229                return Some(val);
230            }
231        }
232        None
233    }
234
235    pub fn remove_chunk_id(&mut self, chunk_id: String) -> Result<SourceMap> {
236        let (new_source_content, source_adjustment) = {
237            // Update source content with chunk ID
238            let source_content = &self.inner.content;
239            let mut magic_source = MagicString::new(source_content);
240
241            let chunk_comment = CHUNKID_COMMENT_PREFIX.replace(CHUNKID_PLACEHOLDER, &chunk_id);
242            if let Some(chunk_comment_start) = source_content.find(&chunk_comment) {
243                let chunk_comment_end = chunk_comment_start as i64 + chunk_comment.len() as i64;
244                magic_source
245                    .remove(chunk_comment_start as i64, chunk_comment_end)
246                    .map_err(|err| anyhow!("Failed to remove chunk comment: {err}"))?;
247            }
248
249            let code_snippet = CODE_SNIPPET_TEMPLATE.replace(CHUNKID_PLACEHOLDER, &chunk_id);
250            if let Some(code_snippet_start) = source_content.find(&code_snippet) {
251                let code_snippet_end = code_snippet_start as i64 + code_snippet.len() as i64;
252                magic_source
253                    .remove(code_snippet_start as i64, code_snippet_end)
254                    .map_err(|err| anyhow!("Failed to remove code snippet {err}"))?;
255            }
256
257            let adjustment = magic_source
258                .generate_map(GenerateDecodedMapOptions {
259                    include_content: true,
260                    ..Default::default()
261                })
262                .map_err(|err| anyhow!("Failed to generate source map: {err}"))?;
263
264            let adjustment_sourcemap = SourceMap::from_slice(
265                adjustment
266                    .to_string()
267                    .map_err(|err| anyhow!("Failed to serialize source map: {err}"))?
268                    .as_bytes(),
269            )
270            .map_err(|err| anyhow!("Failed to parse adjustment sourcemap: {err}"))?;
271
272            (magic_source.to_string(), adjustment_sourcemap)
273        };
274
275        self.inner.content = new_source_content;
276        Ok(source_adjustment)
277    }
278}
279
280impl TryInto<SymbolSetUpload> for SourceMapFile {
281    type Error = anyhow::Error;
282
283    fn try_into(self) -> Result<SymbolSetUpload> {
284        let chunk_id = self
285            .get_chunk_id()
286            .ok_or_else(|| anyhow!("Chunk ID not found"))?;
287
288        let release_id = self.get_release_id();
289        let sourcemap = self.inner.content;
290        let content = serde_json::to_string(&sourcemap)?;
291        if !sourcemap.fields.contains_key("x_hermes_function_offsets") {
292            bail!("Map is not a hermes sourcemap - missing key x_hermes_function_offsets");
293        }
294
295        let data = HermesMap { sourcemap: content };
296
297        let data = write_symbol_data(data)?;
298
299        Ok(SymbolSetUpload {
300            chunk_id,
301            release_id,
302            data,
303        })
304    }
305}