// turbopack_core/source_map/utils.rs

use std::collections::HashSet;

use anyhow::{Context, Result};
use const_format::concatcp;
use once_cell::sync::Lazy;
use regex::Regex;
use serde::{Deserialize, Serialize};
use turbo_tasks::Vc;
use turbo_tasks_fs::{
    DiskFileSystem, FileContent, FileSystemPath, rope::Rope, util::uri_from_file,
};

use crate::SOURCE_URL_PROTOCOL;
14
15pub fn add_default_ignore_list(map: &mut swc_sourcemap::SourceMap) {
16 let mut ignored_ids = HashSet::new();
17
18 for (source_id, source) in map.sources().enumerate() {
19 if source.starts_with(concatcp!(SOURCE_URL_PROTOCOL, "///[next]"))
20 || source.starts_with(concatcp!(SOURCE_URL_PROTOCOL, "///[turbopack]"))
21 || source.contains("/node_modules/")
22 || source.ends_with("__nextjs-internal-proxy.cjs")
23 || source.ends_with("__nextjs-internal-proxy.mjs")
24 {
25 ignored_ids.insert(source_id);
26 }
27 }
28
29 for ignored_id in ignored_ids {
30 map.add_to_ignore_list(ignored_id as _);
31 }
32}
33
/// JSON shape of a section `offset` in an indexed source map: the generated
/// line/column at which the section's map starts to apply.
#[derive(Serialize, Deserialize)]
struct SourceMapSectionOffsetJson {
    line: u32,
    // Column offset within `line`; the spec calls this field "column" in
    // prose but it is serialized as `offset` here.
    offset: u32,
}
39
/// JSON shape of one entry of the `sections` array of an indexed source map:
/// a nested source map plus the generated position where it takes effect.
#[derive(Serialize, Deserialize)]
struct SourceMapSectionItemJson {
    offset: SourceMapSectionOffsetJson,
    map: SourceMapJson,
}
45
46#[derive(Serialize, Deserialize)]
49#[serde(rename_all = "camelCase")]
50struct SourceMapJson {
51 version: u32,
52 #[serde(skip_serializing_if = "Option::is_none")]
53 file: Option<String>,
54 #[serde(skip_serializing_if = "Option::is_none")]
55 source_root: Option<String>,
56 #[serde(skip_serializing_if = "Option::is_none")]
58 sources: Option<Vec<Option<String>>>,
59 #[serde(skip_serializing_if = "Option::is_none")]
60 sources_content: Option<Vec<Option<String>>>,
61 #[serde(skip_serializing_if = "Option::is_none")]
62 names: Option<Vec<String>>,
63 mappings: String,
64 #[serde(skip_serializing_if = "Option::is_none")]
65 ignore_list: Option<Vec<u32>>,
66
67 debug_id: Option<String>,
69
70 #[serde(skip_serializing_if = "Option::is_none")]
71 sections: Option<Vec<SourceMapSectionItemJson>>,
72}
73
74pub async fn resolve_source_map_sources(
77 map: Option<&Rope>,
78 origin: FileSystemPath,
79) -> Result<Option<Rope>> {
80 async fn resolve_source(
81 original_source: &mut String,
82 original_content: Option<&mut Option<String>>,
83 origin: FileSystemPath,
84 ) -> Result<()> {
85 if let Some(path) = origin.parent().try_join(original_source)? {
86 let path_str = path.value_to_string().await?;
87 let source = format!("{SOURCE_URL_PROTOCOL}///{path_str}");
88 *original_source = source;
89
90 if let Some(original_content) = original_content
91 && original_content.is_none()
92 {
93 if let FileContent::Content(file) = &*path.read().await? {
94 let text = file.content().to_str()?;
95 *original_content = Some(text.to_string())
96 } else {
97 *original_content = Some(format!("unable to read source {path_str}"));
98 }
99 }
100 } else {
101 let origin_str = origin.value_to_string().await?;
102 static INVALID_REGEX: Lazy<Regex> =
103 Lazy::new(|| Regex::new(r#"(?:^|/)(?:\.\.?(?:/|$))+"#).unwrap());
104 let source = INVALID_REGEX.replace_all(original_source, |s: ®ex::Captures<'_>| {
105 s[0].replace('.', "_")
106 });
107 *original_source = format!("{SOURCE_URL_PROTOCOL}///{origin_str}/{source}");
108 if let Some(original_content) = original_content
109 && original_content.is_none()
110 {
111 *original_content = Some(format!(
112 "unable to access {original_source} in {origin_str} (it's leaving the \
113 filesystem root)"
114 ));
115 }
116 }
117 anyhow::Ok(())
118 }
119
120 async fn resolve_map(map: &mut SourceMapJson, origin: FileSystemPath) -> Result<()> {
121 if let Some(sources) = &mut map.sources {
122 let mut contents = if let Some(mut contents) = map.sources_content.take() {
123 contents.resize(sources.len(), None);
124 contents
125 } else {
126 Vec::with_capacity(sources.len())
127 };
128
129 for (source, content) in sources.iter_mut().zip(contents.iter_mut()) {
130 if let Some(source) = source {
131 resolve_source(source, Some(content), origin.clone()).await?;
132 }
133 }
134
135 map.sources_content = Some(contents);
136 }
137 Ok(())
138 }
139
140 let Some(map) = map else {
141 return Ok(None);
142 };
143
144 let Ok(mut map): serde_json::Result<SourceMapJson> = serde_json::from_reader(map.read()) else {
145 return Ok(None);
147 };
148
149 if let Some(file) = &mut map.file {
150 resolve_source(file, None, origin.clone()).await?;
151 }
152
153 resolve_map(&mut map, origin.clone()).await?;
154 for section in map.sections.iter_mut().flatten() {
155 resolve_map(&mut section.map, origin.clone()).await?;
156 }
157
158 let map = Rope::from(serde_json::to_vec(&map)?);
159 Ok(Some(map))
160}
161
/// Rewrites `{SOURCE_URL_PROTOCOL}///[<fs name>]/...` sources of a source
/// map (and of its nested `sections`) into URIs of real files on disk,
/// resolved against `context_path`.
///
/// Returns `Ok(None)` when `map` is `None` or does not parse as a source
/// map. Errors when the filesystem behind `context_path` is not a
/// `DiskFileSystem`.
pub async fn fileify_source_map(
    map: Option<&Rope>,
    context_path: FileSystemPath,
) -> Result<Option<Rope>> {
    let Some(map) = map else {
        return Ok(None);
    };

    // A map that fails to parse is treated as absent rather than an error.
    let Ok(mut map): serde_json::Result<SourceMapJson> = serde_json::from_reader(map.read()) else {
        return Ok(None);
    };

    let context_fs = context_path.fs();
    let context_fs = &*Vc::try_resolve_downcast_type::<DiskFileSystem>(context_fs)
        .await?
        .context("Expected the chunking context to have a DiskFileSystem")?
        .await?;
    // Sources produced for this filesystem are prefixed with
    // `{SOURCE_URL_PROTOCOL}///[<fs name>]/`.
    let prefix = format!("{}///[{}]/", SOURCE_URL_PROTOCOL, context_fs.name());

    // Rewrites a single source in place when it carries the prefix above;
    // all other sources are left untouched.
    let transform_source = async |src: &mut Option<String>| {
        if let Some(src) = src
            && let Some(src_rest) = src.strip_prefix(&prefix)
        {
            *src = uri_from_file(context_path.clone(), Some(src_rest)).await?;
        }
        anyhow::Ok(())
    };

    for src in map.sources.iter_mut().flatten() {
        transform_source(src).await?;
    }
    // Indexed maps keep their sources per section.
    for section in map.sections.iter_mut().flatten() {
        for src in section.map.sources.iter_mut().flatten() {
            transform_source(src).await?;
        }
    }

    let map = Rope::from(serde_json::to_vec(&map)?);

    Ok(Some(map))
}