1use std::{
2 borrow::Cow,
3 io::Write,
4 path::{Path, PathBuf},
5 sync::Arc,
6 thread,
7 time::Duration,
8};
9
10use anyhow::{Context, Result, anyhow, bail};
11use bincode::{Decode, Encode};
12use flate2::write::GzEncoder;
13use futures_util::TryFutureExt;
14use napi::{
15 Env, JsFunction, JsObject, Status,
16 bindgen_prelude::{External, within_runtime_if_available},
17 threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode},
18};
19use napi_derive::napi;
20use next_api::{
21 entrypoints::Entrypoints,
22 next_server_nft::next_server_nft_assets,
23 operation::{
24 EntrypointsOperation, InstrumentationOperation, MiddlewareOperation, OptionEndpoint,
25 RouteOperation,
26 },
27 project::{
28 DebugBuildPaths, DefineEnv, DraftModeOptions, HmrTarget, PartialProjectOptions, Project,
29 ProjectContainer, ProjectOptions, WatchOptions,
30 },
31 project_asset_hashes_manifest::immutable_hashes_manifest_asset_if_enabled,
32 route::{Endpoint, EndpointGroupKey, Route},
33 routes_hashes_manifest::routes_hashes_manifest_asset_if_enabled,
34};
35use next_core::{
36 app_structure::find_app_dir,
37 tracing_presets::{
38 TRACING_NEXT_OVERVIEW_TARGETS, TRACING_NEXT_TARGETS, TRACING_NEXT_TURBO_TASKS_TARGETS,
39 TRACING_NEXT_TURBOPACK_TARGETS,
40 },
41};
42use once_cell::sync::Lazy;
43use rand::RngExt;
44use serde::Serialize;
45use tokio::{io::AsyncWriteExt, runtime::Handle, time::Instant};
46use tracing::Instrument;
47use tracing_subscriber::{Registry, layer::SubscriberExt, util::SubscriberInitExt};
48use turbo_rcstr::{RcStr, rcstr};
49use turbo_tasks::{
50 Effects, FxIndexSet, NonLocalValue, OperationValue, OperationVc, PrettyPrintError, ReadRef,
51 ResolvedVc, TaskInput, TransientInstance, TryJoinIterExt, TurboTasksApi, TurboTasksCallApi,
52 UpdateInfo, Vc, get_effects,
53 message_queue::{CompilationEvent, Severity},
54 trace::TraceRawVcs,
55};
56use turbo_tasks_backend::{BackingStorage, db_invalidation::invalidation_reasons};
57use turbo_tasks_fs::{
58 DiskFileSystem, FileContent, FileSystem, FileSystemPath, invalidation, util::uri_from_file,
59};
60use turbo_unix_path::{get_relative_path_to, sys_to_unix, unix_to_sys};
61use turbopack_core::{
62 PROJECT_FILESYSTEM_NAME, SOURCE_URL_PROTOCOL,
63 diagnostics::PlainDiagnostic,
64 issue::{IssueFilter, PlainIssue},
65 output::{OutputAsset, OutputAssets},
66 source_map::{SourceMap, Token},
67 version::{PartialUpdate, TotalUpdate, Update, VersionState},
68};
69use turbopack_ecmascript_hmr_protocol::{ClientUpdateInstruction, Issue, ResourceIdentifier};
70use turbopack_trace_utils::{
71 exit::{ExitHandler, ExitReceiver},
72 filter_layer::FilterLayer,
73 raw_trace::RawTraceLayer,
74 trace_writer::TraceWriter,
75};
76use url::Url;
77
78use crate::{
79 next_api::{
80 analyze::{WriteAnalyzeResult, write_analyze_data_with_issues_operation},
81 endpoint::ExternalEndpoint,
82 turbopack_ctx::{
83 NapiNextTurbopackCallbacks, NapiNextTurbopackCallbacksJsObject, NextTurboTasks,
84 NextTurbopackContext, create_turbo_tasks,
85 },
86 utils::{
87 DetachedVc, NapiDiagnostic, NapiIssue, RootTask, TurbopackResult, get_diagnostics,
88 get_issues, strongly_consistent_catch_collectables, subscribe,
89 },
90 },
91 util::DhatProfilerGuard,
92};
93
/// If the file-I/O benchmark takes longer than this, warn the user that their
/// filesystem is slow (see `benchmark_file_io`).
const SLOW_FILESYSTEM_THRESHOLD: Duration = Duration::from_millis(200);
/// Prefix of source-map URLs built from the source URL protocol.
static SOURCE_MAP_PREFIX: Lazy<String> = Lazy::new(|| format!("{SOURCE_URL_PROTOCOL}///"));
/// Like [`SOURCE_MAP_PREFIX`], but scoped to the project filesystem root.
static SOURCE_MAP_PREFIX_PROJECT: Lazy<String> =
    Lazy::new(|| format!("{SOURCE_URL_PROTOCOL}///[{PROJECT_FILESYSTEM_NAME}]/"));
100
/// Returns the issue filter configured on the container's project.
fn issue_filter_from_container(container: ResolvedVc<ProjectContainer>) -> Vc<IssueFilter> {
    container.project().issue_filter()
}
105
/// A single environment variable (name/value pair) passed from JS.
#[napi(object)]
#[derive(Clone, Debug)]
pub struct NapiEnvVar {
    pub name: RcStr,
    pub value: RcStr,
}
112
/// An environment variable whose value may be absent (used for define-env
/// replacements — see [`NapiDefineEnv`]).
#[napi(object)]
#[derive(Clone, Debug)]
pub struct NapiOptionEnvVar {
    pub name: RcStr,
    pub value: Option<RcStr>,
}
119
/// Secrets for preview/draft mode, converted into the core
/// [`DraftModeOptions`] via `From`.
#[napi(object)]
pub struct NapiDraftModeOptions {
    pub preview_mode_id: RcStr,
    pub preview_mode_encryption_key: RcStr,
    pub preview_mode_signing_key: RcStr,
}
126
127impl From<NapiDraftModeOptions> for DraftModeOptions {
128 fn from(val: NapiDraftModeOptions) -> Self {
129 DraftModeOptions {
130 preview_mode_id: val.preview_mode_id,
131 preview_mode_encryption_key: val.preview_mode_encryption_key,
132 preview_mode_signing_key: val.preview_mode_signing_key,
133 }
134 }
135}
136
/// Filesystem-watching configuration passed from JS.
#[napi(object)]
pub struct NapiWatchOptions {
    /// Whether filesystem watching is enabled.
    pub enable: bool,

    /// Optional polling interval in milliseconds. Non-finite or non-positive
    /// values are discarded during conversion (see the `From` impl below).
    pub poll_interval_ms: Option<f64>,
}
146
/// Options for creating a new project, passed from JS and converted into the
/// core [`ProjectOptions`] (see the `From` impl below).
#[napi(object)]
pub struct NapiProjectOptions {
    /// Root of the workspace; `project_path` and `dist_dir` are joined onto
    /// this when locating the internal output directory (see `project_new`).
    pub root_path: RcStr,

    /// Path of the Next.js project, joined onto `root_path`.
    pub project_path: RcStr,

    /// Output (`distDir`) directory. Consumed directly (trace file location,
    /// persistent cache path via `create_turbo_tasks`) and NOT forwarded into
    /// `ProjectOptions`.
    pub dist_dir: RcStr,

    /// Filesystem watching configuration.
    pub watch: NapiWatchOptions,

    /// The Next.js config as a string (serialization format not visible here).
    pub next_config: RcStr,

    /// Environment variables made available to the compilation.
    pub env: Vec<NapiEnvVar>,

    /// Compile-time define replacements per runtime (client/edge/nodejs).
    pub define_env: NapiDefineEnv,

    /// Whether the project runs in development mode.
    pub dev: bool,

    /// Encryption key (presumably for server actions — TODO confirm against
    /// `ProjectOptions::encryption_key`).
    pub encryption_key: RcStr,

    /// The build id.
    pub build_id: RcStr,

    /// Preview (draft) mode secrets.
    pub preview_props: NapiDraftModeOptions,

    /// Browserslist query string forwarded to the project.
    pub browserslist_query: RcStr,

    /// When true, identifier mangling is disabled (per the name — confirm).
    pub no_mangling: bool,

    /// Whether to write the routes hashes manifest.
    pub write_routes_hashes_manifest: bool,

    /// Version string of the Node.js runtime in use.
    pub current_node_js_version: RcStr,

    /// Optional app/pages path subsets for debug builds.
    pub debug_build_paths: Option<NapiDebugBuildPaths>,

    /// App routes whose compilation is deferred to a later write phase (see
    /// `EntrypointsWritePhase` and `is_deferred_app_route`).
    pub deferred_entries: Option<Vec<RcStr>>,

    /// Whether persistent (on-disk) turbo-tasks caching is enabled.
    pub is_persistent_caching_enabled: bool,

    /// The Next.js version string.
    pub next_version: RcStr,

    /// Whether server-side HMR is enabled; `None` defaults to `false`.
    pub server_hmr: Option<bool>,
}
218
/// Partial variant of [`NapiProjectOptions`] used by `project_update`: every
/// field is optional, and only `Some` fields are applied (presumably leaving
/// the rest unchanged — see `ProjectContainer::update`).
#[napi(object)]
pub struct NapiPartialProjectOptions {
    pub root_path: Option<RcStr>,

    pub project_path: Option<RcStr>,

    pub watch: Option<NapiWatchOptions>,

    pub next_config: Option<RcStr>,

    pub env: Option<Vec<NapiEnvVar>>,

    pub define_env: Option<NapiDefineEnv>,

    pub dev: Option<bool>,

    pub encryption_key: Option<RcStr>,

    pub build_id: Option<RcStr>,

    pub preview_props: Option<NapiDraftModeOptions>,

    pub browserslist_query: Option<RcStr>,

    pub write_routes_hashes_manifest: Option<bool>,

    pub no_mangling: Option<bool>,
}
268
/// Compile-time define replacements, one list per runtime environment.
#[napi(object)]
#[derive(Clone, Debug)]
pub struct NapiDefineEnv {
    pub client: Vec<NapiOptionEnvVar>,
    pub edge: Vec<NapiOptionEnvVar>,
    pub nodejs: Vec<NapiOptionEnvVar>,
}
276
/// Tuning options for the turbo-tasks engine (see `project_new` for how the
/// defaults are applied).
#[napi(object)]
pub struct NapiTurboEngineOptions {
    /// Memory limit (unit not visible here — TODO confirm bytes); `None`
    /// means unlimited (`usize::MAX`).
    pub memory_limit: Option<f64>,
    /// Whether dependency tracking is enabled; defaults to `true` when unset.
    pub dependency_tracking: Option<bool>,
    /// Whether we are running on CI; defaults to `false` when unset.
    pub is_ci: Option<bool>,
    /// Whether the session is expected to be short-lived; defaults to `false`
    /// when unset.
    pub is_short_session: Option<bool>,
}
288
289impl From<NapiWatchOptions> for WatchOptions {
290 fn from(val: NapiWatchOptions) -> Self {
291 WatchOptions {
292 enable: val.enable,
293 poll_interval: val
294 .poll_interval_ms
295 .filter(|interval| !interval.is_nan() && interval.is_finite() && *interval > 0.0)
296 .map(|interval| Duration::from_secs_f64(interval / 1000.0)),
297 }
298 }
299}
300
impl From<NapiProjectOptions> for ProjectOptions {
    /// Converts the JS-facing options into core [`ProjectOptions`].
    ///
    /// `dist_dir` is intentionally dropped here: it is consumed separately
    /// (e.g. by `create_turbo_tasks` in `project_new`) and has no counterpart
    /// in `ProjectOptions`.
    fn from(val: NapiProjectOptions) -> Self {
        // Exhaustive destructuring so adding a field to NapiProjectOptions
        // forces this conversion to be revisited.
        let NapiProjectOptions {
            root_path,
            project_path,
            dist_dir: _,
            watch,
            next_config,
            env,
            define_env,
            dev,
            encryption_key,
            build_id,
            preview_props,
            browserslist_query,
            no_mangling,
            write_routes_hashes_manifest,
            current_node_js_version,
            debug_build_paths,
            deferred_entries,
            is_persistent_caching_enabled,
            next_version,
            server_hmr,
        } = val;
        ProjectOptions {
            root_path,
            project_path,
            watch: watch.into(),
            next_config,
            env: env.into_iter().map(|var| (var.name, var.value)).collect(),
            define_env: define_env.into(),
            dev,
            encryption_key,
            build_id,
            preview_props: preview_props.into(),
            browserslist_query,
            no_mangling,
            write_routes_hashes_manifest,
            current_node_js_version,
            debug_build_paths: debug_build_paths.map(|p| DebugBuildPaths {
                app: p.app,
                pages: p.pages,
            }),
            deferred_entries,
            is_persistent_caching_enabled,
            next_version,
            // Server HMR is opt-in; absent means disabled.
            server_hmr: server_hmr.unwrap_or(false),
        }
    }
}
352
impl From<NapiPartialProjectOptions> for PartialProjectOptions {
    /// Converts the JS-facing partial options; `None` fields stay `None`.
    /// `debug_build_paths` cannot be changed through a partial update and is
    /// always `None` here.
    fn from(val: NapiPartialProjectOptions) -> Self {
        let NapiPartialProjectOptions {
            root_path,
            project_path,
            watch,
            next_config,
            env,
            define_env,
            dev,
            encryption_key,
            build_id,
            preview_props,
            browserslist_query,
            no_mangling,
            write_routes_hashes_manifest,
        } = val;
        PartialProjectOptions {
            root_path,
            project_path,
            watch: watch.map(From::from),
            next_config,
            env: env.map(|env| env.into_iter().map(|var| (var.name, var.value)).collect()),
            define_env: define_env.map(|env| env.into()),
            dev,
            encryption_key,
            build_id,
            preview_props: preview_props.map(|props| props.into()),
            browserslist_query,
            no_mangling,
            write_routes_hashes_manifest,
            debug_build_paths: None,
        }
    }
}
388
389impl From<NapiDefineEnv> for DefineEnv {
390 fn from(val: NapiDefineEnv) -> Self {
391 DefineEnv {
392 client: val
393 .client
394 .into_iter()
395 .map(|var| (var.name, var.value))
396 .collect(),
397 edge: val
398 .edge
399 .into_iter()
400 .map(|var| (var.name, var.value))
401 .collect(),
402 nodejs: val
403 .nodejs
404 .into_iter()
405 .map(|var| (var.name, var.value))
406 .collect(),
407 }
408 }
409}
410
/// Native state behind the opaque `Project` external handle held by JS.
pub struct ProjectInstance {
    /// Turbo-tasks engine handle plus the JS callbacks.
    turbopack_ctx: NextTurbopackContext,
    /// Root project container all project operations go through.
    container: ResolvedVc<ProjectContainer>,
    /// Taken (at most once) by `project_on_exit_internal` to run exit
    /// handlers; `None` afterwards.
    exit_receiver: tokio::sync::Mutex<Option<ExitReceiver>>,
}
416
/// Creates a new Turbopack [`ProjectInstance`] for the JS side.
///
/// Sets up, in order: the exit handler, optional dhat heap profiling,
/// optional tracing (`NEXT_TURBOPACK_TRACING`, with optional gzip compression
/// of the trace file and an optional trace server via
/// `NEXT_TURBOPACK_TRACE_SERVER`), the turbo-tasks engine, optional task
/// statistics output (`NEXT_TURBOPACK_TASK_STATISTICS`), the
/// [`ProjectContainer`], and — in dev mode — a background filesystem
/// benchmark.
#[napi(ts_return_type = "Promise<{ __napiType: \"Project\" }>")]
pub fn project_new(
    env: Env,
    options: NapiProjectOptions,
    turbo_engine_options: NapiTurboEngineOptions,
    napi_callbacks: NapiNextTurbopackCallbacksJsObject,
) -> napi::Result<JsObject> {
    let napi_callbacks = NapiNextTurbopackCallbacks::from_js(&env, napi_callbacks)?;
    let (exit, exit_receiver) = ExitHandler::new_receiver();

    // If dhat heap profiling is active, drop the profiler (flushing its
    // output) on exit, off the async runtime.
    if let Some(dhat_profiler) = DhatProfilerGuard::try_init() {
        exit.on_exit(async move {
            tokio::task::spawn_blocking(move || drop(dhat_profiler))
                .await
                .unwrap()
        });
    }

    // Tracing is opt-in via NEXT_TURBOPACK_TRACING; an empty value counts as
    // unset.
    let mut trace = std::env::var("NEXT_TURBOPACK_TRACING")
        .ok()
        .filter(|v| !v.is_empty());

    // tokio-console requires a tracing subscriber, so force one on.
    if cfg!(feature = "tokio-console") && trace.is_none() {
        trace = Some("overview".to_owned());
    }

    enum Compression {
        None,
        GzipFast,
        GzipBest,
    }
    let mut compress = Compression::None;
    if let Some(mut trace) = trace {
        let internal_dir = PathBuf::from(&options.root_path)
            .join(&options.project_path)
            .join(&options.dist_dir);
        let trace_file = internal_dir.join("trace-turbopack");

        println!("Turbopack tracing enabled with targets: {trace}");
        println!(" Note that this might have a small performance impact.");
        println!(" Trace output will be written to {}", trace_file.display());

        // Expand preset names into concrete tracing target lists; "gz" /
        // "gz-best" are compression flags rather than targets and are
        // filtered out of the target string.
        trace = trace
            .split(",")
            .filter_map(|item| {
                Some(match item {
                    "overview" | "1" => Cow::Owned(TRACING_NEXT_OVERVIEW_TARGETS.join(",")),
                    "next" => Cow::Owned(TRACING_NEXT_TARGETS.join(",")),
                    "turbopack" => Cow::Owned(TRACING_NEXT_TURBOPACK_TARGETS.join(",")),
                    "turbo-tasks" => Cow::Owned(TRACING_NEXT_TURBO_TASKS_TARGETS.join(",")),
                    "gz" => {
                        compress = Compression::GzipFast;
                        return None;
                    }
                    "gz-best" => {
                        compress = Compression::GzipBest;
                        return None;
                    }
                    _ => Cow::Borrowed(item),
                })
            })
            .intersperse_with(|| Cow::Borrowed(","))
            .collect::<String>();

        let subscriber = Registry::default();

        if cfg!(feature = "tokio-console") {
            trace = format!("{trace},tokio=trace,runtime=trace");
        }
        #[cfg(feature = "tokio-console")]
        let subscriber = subscriber.with(console_subscriber::spawn());

        let subscriber = subscriber.with(FilterLayer::try_new(&trace).unwrap());

        std::fs::create_dir_all(&internal_dir)
            .context("Unable to create .next directory")
            .unwrap();
        // Write the raw trace — optionally gzip-compressed — into dist_dir.
        let (trace_writer, trace_writer_guard) = match compress {
            Compression::None => {
                let trace_writer = std::fs::File::create(trace_file.clone()).unwrap();
                TraceWriter::new(trace_writer)
            }
            Compression::GzipFast => {
                let trace_writer = std::fs::File::create(trace_file.clone()).unwrap();
                let trace_writer = GzEncoder::new(trace_writer, flate2::Compression::fast());
                TraceWriter::new(trace_writer)
            }
            Compression::GzipBest => {
                let trace_writer = std::fs::File::create(trace_file.clone()).unwrap();
                let trace_writer = GzEncoder::new(trace_writer, flate2::Compression::best());
                TraceWriter::new(trace_writer)
            }
        };
        let subscriber = subscriber.with(RawTraceLayer::new(trace_writer));

        // Dropping the guard flushes/closes the trace file; do it on exit,
        // off the async runtime.
        exit.on_exit(async move {
            tokio::task::spawn_blocking(move || drop(trace_writer_guard))
                .await
                .unwrap();
        });

        let trace_server = std::env::var("NEXT_TURBOPACK_TRACE_SERVER").ok();
        if trace_server.is_some() {
            thread::spawn(move || {
                turbopack_trace_server::start_turbopack_trace_server(trace_file, None);
            });
            println!("Turbopack trace server started. View trace at https://trace.nextjs.org");
        }

        subscriber.init();
    }

    env.spawn_future(
        async move {
            // Engine option defaults: unlimited memory, dependency tracking
            // on, not CI, not a short session.
            let memory_limit = turbo_engine_options
                .memory_limit
                .map(|m| m as usize)
                .unwrap_or(usize::MAX);
            let dependency_tracking = turbo_engine_options.dependency_tracking.unwrap_or(true);
            let is_ci = turbo_engine_options.is_ci.unwrap_or(false);
            let is_short_session = turbo_engine_options.is_short_session.unwrap_or(false);
            let turbo_tasks = create_turbo_tasks(
                PathBuf::from(&options.dist_dir),
                options.is_persistent_caching_enabled,
                memory_limit,
                dependency_tracking,
                is_ci,
                is_short_session,
            )?;
            let turbopack_ctx = NextTurbopackContext::new(turbo_tasks.clone(), napi_callbacks);

            // Optionally dump per-task statistics as JSON on exit.
            if let Some(stats_path) = std::env::var_os("NEXT_TURBOPACK_TASK_STATISTICS") {
                let task_stats = turbo_tasks.task_statistics().enable().clone();
                exit.on_exit(async move {
                    tokio::task::spawn_blocking(move || {
                        let mut file = std::fs::File::create(&stats_path)
                            .with_context(|| format!("failed to create or open {stats_path:?}"))?;
                        serde_json::to_writer(&file, &task_stats)
                            .context("failed to serialize or write task statistics")?;
                        file.flush().context("failed to flush file")
                    })
                    .await
                    .unwrap()
                    .unwrap();
                });
            }

            let options = ProjectOptions::from(options);
            let is_dev = options.dev;
            let root_path = options.root_path.clone();
            // Create and initialize the project container inside turbo-tasks;
            // internal errors are surfaced through the JS callbacks.
            let container = turbo_tasks
                .run(async move {
                    let container_op = ProjectContainer::new_operation(rcstr!("next.js"), is_dev);
                    ProjectContainer::initialize(container_op, options).await?;
                    container_op.resolve_strongly_consistent().await
                })
                .or_else(|e| turbopack_ctx.throw_turbopack_internal_result(&e.into()))
                .await?;

            // In dev mode, benchmark the node_root filesystem in the
            // background and warn if it looks slow (see `benchmark_file_io`).
            if is_dev {
                Handle::current().spawn({
                    let tt = turbo_tasks.clone();
                    let root_path = root_path.clone();
                    async move {
                        let result = tt
                            .clone()
                            .run(async move {
                                #[turbo_tasks::function(operation)]
                                fn project_node_root_path_operation(
                                    container: ResolvedVc<ProjectContainer>,
                                ) -> Vc<FileSystemPath> {
                                    container.project().node_root()
                                }

                                let mut absolute_benchmark_dir = PathBuf::from(root_path);
                                absolute_benchmark_dir.push(
                                    &project_node_root_path_operation(container)
                                        .read_strongly_consistent()
                                        .await?
                                        .path,
                                );
                                benchmark_file_io(&tt, &absolute_benchmark_dir).await
                            })
                            .await;
                        // Benchmark failures are non-fatal; just log them.
                        if let Err(err) = result {
                            println!("Failed to benchmark file I/O: {err}");
                        }
                    }
                    .instrument(tracing::info_span!("benchmark file I/O"))
                });
            }

            Ok(External::new(ProjectInstance {
                turbopack_ctx,
                container,
                exit_receiver: tokio::sync::Mutex::new(Some(exit_receiver)),
            }))
        }
        .instrument(tracing::info_span!("create project")),
    )
}
624
/// Compilation event emitted when the file-I/O benchmark exceeds
/// [`SLOW_FILESYSTEM_THRESHOLD`] (see `benchmark_file_io`).
#[derive(Debug, Clone, Serialize)]
struct SlowFilesystemEvent {
    // Directory that was benchmarked.
    directory: String,
    // Total benchmark duration in milliseconds.
    duration_ms: u128,
}
630
impl CompilationEvent for SlowFilesystemEvent {
    // Type tag consumed by compilation-event subscribers.
    fn type_name(&self) -> &'static str {
        "SlowFilesystemEvent"
    }

    fn severity(&self) -> Severity {
        Severity::Warning
    }

    // Human-readable warning shown to the user.
    fn message(&self) -> String {
        format!(
            "Slow filesystem detected. The benchmark took {}ms. If {} is a network drive, \
             consider moving it to a local folder.\n\
             See more: https://nextjs.org/docs/app/guides/local-development",
            self.duration_ms, self.directory
        )
    }

    fn to_json(&self) -> String {
        serde_json::to_string(self).unwrap()
    }
}
653
654async fn benchmark_file_io(turbo_tasks: &NextTurboTasks, dir: &Path) -> Result<()> {
662 let temp_path = dir.join(format!(
663 "tmp_file_io_benchmark_{:x}",
664 rand::random::<u128>()
665 ));
666
667 let mut random_buffer = [0u8; 512];
668 rand::rng().fill(&mut random_buffer[..]);
669
670 let start = Instant::now();
674 async {
675 for _ in 0..3 {
676 let mut file = tokio::fs::File::create(&temp_path).await?;
678 file.write_all(&random_buffer).await?;
679 file.sync_all().await?;
680 drop(file);
681
682 tokio::fs::remove_file(&temp_path).await?;
684 }
685 anyhow::Ok(())
686 }
687 .instrument(tracing::info_span!("benchmark file IO (measurement)", path = %temp_path.display()))
688 .await?;
689
690 let duration = Instant::now().duration_since(start);
691 if duration > SLOW_FILESYSTEM_THRESHOLD {
692 turbo_tasks.send_compilation_event(Arc::new(SlowFilesystemEvent {
693 directory: dir.to_string_lossy().into(),
694 duration_ms: duration.as_millis(),
695 }));
696 }
697
698 Ok(())
699}
700
/// Applies a partial options update to the project's container; `None` fields
/// in `options` are not applied. Internal errors are surfaced through the
/// Turbopack context's JS callbacks.
#[tracing::instrument(level = "info", name = "update project", skip_all)]
#[napi]
pub async fn project_update(
    #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
    options: NapiPartialProjectOptions,
) -> napi::Result<()> {
    let ctx = &project.turbopack_ctx;
    let options = options.into();
    let container = project.container;

    ctx.turbo_tasks()
        .run(async move { container.update(options).await })
        .or_else(|e| ctx.throw_turbopack_internal_result(&e.into()))
        .await
}
716
/// Invalidates the persistent turbo-tasks backing storage at the user's
/// request. Runs via `spawn_blocking` because the invalidation call is
/// synchronous; a panic inside it is converted into an error.
#[napi]
pub async fn project_invalidate_file_system_cache(
    #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
) -> napi::Result<()> {
    tokio::task::spawn_blocking(move || {
        project
            .turbopack_ctx
            .turbo_tasks()
            .backend()
            .backing_storage()
            .invalidate(invalidation_reasons::USER_REQUEST)
    })
    .await
    .context("panicked while invalidating filesystem cache")??;
    Ok(())
}
737
/// Runs the project's registered exit handlers. Must be called at most once
/// per project (panics otherwise). Does not stop turbo-tasks — see
/// `project_shutdown` for that.
#[napi]
pub async fn project_on_exit(
    #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
) {
    project_on_exit_internal(&project).await
}
748
749async fn project_on_exit_internal(project: &ProjectInstance) {
750 let exit_receiver = project.exit_receiver.lock().await.take();
751 exit_receiver
752 .expect("`project.onExitSync` must only be called once")
753 .run_exit_handler()
754 .await;
755}
756
/// Stops the turbo-tasks engine and waits for it to finish, then runs the
/// exit handlers. Like `project_on_exit`, this must not be called after the
/// exit handlers have already run (panics otherwise).
#[tracing::instrument(level = "info", name = "shutdown project", skip_all)]
#[napi]
pub async fn project_shutdown(
    #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
) {
    project.turbopack_ctx.turbo_tasks().stop_and_wait().await;
    project_on_exit_internal(&project).await;
}
770
/// One page of an app-router route as exposed to JS, with its HTML and RSC
/// endpoint handles (see `NapiRoute::from_route`).
#[napi(object)]
#[derive(Default)]
pub struct AppPageNapiRoute {
    /// The original (source) name of the page — TODO confirm exact semantics
    /// against `AppPageRoute::original_name`.
    pub original_name: Option<RcStr>,

    pub html_endpoint: Option<External<ExternalEndpoint>>,
    pub rsc_endpoint: Option<External<ExternalEndpoint>>,
}
780
/// A single route as exposed to JS. Which endpoint fields are populated
/// depends on `type`, one of "page", "page-api", "app-page", "app-route" or
/// "conflict" (see `NapiRoute::from_route`).
#[napi(object)]
#[derive(Default)]
pub struct NapiRoute {
    /// The router path of this route.
    pub pathname: String,
    pub original_name: Option<RcStr>,

    /// Discriminant telling JS which route kind this is.
    pub r#type: &'static str,

    /// Populated for "app-page" routes: one entry per page on this path.
    pub pages: Option<Vec<AppPageNapiRoute>>,

    pub endpoint: Option<External<ExternalEndpoint>>,
    pub html_endpoint: Option<External<ExternalEndpoint>>,
    pub rsc_endpoint: Option<External<ExternalEndpoint>>,
    pub data_endpoint: Option<External<ExternalEndpoint>>,
}
800
impl NapiRoute {
    /// Converts a [`RouteOperation`] into its JS representation, wrapping
    /// every endpoint operation in an [`ExternalEndpoint`] handle that
    /// bundles the (cloned) Turbopack context with the operation.
    fn from_route(
        pathname: String,
        value: RouteOperation,
        turbopack_ctx: &NextTurbopackContext,
    ) -> Self {
        // Shared endpoint wrapper; always returns `Some` so it can be
        // assigned directly to the optional endpoint fields.
        let convert_endpoint = |endpoint: OperationVc<OptionEndpoint>| {
            Some(External::new(ExternalEndpoint(DetachedVc::new(
                turbopack_ctx.clone(),
                endpoint,
            ))))
        };
        // Unused fields fall back to `Default` (i.e. `None`) per variant.
        match value {
            RouteOperation::Page {
                html_endpoint,
                data_endpoint,
            } => NapiRoute {
                pathname,
                r#type: "page",
                html_endpoint: convert_endpoint(html_endpoint),
                data_endpoint: convert_endpoint(data_endpoint),
                ..Default::default()
            },
            RouteOperation::PageApi { endpoint } => NapiRoute {
                pathname,
                r#type: "page-api",
                endpoint: convert_endpoint(endpoint),
                ..Default::default()
            },
            RouteOperation::AppPage(pages) => NapiRoute {
                pathname,
                r#type: "app-page",
                pages: Some(
                    pages
                        .into_iter()
                        .map(|page_route| AppPageNapiRoute {
                            original_name: Some(page_route.original_name),
                            html_endpoint: convert_endpoint(page_route.html_endpoint),
                            rsc_endpoint: convert_endpoint(page_route.rsc_endpoint),
                        })
                        .collect(),
                ),
                ..Default::default()
            },
            RouteOperation::AppRoute {
                original_name,
                endpoint,
            } => NapiRoute {
                pathname,
                original_name: Some(original_name),
                r#type: "app-route",
                endpoint: convert_endpoint(endpoint),
                ..Default::default()
            },
            RouteOperation::Conflict => NapiRoute {
                pathname,
                r#type: "conflict",
                ..Default::default()
            },
        }
    }
}
863
/// The project's middleware endpoint as exposed to JS.
#[napi(object)]
pub struct NapiMiddleware {
    pub endpoint: External<ExternalEndpoint>,
    pub is_proxy: bool,
}
869
impl NapiMiddleware {
    /// Wraps the middleware's endpoint operation in an external handle tied
    /// to the given Turbopack context.
    fn from_middleware(
        value: &MiddlewareOperation,
        turbopack_ctx: &NextTurbopackContext,
    ) -> Result<Self> {
        Ok(NapiMiddleware {
            endpoint: External::new(ExternalEndpoint(DetachedVc::new(
                turbopack_ctx.clone(),
                value.endpoint,
            ))),
            is_proxy: value.is_proxy,
        })
    }
}
884
/// The project's instrumentation endpoints (Node.js and edge runtimes) as
/// exposed to JS.
#[napi(object)]
pub struct NapiInstrumentation {
    pub node_js: External<ExternalEndpoint>,
    pub edge: External<ExternalEndpoint>,
}
890
impl NapiInstrumentation {
    /// Wraps both instrumentation endpoint operations in external handles
    /// tied to the given Turbopack context.
    fn from_instrumentation(
        value: &InstrumentationOperation,
        turbopack_ctx: &NextTurbopackContext,
    ) -> Result<Self> {
        Ok(NapiInstrumentation {
            node_js: External::new(ExternalEndpoint(DetachedVc::new(
                turbopack_ctx.clone(),
                value.node_js,
            ))),
            edge: External::new(ExternalEndpoint(DetachedVc::new(
                turbopack_ctx.clone(),
                value.edge,
            ))),
        })
    }
}
908
/// All of the project's entrypoints as exposed to JS: routes plus the
/// special pages-router endpoints and optional middleware/instrumentation.
#[napi(object)]
pub struct NapiEntrypoints {
    pub routes: Vec<NapiRoute>,
    pub middleware: Option<NapiMiddleware>,
    pub instrumentation: Option<NapiInstrumentation>,
    pub pages_document_endpoint: External<ExternalEndpoint>,
    pub pages_app_endpoint: External<ExternalEndpoint>,
    pub pages_error_endpoint: External<ExternalEndpoint>,
}
918
impl NapiEntrypoints {
    /// Converts an [`EntrypointsOperation`] into the JS representation,
    /// wrapping every endpoint in an [`ExternalEndpoint`] handle tied to the
    /// given Turbopack context.
    fn from_entrypoints_op(
        entrypoints: &EntrypointsOperation,
        turbopack_ctx: &NextTurbopackContext,
    ) -> Result<Self> {
        // Convert each route keyed by its pathname.
        let routes = entrypoints
            .routes
            .iter()
            .map(|(k, v)| NapiRoute::from_route(k.to_string(), v.clone(), turbopack_ctx))
            .collect();
        let middleware = entrypoints
            .middleware
            .as_ref()
            .map(|m| NapiMiddleware::from_middleware(m, turbopack_ctx))
            .transpose()?;
        let instrumentation = entrypoints
            .instrumentation
            .as_ref()
            .map(|i| NapiInstrumentation::from_instrumentation(i, turbopack_ctx))
            .transpose()?;
        // The three pages-router special endpoints are always present.
        let pages_document_endpoint = External::new(ExternalEndpoint(DetachedVc::new(
            turbopack_ctx.clone(),
            entrypoints.pages_document_endpoint,
        )));
        let pages_app_endpoint = External::new(ExternalEndpoint(DetachedVc::new(
            turbopack_ctx.clone(),
            entrypoints.pages_app_endpoint,
        )));
        let pages_error_endpoint = External::new(ExternalEndpoint(DetachedVc::new(
            turbopack_ctx.clone(),
            entrypoints.pages_error_endpoint,
        )));
        Ok(NapiEntrypoints {
            routes,
            middleware,
            instrumentation,
            pages_document_endpoint,
            pages_app_endpoint,
            pages_error_endpoint,
        })
    }
}
961
/// Entrypoints resolution result bundled with everything collected along the
/// way: issues, diagnostics, and effects.
#[turbo_tasks::value(serialization = "none")]
struct EntrypointsWithIssues {
    // `None` when the underlying operation produced no value (errors are
    // reported via `issues` — see `strongly_consistent_catch_collectables`).
    entrypoints: Option<ReadRef<EntrypointsOperation>>,
    issues: Arc<Vec<ReadRef<PlainIssue>>>,
    diagnostics: Arc<Vec<ReadRef<PlainDiagnostic>>>,
    effects: Arc<Effects>,
}
969
/// Operation computing the container's entrypoints while capturing the
/// issues, diagnostics, and effects produced along the way; errors are
/// caught rather than propagated (see
/// `strongly_consistent_catch_collectables`).
#[turbo_tasks::function(operation)]
async fn get_entrypoints_with_issues_operation(
    container: ResolvedVc<ProjectContainer>,
) -> Result<Vc<EntrypointsWithIssues>> {
    let entrypoints_operation =
        EntrypointsOperation::new(project_container_entrypoints_operation(container));
    let filter = issue_filter_from_container(container);
    let (entrypoints, issues, diagnostics, effects) =
        strongly_consistent_catch_collectables(entrypoints_operation, filter).await?;
    Ok(EntrypointsWithIssues {
        entrypoints,
        issues,
        diagnostics,
        effects,
    }
    .cell())
}
987
/// Wraps `ProjectContainer::entrypoints` in an operation so it can be passed
/// to `EntrypointsOperation::new`.
#[turbo_tasks::function(operation)]
fn project_container_entrypoints_operation(
    container: ResolvedVc<ProjectContainer>,
) -> Vc<Entrypoints> {
    container.entrypoints()
}
996
/// Issues, diagnostics, and effects collected from an operation that has no
/// interesting value of its own.
#[turbo_tasks::value(serialization = "none")]
struct OperationResult {
    issues: Arc<Vec<ReadRef<PlainIssue>>>,
    diagnostics: Arc<Vec<ReadRef<PlainDiagnostic>>>,
    effects: Arc<Effects>,
}
1003
/// Same shape as [`EntrypointsWithIssues`], used for the result of writing
/// all entrypoints (presumably by a write operation defined beyond this
/// chunk — confirm at the use site).
#[turbo_tasks::value(serialization = "none")]
struct AllWrittenEntrypointsWithIssues {
    entrypoints: Option<ReadRef<EntrypointsOperation>>,
    issues: Arc<Vec<ReadRef<PlainIssue>>>,
    diagnostics: Arc<Vec<ReadRef<PlainDiagnostic>>>,
    effects: Arc<Effects>,
}
1011
/// Subsets of app/pages entry paths selected for a debug build; converted
/// into `DebugBuildPaths` by the `From<NapiProjectOptions>` impl.
#[napi(object)]
#[derive(Clone, Debug)]
pub struct NapiDebugBuildPaths {
    pub app: Vec<RcStr>,
    pub pages: Vec<RcStr>,
}
1018
/// Which subset of endpoint groups a write pass covers when some app routes
/// are deferred (see `should_include_endpoint_group`).
#[derive(
    Clone,
    Copy,
    Debug,
    Eq,
    Hash,
    NonLocalValue,
    OperationValue,
    PartialEq,
    TaskInput,
    TraceRawVcs,
    Encode,
    Decode,
)]
enum EntrypointsWritePhase {
    /// Write every endpoint group.
    All,
    /// Write only groups that do not match a deferred entry.
    NonDeferred,
    /// Write only groups that match a deferred entry.
    Deferred,
}
1038
/// Normalizes a route for deferred-entry comparison: guarantees a leading
/// slash and strips trailing slashes, preserving the bare root route `"/"`.
///
/// Examples: `"a"` → `"/a"`, `"/a/"` → `"/a"`, `"/"` → `"/"`, `""` → `"/"`.
fn normalize_deferred_route(route: &str) -> String {
    let with_leading_slash = if route.starts_with('/') {
        route.to_owned()
    } else {
        format!("/{route}")
    };

    // Strip *all* trailing slashes. The previous `strip_suffix('/')` removed
    // only one, so "/a//" normalized to "/a/" and failed to compare equal to
    // "/a"; this also matches the trimming done in `to_app_debug_path`.
    let trimmed = with_leading_slash.trim_end_matches('/');
    if trimmed.is_empty() {
        // The input consisted solely of slashes; normalize to the root.
        "/".to_owned()
    } else {
        trimmed.to_owned()
    }
}
1055
1056fn is_deferred_app_route(route: &str, deferred_entries: &[RcStr]) -> bool {
1057 let normalized_route = normalize_deferred_route(route);
1058
1059 deferred_entries.iter().any(|entry| {
1060 let normalized_entry = normalize_deferred_route(entry);
1061 normalized_route == normalized_entry
1062 || normalized_route.starts_with(&format!("{normalized_entry}/"))
1063 })
1064}
1065
/// Build-path partition produced by [`compute_deferred_phase_build_paths`].
#[derive(Clone, Debug, TraceRawVcs)]
struct DeferredPhaseBuildPaths {
    /// Debug build paths excluding deferred app routes.
    non_deferred: DebugBuildPaths,
    /// Debug build paths for every route, deferred and not.
    all: DebugBuildPaths,
    /// App-dir-relative source directories of deferred entries, used later
    /// for filesystem invalidation (see
    /// `invalidate_deferred_entry_source_dirs_after_callback`).
    deferred_invalidation_dirs: Vec<RcStr>,
}
1072
1073fn to_app_debug_path(route: &str, leaf: &'static str) -> RcStr {
1074 let with_leading_slash = if route.starts_with('/') {
1075 route.to_owned()
1076 } else {
1077 format!("/{route}")
1078 };
1079
1080 let normalized_route = if with_leading_slash.len() > 1 && with_leading_slash.ends_with('/') {
1081 with_leading_slash.trim_end_matches('/').to_owned()
1082 } else {
1083 with_leading_slash
1084 };
1085
1086 if normalized_route == "/" {
1087 format!("/{leaf}").into()
1088 } else {
1089 format!("{normalized_route}/{leaf}").into()
1090 }
1091}
1092
1093fn app_entry_source_dir_from_original_name(original_name: &str) -> RcStr {
1094 let normalized_name = normalize_deferred_route(original_name);
1095 let mut segments = normalized_name
1096 .trim_start_matches('/')
1097 .split('/')
1098 .filter(|segment| !segment.is_empty())
1099 .collect::<Vec<_>>();
1100
1101 if !segments.is_empty() {
1102 segments.pop();
1103 }
1104
1105 if segments.is_empty() {
1106 rcstr!("/")
1107 } else {
1108 format!("/{}", segments.join("/")).into()
1109 }
1110}
1111
/// Partitions the project's routes into deferred and non-deferred debug
/// build-path sets based on `deferred_entries`.
///
/// Produces the paths for the non-deferred write phase, the paths for the
/// full build, and the app-dir-relative source directories of deferred
/// entries (needed for filesystem invalidation before the deferred phase).
fn compute_deferred_phase_build_paths(
    entrypoints: &Entrypoints,
    deferred_entries: &[RcStr],
) -> DeferredPhaseBuildPaths {
    let mut non_deferred_app = FxIndexSet::default();
    let mut deferred_app = FxIndexSet::default();
    let mut deferred_invalidation_dirs = FxIndexSet::default();
    let mut pages = FxIndexSet::default();

    for (route_key, route) in entrypoints.routes.iter() {
        match route {
            // Pages-router routes are never deferred; keep them as-is.
            Route::Page { .. } | Route::PageApi { .. } => {
                pages.insert(route_key.clone());
            }
            Route::AppPage(app_page_routes) => {
                let app_debug_path = to_app_debug_path(route_key.as_str(), "page");
                if is_deferred_app_route(route_key.as_str(), deferred_entries) {
                    deferred_app.insert(app_debug_path);
                    // Record each page's source directory so it can be
                    // invalidated when the deferred phase runs.
                    deferred_invalidation_dirs.extend(app_page_routes.iter().map(|route| {
                        app_entry_source_dir_from_original_name(route.original_name.as_str())
                    }));
                } else {
                    non_deferred_app.insert(app_debug_path);
                }
            }
            Route::AppRoute { original_name, .. } => {
                let app_debug_path = to_app_debug_path(route_key.as_str(), "route");
                if is_deferred_app_route(route_key.as_str(), deferred_entries) {
                    deferred_app.insert(app_debug_path);
                    deferred_invalidation_dirs.insert(app_entry_source_dir_from_original_name(
                        original_name.as_str(),
                    ));
                } else {
                    non_deferred_app.insert(app_debug_path);
                }
            }
            Route::Conflict => {}
        }
    }

    let pages_vec = pages.into_iter().collect::<Vec<_>>();
    // "All" is non-deferred followed by deferred, deduplicated while
    // preserving insertion order.
    let all_app_vec = non_deferred_app
        .iter()
        .chain(deferred_app.iter())
        .cloned()
        .collect::<FxIndexSet<_>>()
        .into_iter()
        .collect::<Vec<_>>();

    DeferredPhaseBuildPaths {
        non_deferred: DebugBuildPaths {
            app: non_deferred_app.into_iter().collect::<Vec<_>>(),
            pages: pages_vec.clone(),
        },
        all: DebugBuildPaths {
            app: all_app_vec,
            pages: pages_vec,
        },
        deferred_invalidation_dirs: deferred_invalidation_dirs.into_iter().collect::<Vec<_>>(),
    }
}
1173
/// Invalidates the watched filesystem for the source directories of deferred
/// app entries, so their files are re-read when the deferred phase builds.
///
/// No-op when `deferred_invalidation_dirs` is empty or the project has no
/// app dir.
async fn invalidate_deferred_entry_source_dirs_after_callback(
    container: ResolvedVc<ProjectContainer>,
    deferred_invalidation_dirs: Vec<RcStr>,
) -> Result<()> {
    if deferred_invalidation_dirs.is_empty() {
        return Ok(());
    }

    // The resolved app dir (if any) together with the project's disk fs.
    #[turbo_tasks::value(cell = "new", eq = "manual")]
    struct ProjectInfo(Option<FileSystemPath>, DiskFileSystem);

    #[turbo_tasks::function(operation)]
    async fn project_info_operation(
        container: ResolvedVc<ProjectContainer>,
    ) -> Result<Vc<ProjectInfo>> {
        let project = container.project();
        let app_dir = find_app_dir(project.project_path().owned().await?)
            .owned()
            .await?;
        let project_fs = project.project_fs().owned().await?;
        Ok(ProjectInfo(app_dir, project_fs).cell())
    }
    let ProjectInfo(app_dir, project_fs) = &*project_info_operation(container)
        .read_strongly_consistent()
        .await?;

    let Some(app_dir) = app_dir else {
        return Ok(());
    };
    let app_dir_sys_path = project_fs.to_sys_path(app_dir);
    // Map each route dir onto an absolute system path under the app dir,
    // deduplicating while preserving order.
    let paths_to_invalidate = deferred_invalidation_dirs
        .into_iter()
        .map(|dir| {
            let normalized_dir = normalize_deferred_route(dir.as_str());
            let relative_dir = normalized_dir.trim_start_matches('/');
            if relative_dir.is_empty() {
                app_dir_sys_path.clone()
            } else {
                app_dir_sys_path.join(unix_to_sys(relative_dir).as_ref())
            }
        })
        .collect::<FxIndexSet<_>>()
        .into_iter()
        .collect::<Vec<_>>();

    if paths_to_invalidate.is_empty() {
        // NOTE(review): with the non-empty check above, this branch looks
        // unreachable (every input dir maps to one path); kept as a
        // defensive fallback that invalidates the whole project filesystem.
        project_fs.invalidate_with_reason(|path| invalidation::Initialize {
            path: RcStr::from(path.to_string_lossy()),
        });
    } else {
        project_fs.invalidate_path_and_children_with_reason(paths_to_invalidate, |path| {
            invalidation::Initialize {
                path: RcStr::from(path.to_string_lossy()),
            }
        });
    }

    Ok(())
}
1234
1235fn is_deferred_endpoint_group(key: &EndpointGroupKey, deferred_entries: &[RcStr]) -> bool {
1236 if deferred_entries.is_empty() {
1237 return false;
1238 }
1239
1240 let EndpointGroupKey::Route(route_key) = key else {
1241 return false;
1242 };
1243
1244 is_deferred_app_route(route_key.as_str(), deferred_entries)
1245}
1246
1247fn should_include_endpoint_group(
1248 write_phase: EntrypointsWritePhase,
1249 key: &EndpointGroupKey,
1250 deferred_entries: &[RcStr],
1251) -> bool {
1252 let is_deferred = is_deferred_endpoint_group(key, deferred_entries);
1253
1254 match write_phase {
1255 EntrypointsWritePhase::All => true,
1256 EntrypointsWritePhase::NonDeferred => !is_deferred,
1257 EntrypointsWritePhase::Deferred => is_deferred,
1258 }
1259}
1260
1261async fn app_route_filter_for_write_phase(
1262 project: Vc<Project>,
1263 write_phase: EntrypointsWritePhase,
1264 deferred_entries: &[RcStr],
1265) -> Result<Option<Vec<RcStr>>> {
1266 if matches!(write_phase, EntrypointsWritePhase::All) || deferred_entries.is_empty() {
1267 return Ok(None);
1268 }
1269
1270 let include_deferred = write_phase == EntrypointsWritePhase::Deferred;
1271 let app_project = project.app_project().await?;
1272 let app_route_keys = if let Some(app_project) = &*app_project {
1273 app_project
1274 .route_keys()
1275 .await?
1276 .iter()
1277 .cloned()
1278 .collect::<Vec<_>>()
1279 } else {
1280 Vec::new()
1281 };
1282
1283 Ok(Some(
1284 app_route_keys
1285 .iter()
1286 .filter(|route| {
1287 is_deferred_app_route(route.as_str(), deferred_entries) == include_deferred
1288 })
1289 .cloned()
1290 .collect::<Vec<_>>(),
1291 ))
1292}
1293
/// Builds and writes all project entrypoints to disk, returning the resulting
/// entrypoints plus any issues/diagnostics collected along the way.
///
/// When deferred entries are configured and at least one app route matches them, the
/// build runs in two phases:
/// 1. build/write only the non-deferred routes (restricting `debug_build_paths`),
/// 2. invoke the JS `on_before_deferred_entries` callback, invalidate the deferred
///    source dirs, widen `debug_build_paths` to all routes, then build/write the
///    deferred routes.
/// Without deferred entries a single `All` phase is used. Finally all output assets
/// are emitted once and any additional issues/diagnostics are merged in.
#[tracing::instrument(level = "info", name = "write all entrypoints to disk", skip_all)]
#[napi]
pub async fn project_write_all_entrypoints_to_disk(
    #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
    app_dir_only: bool,
) -> napi::Result<TurbopackResult<Option<NapiEntrypoints>>> {
    let ctx = &project.turbopack_ctx;
    let container = project.container;
    let tt = ctx.turbo_tasks();

    // True iff deferred entries are configured AND at least one app route matches them.
    #[turbo_tasks::function(operation)]
    async fn has_deferred_entrypoints_operation(
        container: ResolvedVc<ProjectContainer>,
    ) -> Result<Vc<bool>> {
        let project = container.project();
        let deferred_entries = project.deferred_entries().owned().await?;

        if deferred_entries.is_empty() {
            return Ok(Vc::cell(false));
        }

        let app_project = project.app_project().await?;
        let has_deferred = if let Some(app_project) = &*app_project {
            app_project
                .route_keys()
                .await?
                .iter()
                .any(|route_key| is_deferred_app_route(route_key.as_str(), &deferred_entries))
        } else {
            false
        };

        Ok(Vc::cell(has_deferred))
    }

    let has_deferred_entrypoints = tt
        .run(async move {
            Ok(*has_deferred_entrypoints_operation(container)
                .read_strongly_consistent()
                .await?)
        })
        .or_else(|e| ctx.throw_turbopack_internal_result(&e.into()))
        .await?;

    // Precompute the per-phase build-path sets (and the dirs to invalidate between
    // phases) only when a deferred phase will actually run.
    let phase_build_paths = if has_deferred_entrypoints {
        Some(
            tt.run(async move {
                // Bundle entrypoints + deferred entries so one strongly-consistent
                // read fetches both.
                #[turbo_tasks::value]
                struct DeferredEntrypointInfo(ReadRef<Entrypoints>, ReadRef<Vec<RcStr>>);

                #[turbo_tasks::function(operation)]
                async fn deferred_entrypoint_info_operation(
                    container: ResolvedVc<ProjectContainer>,
                ) -> Result<Vc<DeferredEntrypointInfo>> {
                    let project = container.project();
                    Ok(DeferredEntrypointInfo(
                        project.entrypoints().await?,
                        project.deferred_entries().await?,
                    )
                    .cell())
                }

                let DeferredEntrypointInfo(entrypoints, deferred_entries) =
                    &*deferred_entrypoint_info_operation(container)
                        .read_strongly_consistent()
                        .await?;

                Ok(compute_deferred_phase_build_paths(
                    entrypoints,
                    deferred_entries,
                ))
            })
            .or_else(|e| ctx.throw_turbopack_internal_result(&e.into()))
            .await?,
        )
    } else {
        None
    };

    // Phase 1 setup: restrict the build to the non-deferred paths before writing.
    if let Some(phase_build_paths) = phase_build_paths.as_ref() {
        let non_deferred_build_paths = phase_build_paths.non_deferred.clone();
        tt.run(async move {
            container
                .update(PartialProjectOptions {
                    debug_build_paths: Some(non_deferred_build_paths),
                    ..Default::default()
                })
                .await?;
            Ok(())
        })
        .or_else(|e| ctx.throw_turbopack_internal_result(&e.into()))
        .await?;
    }

    let first_phase = if has_deferred_entrypoints {
        EntrypointsWritePhase::NonDeferred
    } else {
        EntrypointsWritePhase::All
    };

    // First write phase: either everything, or only the non-deferred routes.
    let (mut entrypoints, mut issues, mut diags) = tt
        .run(async move {
            let entrypoints_with_issues_op = get_all_written_entrypoints_with_issues_operation(
                container,
                app_dir_only,
                first_phase,
            );

            let AllWrittenEntrypointsWithIssues {
                entrypoints,
                issues,
                diagnostics,
                effects,
            } = &*entrypoints_with_issues_op
                .read_strongly_consistent()
                .await?;

            // Apply side effects (the actual disk writes) before returning.
            effects.apply().await?;

            Ok((
                entrypoints.clone(),
                issues.iter().cloned().collect::<Vec<_>>(),
                diagnostics.iter().cloned().collect::<Vec<_>>(),
            ))
        })
        .or_else(|e| ctx.throw_turbopack_internal_result(&e.into()))
        .await?;

    if has_deferred_entrypoints {
        // Let JS react between phases (e.g. to mutate deferred sources).
        ctx.on_before_deferred_entries().await?;

        let deferred_invalidation_dirs = phase_build_paths
            .as_ref()
            .map(|paths| paths.deferred_invalidation_dirs.clone())
            .unwrap_or_default();

        // Re-read deferred source dirs in case the callback changed them.
        tt.run(async move {
            invalidate_deferred_entry_source_dirs_after_callback(
                container,
                deferred_invalidation_dirs,
            )
            .await?;
            Ok(())
        })
        .or_else(|e| ctx.throw_turbopack_internal_result(&e.into()))
        .await?;

        // Phase 2 setup: widen the build paths to cover all routes.
        if let Some(phase_build_paths) = phase_build_paths.as_ref() {
            let all_build_paths = phase_build_paths.all.clone();
            tt.run(async move {
                container
                    .update(PartialProjectOptions {
                        debug_build_paths: Some(all_build_paths),
                        ..Default::default()
                    })
                    .await?;
                Ok(())
            })
            .or_else(|e| ctx.throw_turbopack_internal_result(&e.into()))
            .await?;
        }

        // Second write phase: only the deferred routes.
        let (deferred_entrypoints, deferred_issues, deferred_diags) = tt
            .run(async move {
                let entrypoints_with_issues_op = get_all_written_entrypoints_with_issues_operation(
                    container,
                    app_dir_only,
                    EntrypointsWritePhase::Deferred,
                );

                let AllWrittenEntrypointsWithIssues {
                    entrypoints,
                    issues,
                    diagnostics,
                    effects,
                } = &*entrypoints_with_issues_op
                    .read_strongly_consistent()
                    .await?;

                effects.apply().await?;

                Ok((
                    entrypoints.clone(),
                    issues.iter().cloned().collect::<Vec<_>>(),
                    diagnostics.iter().cloned().collect::<Vec<_>>(),
                ))
            })
            .or_else(|e| ctx.throw_turbopack_internal_result(&e.into()))
            .await?;

        // Prefer the deferred-phase entrypoints (they reflect the full route set);
        // accumulate issues/diagnostics from both phases.
        if deferred_entrypoints.is_some() {
            entrypoints = deferred_entrypoints;
        }
        issues.extend(deferred_issues);
        diags.extend(deferred_diags);
    }

    // Emit all output assets exactly once (merging both phases when deferred).
    let (emit_issues, emit_diags) = tt
        .run(async move {
            let emit_result_op = emit_all_output_assets_once_with_issues_operation(
                container,
                app_dir_only,
                has_deferred_entrypoints,
            );
            let OperationResult {
                issues,
                diagnostics,
                effects,
            } = &*emit_result_op.read_strongly_consistent().await?;

            effects.apply().await?;

            Ok((
                issues.iter().cloned().collect::<Vec<_>>(),
                diagnostics.iter().cloned().collect::<Vec<_>>(),
            ))
        })
        .or_else(|e| ctx.throw_turbopack_internal_result(&e.into()))
        .await?;

    issues.extend(emit_issues);
    diags.extend(emit_diags);

    Ok(TurbopackResult {
        result: if let Some(entrypoints) = entrypoints {
            Some(NapiEntrypoints::from_entrypoints_op(
                &entrypoints,
                &project.turbopack_ctx,
            )?)
        } else {
            None
        },
        issues: issues.iter().map(|i| NapiIssue::from(&**i)).collect(),
        diagnostics: diags.iter().map(|d| NapiDiagnostic::from(d)).collect(),
    })
}
1536
1537#[turbo_tasks::function(operation)]
1538async fn get_all_written_entrypoints_with_issues_operation(
1539 container: ResolvedVc<ProjectContainer>,
1540 app_dir_only: bool,
1541 write_phase: EntrypointsWritePhase,
1542) -> Result<Vc<AllWrittenEntrypointsWithIssues>> {
1543 let entrypoints_operation = EntrypointsOperation::new(all_entrypoints_write_to_disk_operation(
1544 container,
1545 app_dir_only,
1546 write_phase,
1547 ));
1548 let filter = issue_filter_from_container(container);
1549 let (entrypoints, issues, diagnostics, effects) =
1550 strongly_consistent_catch_collectables(entrypoints_operation, filter).await?;
1551 Ok(AllWrittenEntrypointsWithIssues {
1552 entrypoints,
1553 issues,
1554 diagnostics,
1555 effects,
1556 }
1557 .cell())
1558}
1559
/// Computes (and thereby schedules the writing of) the output assets for the given
/// phase, then returns the project's entrypoints.
#[turbo_tasks::function(operation)]
pub async fn all_entrypoints_write_to_disk_operation(
    project: ResolvedVc<ProjectContainer>,
    app_dir_only: bool,
    write_phase: EntrypointsWritePhase,
) -> Result<Vc<Entrypoints>> {
    let output_assets_operation = output_assets_operation(project, app_dir_only, write_phase);
    // Connect and await the operation for its side effects; the asset list itself is
    // deliberately discarded here.
    let _ = output_assets_operation.connect().await?;

    Ok(project.entrypoints())
}
1572
/// Produces the single, combined set of output assets to emit.
///
/// Without deferred entrypoints this is just the `All`-phase asset set. With deferred
/// entrypoints the non-deferred and deferred phase sets are merged, deduplicated via
/// `FxIndexSet` while preserving order (non-deferred first).
#[turbo_tasks::function(operation)]
async fn output_assets_for_single_emit_operation(
    container: ResolvedVc<ProjectContainer>,
    app_dir_only: bool,
    has_deferred_entrypoints: bool,
) -> Result<Vc<OutputAssets>> {
    if !has_deferred_entrypoints {
        return Ok(
            output_assets_operation(container, app_dir_only, EntrypointsWritePhase::All).connect(),
        );
    }

    let non_deferred_output_assets =
        output_assets_operation(container, app_dir_only, EntrypointsWritePhase::NonDeferred)
            .connect()
            .await?;
    let deferred_output_assets =
        output_assets_operation(container, app_dir_only, EntrypointsWritePhase::Deferred)
            .connect()
            .await?;

    // Dedupe: an asset may appear in both phases.
    let merged_output_assets: FxIndexSet<ResolvedVc<Box<dyn OutputAsset>>> =
        non_deferred_output_assets
            .iter()
            .chain(deferred_output_assets.iter())
            .copied()
            .collect();

    Ok(Vc::cell(merged_output_assets.into_iter().collect()))
}
1603
/// Emits every output asset exactly once (both phases merged when deferred) and
/// returns the container's entrypoints.
#[turbo_tasks::function(operation)]
async fn emit_all_output_assets_once_operation(
    container: ResolvedVc<ProjectContainer>,
    app_dir_only: bool,
    has_deferred_entrypoints: bool,
) -> Result<Vc<Entrypoints>> {
    let output_assets_operation =
        output_assets_for_single_emit_operation(container, app_dir_only, has_deferred_entrypoints);
    // Run the emission purely for its side effect (writing assets).
    container
        .project()
        .emit_all_output_assets(output_assets_operation)
        .as_side_effect()
        .await?;

    Ok(container.entrypoints())
}
1620
1621#[turbo_tasks::function(operation)]
1622async fn emit_all_output_assets_once_with_issues_operation(
1623 container: ResolvedVc<ProjectContainer>,
1624 app_dir_only: bool,
1625 has_deferred_entrypoints: bool,
1626) -> Result<Vc<OperationResult>> {
1627 let entrypoints_operation = EntrypointsOperation::new(emit_all_output_assets_once_operation(
1628 container,
1629 app_dir_only,
1630 has_deferred_entrypoints,
1631 ));
1632 let filter = issue_filter_from_container(container);
1633 let (_, issues, diagnostics, effects) =
1634 strongly_consistent_catch_collectables(entrypoints_operation, filter).await?;
1635
1636 Ok(OperationResult {
1637 issues,
1638 diagnostics,
1639 effects,
1640 }
1641 .cell())
1642}
1643
/// Collects the output assets for every endpoint that belongs to `write_phase`.
///
/// Endpoint groups are filtered twice: first via the app-route filter (which restricts
/// which app routes are even built), then per group via `should_include_endpoint_group`.
/// For the `NonDeferred` phase only the raw endpoint assets are returned; for the other
/// phases the NFT assets and the optional hash manifests are appended, and the
/// whole-app module graphs are forced as a side effect.
#[turbo_tasks::function(operation)]
async fn output_assets_operation(
    container: ResolvedVc<ProjectContainer>,
    app_dir_only: bool,
    write_phase: EntrypointsWritePhase,
) -> Result<Vc<OutputAssets>> {
    let project = container.project();
    let deferred_entries = project.deferred_entries().owned().await?;
    let app_route_filter =
        app_route_filter_for_write_phase(project, write_phase, &deferred_entries).await?;

    let endpoint_groups = project
        .get_all_endpoint_groups_with_app_route_filter(app_dir_only, app_route_filter)
        .await?;

    // Flatten primary + additional endpoints of every group included in this phase.
    let endpoints = endpoint_groups
        .iter()
        .filter(|(key, _)| should_include_endpoint_group(write_phase, key, &deferred_entries))
        .flat_map(|(_, group)| {
            group
                .primary
                .iter()
                .chain(group.additional.iter())
                .map(|entry| entry.endpoint)
        })
        .collect::<Vec<_>>();

    // Resolve all endpoint outputs concurrently.
    let endpoint_assets = endpoints
        .iter()
        .map(|endpoint| async move { endpoint.output().await?.output_assets.await })
        .try_join()
        .await?;

    // Dedupe assets shared between endpoints while preserving order.
    let output_assets: FxIndexSet<ResolvedVc<Box<dyn OutputAsset>>> = endpoint_assets
        .iter()
        .flat_map(|assets| assets.iter().copied())
        .collect();

    // The non-deferred phase skips the project-wide extras; they are produced by the
    // later phase that covers the complete route set.
    if write_phase == EntrypointsWritePhase::NonDeferred {
        return Ok(Vc::cell(output_assets.into_iter().collect()));
    }

    let whole_app_module_graphs = project.whole_app_module_graphs();
    let nft = next_server_nft_assets(project).await?;
    let routes_hashes_manifest = routes_hashes_manifest_asset_if_enabled(project).await?;
    let immutable_hashes_manifest_asset =
        immutable_hashes_manifest_asset_if_enabled(project).await?;

    // Force the module graphs to be computed even though their value is unused.
    whole_app_module_graphs.as_side_effect().await?;

    Ok(Vc::cell(
        output_assets
            .into_iter()
            .chain(nft.iter().copied())
            .chain(routes_hashes_manifest.iter().copied())
            .chain(immutable_hashes_manifest_asset.iter().copied())
            .collect(),
    ))
}
1703
/// Returns the project's current entrypoints (one-shot), together with any issues and
/// diagnostics from computing them.
#[tracing::instrument(level = "info", name = "get entrypoints", skip_all)]
#[napi]
pub async fn project_entrypoints(
    #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
) -> napi::Result<TurbopackResult<Option<NapiEntrypoints>>> {
    let container = project.container;

    let (entrypoints, issues, diags) = project
        .turbopack_ctx
        .turbo_tasks()
        .run_once(async move {
            let entrypoints_with_issues_op = get_entrypoints_with_issues_operation(container);

            // NOTE(review): `effects` is deliberately discarded here (unlike the
            // subscribe variant, which applies them) — confirm that is intended for
            // the one-shot read.
            let EntrypointsWithIssues {
                entrypoints,
                issues,
                diagnostics,
                effects: _,
            } = &*entrypoints_with_issues_op
                .read_strongly_consistent()
                .await?;

            Ok((entrypoints.clone(), issues.clone(), diagnostics.clone()))
        })
        .await
        .map_err(|e| napi::Error::from_reason(PrettyPrintError(&e).to_string()))?;

    // Convert to the napi-facing representation outside the turbo-tasks runtime.
    let result = match entrypoints {
        Some(entrypoints) => Some(NapiEntrypoints::from_entrypoints_op(
            &entrypoints,
            &project.turbopack_ctx,
        )?),
        None => None,
    };

    Ok(TurbopackResult {
        result,
        issues: issues.iter().map(|i| NapiIssue::from(&**i)).collect(),
        diagnostics: diags.iter().map(|d| NapiDiagnostic::from(d)).collect(),
    })
}
1746
/// Subscribes a JS callback to entrypoint changes; the callback is re-invoked whenever
/// the strongly-consistent entrypoints read produces a new value.
#[tracing::instrument(level = "info", name = "subscribe to entrypoints", skip_all)]
#[napi(ts_return_type = "{ __napiType: \"RootTask\" }")]
pub fn project_entrypoints_subscribe(
    #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
    func: JsFunction,
) -> napi::Result<External<RootTask>> {
    let turbopack_ctx = project.turbopack_ctx.clone();
    let container = project.container;
    subscribe(
        turbopack_ctx.clone(),
        func,
        // Producer: computes the next (entrypoints, issues, diagnostics) tuple.
        move || {
            async move {
                let entrypoints_with_issues_op = get_entrypoints_with_issues_operation(container);
                let EntrypointsWithIssues {
                    entrypoints,
                    issues,
                    diagnostics,
                    effects,
                } = &*entrypoints_with_issues_op
                    .read_strongly_consistent()
                    .await?;

                // Apply side effects before handing the value to JS.
                effects.apply().await?;
                Ok((entrypoints.clone(), issues.clone(), diagnostics.clone()))
            }
            .instrument(tracing::info_span!("entrypoints subscription"))
        },
        // Consumer: converts the tuple into the napi payload for the JS callback.
        move |ctx| {
            let (entrypoints, issues, diags) = ctx.value;
            let result = match entrypoints {
                Some(entrypoints) => Some(NapiEntrypoints::from_entrypoints_op(
                    &entrypoints,
                    &turbopack_ctx,
                )?),
                None => None,
            };

            Ok(vec![TurbopackResult {
                result,
                issues: issues
                    .iter()
                    .map(|issue| NapiIssue::from(&**issue))
                    .collect(),
                diagnostics: diags.iter().map(|d| NapiDiagnostic::from(d)).collect(),
            }])
        },
    )
}
1796
/// An HMR update bundled with the issues, diagnostics, and pending effects collected
/// while computing it.
#[turbo_tasks::value(serialization = "none")]
struct HmrUpdateWithIssues {
    update: ReadRef<Update>,
    issues: Arc<Vec<ReadRef<PlainIssue>>>,
    diagnostics: Arc<Vec<ReadRef<PlainDiagnostic>>>,
    effects: Arc<Effects>,
}
1804
/// Operation wrapper around [`Project::hmr_update`] so the result can be read
/// strongly-consistently and its collectables captured.
#[turbo_tasks::function(operation)]
fn project_hmr_update_operation(
    project: ResolvedVc<Project>,
    chunk_name: RcStr,
    target: HmrTarget,
    state: ResolvedVc<VersionState>,
) -> Vc<Update> {
    project.hmr_update(chunk_name, target, *state)
}
1814
/// Computes an HMR update and bundles it with the issues, diagnostics, and effects
/// collected from the underlying operation.
#[turbo_tasks::function(operation)]
async fn hmr_update_with_issues_operation(
    project: ResolvedVc<Project>,
    chunk_name: RcStr,
    state: ResolvedVc<VersionState>,
    target: HmrTarget,
) -> Result<Vc<HmrUpdateWithIssues>> {
    let update_op = project_hmr_update_operation(project, chunk_name, target, state);
    // Read first so the collectables below observe the settled operation.
    let update = update_op.read_strongly_consistent().await?;
    let filter = project.issue_filter();
    let issues = get_issues(update_op, filter).await?;
    let diagnostics = get_diagnostics(update_op).await?;
    let effects = Arc::new(get_effects(update_op).await?);
    Ok(HmrUpdateWithIssues {
        update,
        issues,
        diagnostics,
        effects,
    }
    .cell())
}
1836
/// Subscribes a JS callback to HMR events for one chunk.
///
/// Each producer round computes the next update against the tracked `VersionState`,
/// advances that state when the update is a `Total`/`Partial` transition, and hands
/// the update (plus issues/diagnostics) to the consumer, which serializes it into a
/// `ClientUpdateInstruction` for the client.
#[tracing::instrument(level = "info", name = "get HMR events", skip(project, func), fields(target = %target))]
#[napi(ts_return_type = "{ __napiType: \"RootTask\" }")]
pub fn project_hmr_events(
    #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
    chunk_name: RcStr,
    target: String,
    func: JsFunction,
) -> napi::Result<External<RootTask>> {
    let hmr_target = target
        .parse::<HmrTarget>()
        .map_err(napi::Error::from_reason)?;

    let container = project.container;
    // Session token ties the version state to this subscription's lifetime.
    let session = TransientInstance::new(());
    subscribe(
        project.turbopack_ctx.clone(),
        func,
        {
            let outer_chunk_name = chunk_name.clone();
            let session = session.clone();
            // Producer: compute the next update and advance the version state.
            move || {
                let chunk_name: RcStr = outer_chunk_name.clone();
                let session = session.clone();
                async move {
                    let project = container.project().to_resolved().await?;
                    let state = project
                        .hmr_version_state(chunk_name.clone(), hmr_target, session)
                        .to_resolved()
                        .await?;

                    let update_op = hmr_update_with_issues_operation(
                        project,
                        chunk_name.clone(),
                        state,
                        hmr_target,
                    );
                    let update = update_op.read_strongly_consistent().await?;
                    let HmrUpdateWithIssues {
                        update,
                        issues,
                        diagnostics,
                        effects,
                    } = &*update;
                    effects.apply().await?;
                    // Record the version the client is now at so the next round
                    // diffs against it.
                    match &**update {
                        Update::Missing | Update::None => {}
                        Update::Total(TotalUpdate { to }) => {
                            state.set(to.clone()).await?;
                        }
                        Update::Partial(PartialUpdate { to, .. }) => {
                            state.set(to.clone()).await?;
                        }
                    }
                    Ok((Some(update.clone()), issues.clone(), diagnostics.clone()))
                }
            }
        },
        // Consumer: translate the update into a client instruction JS can ship over
        // the HMR socket.
        move |ctx| {
            let (update, issues, diags) = ctx.value;

            let napi_issues = issues
                .iter()
                .map(|issue| NapiIssue::from(&**issue))
                .collect();
            let update_issues = issues
                .iter()
                .map(|issue| Issue::from(&**issue))
                .collect::<Vec<_>>();

            let identifier = ResourceIdentifier {
                path: chunk_name.clone(),
                headers: None,
            };
            // Missing/Total => full restart; Partial => incremental patch;
            // None => issues only.
            let update = match update.as_deref() {
                None | Some(Update::Missing) | Some(Update::Total(_)) => {
                    ClientUpdateInstruction::restart(&identifier, &update_issues)
                }
                Some(Update::Partial(update)) => ClientUpdateInstruction::partial(
                    &identifier,
                    &update.instruction,
                    &update_issues,
                ),
                Some(Update::None) => ClientUpdateInstruction::issues(&identifier, &update_issues),
            };

            Ok(vec![TurbopackResult {
                result: ctx.env.to_js_value(&update)?,
                issues: napi_issues,
                diagnostics: diags.iter().map(|d| NapiDiagnostic::from(d)).collect(),
            }])
        },
    )
}
1930
/// JS-facing list of chunk names that currently have HMR subscribers.
#[napi(object)]
struct HmrChunkNames {
    pub chunk_names: Vec<RcStr>,
}
1935
/// HMR chunk names bundled with the issues, diagnostics, and pending effects collected
/// while computing them.
#[turbo_tasks::value(serialization = "none")]
struct HmrChunkNamesWithIssues {
    chunk_names: ReadRef<Vec<RcStr>>,
    issues: Arc<Vec<ReadRef<PlainIssue>>>,
    diagnostics: Arc<Vec<ReadRef<PlainDiagnostic>>>,
    effects: Arc<Effects>,
}
1943
/// Operation wrapper around [`ProjectContainer::hmr_chunk_names`] so the result can be
/// read strongly-consistently and its collectables captured.
#[turbo_tasks::function(operation)]
fn project_hmr_chunk_names_operation(
    container: ResolvedVc<ProjectContainer>,
    target: HmrTarget,
) -> Vc<Vec<RcStr>> {
    container.hmr_chunk_names(target)
}
1951
1952#[turbo_tasks::function(operation)]
1953async fn get_hmr_chunk_names_with_issues_operation(
1954 container: ResolvedVc<ProjectContainer>,
1955 target: HmrTarget,
1956) -> Result<Vc<HmrChunkNamesWithIssues>> {
1957 let hmr_chunk_names_op = project_hmr_chunk_names_operation(container, target);
1958 let hmr_chunk_names = hmr_chunk_names_op.read_strongly_consistent().await?;
1959 let filter = issue_filter_from_container(container);
1960 let issues = get_issues(hmr_chunk_names_op, filter).await?;
1961 let diagnostics = get_diagnostics(hmr_chunk_names_op).await?;
1962 let effects = Arc::new(get_effects(hmr_chunk_names_op).await?);
1963 Ok(HmrChunkNamesWithIssues {
1964 chunk_names: hmr_chunk_names,
1965 issues,
1966 diagnostics,
1967 effects,
1968 }
1969 .cell())
1970}
1971
/// Subscribes a JS callback to the set of chunk names with active HMR subscribers;
/// the callback fires whenever that set changes.
#[tracing::instrument(level = "info", name = "get HMR chunk names", skip(project, func), fields(target = %target))]
#[napi(ts_return_type = "{ __napiType: \"RootTask\" }")]
pub fn project_hmr_chunk_names_subscribe(
    #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
    target: String,
    func: JsFunction,
) -> napi::Result<External<RootTask>> {
    let hmr_target = target
        .parse::<HmrTarget>()
        .map_err(napi::Error::from_reason)?;

    let container = project.container;
    subscribe(
        project.turbopack_ctx.clone(),
        func,
        // Producer: read chunk names strongly-consistently and apply effects.
        move || async move {
            let hmr_chunk_names_with_issues_op =
                get_hmr_chunk_names_with_issues_operation(container, hmr_target);
            let HmrChunkNamesWithIssues {
                chunk_names,
                issues,
                diagnostics,
                effects,
            } = &*hmr_chunk_names_with_issues_op
                .read_strongly_consistent()
                .await?;
            effects.apply().await?;

            Ok((chunk_names.clone(), issues.clone(), diagnostics.clone()))
        },
        // Consumer: convert into the napi payload.
        move |ctx| {
            let (chunk_names, issues, diagnostics) = ctx.value;

            Ok(vec![TurbopackResult {
                result: HmrChunkNames {
                    chunk_names: ReadRef::into_owned(chunk_names),
                },
                issues: issues
                    .iter()
                    .map(|issue| NapiIssue::from(&**issue))
                    .collect(),
                diagnostics: diagnostics
                    .iter()
                    .map(|d| NapiDiagnostic::from(d))
                    .collect(),
            }])
        },
    )
}
2021
/// A message in the aggregated-update-info stream: `Start` marks the beginning of an
/// update batch, `End` carries the aggregated stats once the batch settles.
pub enum UpdateMessage {
    Start,
    End(UpdateInfo),
}
2026
/// JS-facing form of [`UpdateMessage`]: `update_type` is `"start"` or `"end"`, and
/// `value` is present only for `"end"`.
#[napi(object)]
struct NapiUpdateMessage {
    pub update_type: &'static str,
    pub value: Option<NapiUpdateInfo>,
}
2032
2033impl From<UpdateMessage> for NapiUpdateMessage {
2034 fn from(update_message: UpdateMessage) -> Self {
2035 match update_message {
2036 UpdateMessage::Start => NapiUpdateMessage {
2037 update_type: "start",
2038 value: None,
2039 },
2040 UpdateMessage::End(info) => NapiUpdateMessage {
2041 update_type: "end",
2042 value: Some(info.into()),
2043 },
2044 }
2045 }
2046}
2047
/// JS-facing aggregated update stats.
#[napi(object)]
struct NapiUpdateInfo {
    // Duration of the update batch in milliseconds.
    pub duration: u32,
    // Number of tasks executed during the batch.
    pub tasks: u32,
}
2053
2054impl From<UpdateInfo> for NapiUpdateInfo {
2055 fn from(update_info: UpdateInfo) -> Self {
2056 Self {
2057 duration: update_info.duration.as_millis() as u32,
2058 tasks: update_info.tasks as u32,
2059 }
2060 }
2061}
2062
/// Streams aggregated update info to a JS callback.
///
/// Spawns a background task that, per iteration, emits a `Start` message, waits for an
/// aggregated update (either one already pending or the next one within the
/// `aggregation_ms` window), then emits an `End` message with the stats. The loop
/// stops when the JS call fails (e.g. the callback was released).
#[napi]
pub fn project_update_info_subscribe(
    #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
    aggregation_ms: u32,
    func: JsFunction,
) -> napi::Result<()> {
    let func: ThreadsafeFunction<UpdateMessage> = func.create_threadsafe_function(0, |ctx| {
        let message = ctx.value;
        Ok(vec![NapiUpdateMessage::from(message)])
    })?;
    tokio::spawn(async move {
        let tt = project.turbopack_ctx.turbo_tasks();
        loop {
            // Non-blocking probe: returns immediately with any already-aggregated info.
            let update_info = tt
                .aggregated_update_info(Duration::ZERO, Duration::ZERO)
                .await;

            // NOTE(review): the Start call's status is not checked, unlike the End
            // call below — confirm that is intentional.
            func.call(
                Ok(UpdateMessage::Start),
                ThreadsafeFunctionCallMode::NonBlocking,
            );

            // If nothing was pending, block until the next update settles within the
            // aggregation window.
            let update_info = match update_info {
                Some(update_info) => update_info,
                None => {
                    tt.get_or_wait_aggregated_update_info(Duration::from_millis(
                        aggregation_ms.into(),
                    ))
                    .await
                }
            };

            let status = func.call(
                Ok(UpdateMessage::End(update_info)),
                ThreadsafeFunctionCallMode::NonBlocking,
            );

            // A failed call means the JS side is gone; stop the loop.
            if !matches!(status, Status::Ok) {
                let error = anyhow!("Error calling JS function: {}", status);
                eprintln!("{error}");
                break;
            }
        }
    });
    Ok(())
}
2120
/// Forwards compilation events from the turbo-tasks message queue to a JS callback.
///
/// `event_types`, when provided, restricts the subscription to those event type names.
/// Each event is surfaced to JS as an object with `typeName`/`severity`/`message`/
/// `eventJson` plus an `eventData` external holding the Rust event. The forwarding
/// task ends when the queue closes or a JS call fails.
#[napi]
pub fn project_compilation_events_subscribe(
    #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
    func: JsFunction,
    event_types: Option<Vec<String>>,
) -> napi::Result<()> {
    let tsfn: ThreadsafeFunction<Arc<dyn CompilationEvent>> =
        func.create_threadsafe_function(0, |ctx| {
            let event: Arc<dyn CompilationEvent> = ctx.value;

            let env = ctx.env;
            let mut obj = env.create_object()?;
            obj.set_named_property("typeName", event.type_name())?;
            obj.set_named_property("severity", event.severity().to_string())?;
            obj.set_named_property("message", event.message())?;
            obj.set_named_property("eventJson", event.to_json())?;

            // NOTE(review): `create_external` returns a Result that is stored without
            // `?` — it relies on napi's conversion of `Result` values when setting the
            // property; confirm an error here is surfaced rather than swallowed.
            let external = env.create_external(event, None);
            obj.set_named_property("eventData", external)?;

            Ok(vec![obj])
        })?;

    tokio::spawn(async move {
        let tt = project.turbopack_ctx.turbo_tasks();
        let mut receiver = tt.subscribe_to_compilation_events(event_types);
        while let Some(msg) = receiver.recv().await {
            // Blocking mode applies backpressure so events are not dropped.
            let status = tsfn.call(Ok(msg), ThreadsafeFunctionCallMode::Blocking);

            if status != Status::Ok {
                break;
            }
        }
    });

    Ok(())
}
2159
/// A stack frame exchanged with JS for source-map tracing; also usable as a
/// turbo-tasks task input.
#[napi(object)]
#[derive(
    Clone,
    Debug,
    Eq,
    Hash,
    NonLocalValue,
    OperationValue,
    PartialEq,
    TaskInput,
    TraceRawVcs,
    Encode,
    Decode,
)]
pub struct StackFrame {
    // Whether the frame originates from server-side code.
    pub is_server: bool,
    // Whether the frame is marked ignore-listed in the source map (if known).
    pub is_ignored: Option<bool>,
    // The pre-compilation source file, when the frame was traced back through a map.
    pub original_file: Option<RcStr>,
    // The (compiled) file the frame points at.
    pub file: RcStr,
    // 1-based line number, when known.
    pub line: Option<u32>,
    // 1-based column number, when known.
    pub column: Option<u32>,
    pub method_name: Option<RcStr>,
}
2185
/// Transparent turbo-tasks wrapper for an optional traced stack frame.
#[turbo_tasks::value(transparent)]
#[derive(Clone)]
pub struct OptionStackFrame(Option<StackFrame>);
2189
/// Resolves the source map for a chunk/module identified by `source_url`.
///
/// `file:` URLs are converted to a system path (an optional `id` query parameter
/// selects a module within the chunk); non-URL strings are treated as raw paths. The
/// path must live under the project's dist dir; the map is then looked up first
/// relative to the node root (server chunks) and, failing that, the client-relative
/// path (browser chunks).
#[turbo_tasks::function]
pub async fn get_source_map_rope(
    container: Vc<ProjectContainer>,
    source_url: RcStr,
) -> Result<Vc<FileContent>> {
    let (file_path_sys, module) = match Url::parse(&source_url) {
        Ok(url) => match url.scheme() {
            "file" => {
                let path = match url.to_file_path() {
                    Ok(path) => path.to_string_lossy().into(),
                    Err(_) => {
                        bail!("Failed to convert file URL to file path: {url}");
                    }
                };
                // An `id` query parameter selects a single module inside the chunk.
                let module = url.query_pairs().find(|(k, _)| k == "id");
                (
                    path,
                    match module {
                        Some(module) => Some(urlencoding::decode(&module.1)?.into_owned().into()),
                        None => None,
                    },
                )
            }
            _ => bail!("Unknown url scheme '{}'", url.scheme()),
        },
        // Not a URL at all: treat the whole string as a path.
        Err(_) => (source_url.to_string(), None),
    };

    // Only files under the dist dir can have Turbopack-produced maps.
    let chunk_base_unix =
        match file_path_sys.strip_prefix(container.project().dist_dir_absolute().await?.as_str()) {
            Some(relative_path) => sys_to_unix(relative_path),
            None => {
                return Ok(FileContent::NotFound.cell());
            }
        };

    let server_path = container
        .project()
        .node_root()
        .await?
        .join(&chunk_base_unix)?;

    let client_path = container
        .project()
        .client_relative_path()
        .await?
        .join(&chunk_base_unix)?;

    // Try the server-side location first, then fall back to the client location.
    let mut map = container.get_source_map(server_path, module.clone());

    if !map.await?.is_content() {
        map = container.get_source_map(client_path, module);
        if !map.await?.is_content() {
            bail!("chunk/module '{}' is missing a sourcemap", source_url);
        }
    }

    Ok(map)
}
2254
/// Operation wrapper around [`get_source_map_rope`] for strongly-consistent reads.
#[turbo_tasks::function(operation)]
pub fn get_source_map_rope_operation(
    container: ResolvedVc<ProjectContainer>,
    file_path: RcStr,
) -> Vc<FileContent> {
    get_source_map_rope(*container, file_path)
}
2262
/// Traces a compiled stack frame back to its original source location via the chunk's
/// source map.
///
/// Returns `None` when no map exists, the frame has no line, or a synthetic token has
/// no guessed original file. The resulting `file` is made relative to
/// `current_directory_file_url`; `original_file` is the project-relative source path
/// when the original lives inside the project.
#[turbo_tasks::function(operation)]
pub async fn project_trace_source_operation(
    container: ResolvedVc<ProjectContainer>,
    frame: StackFrame,
    current_directory_file_url: RcStr,
) -> Result<Vc<OptionStackFrame>> {
    let Some(map) =
        &*SourceMap::new_from_rope_cached(get_source_map_rope(*container, frame.file)).await?
    else {
        return Ok(Vc::cell(None));
    };

    let Some(line) = frame.line else {
        return Ok(Vc::cell(None));
    };

    // Frame positions are 1-based; the map lookup is 0-based.
    let token = map.lookup_token(
        line.saturating_sub(1),
        frame.column.unwrap_or(1).saturating_sub(1),
    );

    let (original_file, line, column, method_name, is_ignored) = match token {
        Token::Original(token) => (
            // Percent-decode the mapped file name, avoiding an allocation when no
            // decoding was needed.
            match urlencoding::decode(&token.original_file)? {
                Cow::Borrowed(_) => token.original_file,
                Cow::Owned(original_file) => RcStr::from(original_file),
            },
            // Convert back to 1-based positions.
            Some(token.original_line + 1),
            Some(token.original_column + 1),
            token.name,
            token.is_ignored,
        ),
        Token::Synthetic(token) => {
            let Some(original_file) = token.guessed_original_file else {
                return Ok(Vc::cell(None));
            };
            (original_file, None, None, None, false)
        }
    };

    let project_root_uri =
        uri_from_file(container.project().project_root_path().owned().await?, None).await? + "/";
    // Three recognized prefixes: an absolute file URI inside the project root, the
    // project source-map prefix, or the bare source-map prefix (external sources).
    let (file, original_file) =
        if let Some(source_file) = original_file.strip_prefix(&project_root_uri) {
            (
                RcStr::from(
                    get_relative_path_to(&current_directory_file_url, &original_file)
                        .trim_start_matches("./"),
                ),
                Some(RcStr::from(source_file)),
            )
        } else if let Some(source_file) = original_file.strip_prefix(&*SOURCE_MAP_PREFIX_PROJECT) {
            (
                RcStr::from(
                    get_relative_path_to(
                        &current_directory_file_url,
                        &format!("{project_root_uri}{source_file}"),
                    )
                    .trim_start_matches("./"),
                ),
                Some(RcStr::from(source_file)),
            )
        } else if let Some(source_file) = original_file.strip_prefix(&*SOURCE_MAP_PREFIX) {
            (RcStr::from(source_file), None)
        } else {
            bail!(
                "Original file ({}) outside project ({})",
                original_file,
                project_root_uri
            )
        };

    Ok(Vc::cell(Some(StackFrame {
        file,
        original_file,
        method_name,
        line,
        column,
        is_server: frame.is_server,
        is_ignored: Some(is_ignored),
    })))
}
2352
2353#[tracing::instrument(level = "info", name = "apply SourceMap to stack frame", skip_all)]
2354#[napi]
2355pub async fn project_trace_source(
2356 #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
2357 frame: StackFrame,
2358 current_directory_file_url: String,
2359) -> napi::Result<Option<StackFrame>> {
2360 let container = project.container;
2361 let ctx = &project.turbopack_ctx;
2362 ctx.turbo_tasks()
2363 .run(async move {
2364 let traced_frame = project_trace_source_operation(
2365 container,
2366 frame,
2367 RcStr::from(current_directory_file_url),
2368 )
2369 .read_strongly_consistent()
2370 .await?;
2371 Ok(ReadRef::into_owned(traced_frame))
2372 })
2373 .await
2377 .map_err(|e| napi::Error::from_reason(PrettyPrintError(&e.into()).to_string()))
2378}
2379
2380#[tracing::instrument(level = "info", name = "get source content for asset", skip_all)]
2381#[napi]
2382pub async fn project_get_source_for_asset(
2383 #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
2384 file_path: RcStr,
2385) -> napi::Result<Option<String>> {
2386 let container = project.container;
2387 let ctx = &project.turbopack_ctx;
2388 ctx.turbo_tasks()
2389 .run(async move {
2390 #[turbo_tasks::function(operation)]
2391 async fn source_content_operation(
2392 container: ResolvedVc<ProjectContainer>,
2393 file_path: RcStr,
2394 ) -> Result<Vc<FileContent>> {
2395 let project_path = container.project().project_path().await?;
2396 Ok(project_path.fs().root().await?.join(&file_path)?.read())
2397 }
2398
2399 let source_content = &*source_content_operation(container, file_path.clone())
2400 .read_strongly_consistent()
2401 .await?;
2402
2403 let FileContent::Content(source_content) = source_content else {
2404 bail!("Cannot find source for asset {}", file_path);
2405 };
2406
2407 Ok(Some(source_content.content().to_str()?.into_owned()))
2408 })
2409 .await
2413 .map_err(|e| napi::Error::from_reason(PrettyPrintError(&e.into()).to_string()))
2414}
2415
2416#[tracing::instrument(level = "info", name = "get SourceMap for asset", skip_all)]
2417#[napi]
2418pub async fn project_get_source_map(
2419 #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
2420 file_path: RcStr,
2421) -> napi::Result<Option<String>> {
2422 let container = project.container;
2423 let ctx = &project.turbopack_ctx;
2424 ctx.turbo_tasks()
2425 .run(async move {
2426 let source_map = get_source_map_rope_operation(container, file_path)
2427 .read_strongly_consistent()
2428 .await?;
2429 let Some(map) = source_map.as_content() else {
2430 return Ok(None);
2431 };
2432 Ok(Some(map.content().to_str()?.to_string()))
2433 })
2434 .await
2438 .map_err(|e| napi::Error::from_reason(PrettyPrintError(&e.into()).to_string()))
2439}
2440
2441#[napi]
2442pub fn project_get_source_map_sync(
2443 #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
2444 file_path: RcStr,
2445) -> napi::Result<Option<String>> {
2446 within_runtime_if_available(|| {
2447 tokio::runtime::Handle::current().block_on(project_get_source_map(project, file_path))
2448 })
2449}
2450
2451#[napi]
2452pub async fn project_write_analyze_data(
2453 #[napi(ts_arg_type = "{ __napiType: \"Project\" }")] project: External<ProjectInstance>,
2454 app_dir_only: bool,
2455) -> napi::Result<TurbopackResult<()>> {
2456 let container = project.container;
2457 let (issues, diagnostics) = project
2458 .turbopack_ctx
2459 .turbo_tasks()
2460 .run_once(async move {
2461 let analyze_data_op = write_analyze_data_with_issues_operation(container, app_dir_only);
2462 let WriteAnalyzeResult {
2463 issues,
2464 diagnostics,
2465 effects,
2466 } = &*analyze_data_op.read_strongly_consistent().await?;
2467
2468 effects.apply().await?;
2470 Ok((issues.clone(), diagnostics.clone()))
2471 })
2472 .await
2473 .map_err(|e| napi::Error::from_reason(PrettyPrintError(&e).to_string()))?;
2474
2475 Ok(TurbopackResult {
2476 result: (),
2477 issues: issues.iter().map(|i| NapiIssue::from(&**i)).collect(),
2478 diagnostics: diagnostics
2479 .iter()
2480 .map(|d| NapiDiagnostic::from(d))
2481 .collect(),
2482 })
2483}