From 2b37600894df6bdb51df5eed542f498f35e5e940 Mon Sep 17 00:00:00 2001 From: hardfist Date: Wed, 14 Jan 2026 23:36:17 +0800 Subject: [PATCH 1/6] chore: add rspack_passes.md --- .../src/compilation/rspack_passes.md | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 crates/rspack_core/src/compilation/rspack_passes.md diff --git a/crates/rspack_core/src/compilation/rspack_passes.md b/crates/rspack_core/src/compilation/rspack_passes.md new file mode 100644 index 000000000000..49658e37d716 --- /dev/null +++ b/crates/rspack_core/src/compilation/rspack_passes.md @@ -0,0 +1,33 @@ +# Compilation Passes Architecture + +This document describes the modular architecture of the compilation process in rspack. + +## Overview + +The compilation process is organized into independent modules, each responsible for a specific phase or pass. This modular design improves code maintainability, testability, and allows for better separation of concerns. + +## Module Structure + +``` +compilation/ +├── mod.rs # Main Compilation struct which exposes the compilation API +├── run_passes.rs # passes driver which calls the other passes +├── build_module_graph/ # Module graph construction +├── finish_module/ # Finishes module graph construction; collects async_modules and dependencies_diagnostics +├── optimize_dependencies/ # Dependency optimization, which collects side_effect_optimization info +├── build_chunk_graph/ # Chunk graph construction (code splitting) +├── optimize_modules/ # which includes optimize_modules and after_optimize_modules hooks +├── optimize_chunks/ # which includes optimize_chunks hooks +├── optimize_tree/ # which includes optimize_tree hooks +├── optimize_chunk_modules/ # which includes optimize_chunk_modules hooks +├── module_ids/ # Module ID assignment +├── chunk_ids/ # Chunk ID assignment +├── assign_runtime_ids/ # Runtime ID assignment +├── optimize_code_generation/ # Code generation optimization +├── create_module_hashes/ # Module hash computation 
+├── code_generation/ # Code generation for modules +├── runtime_requirements/ # Runtime requirements processing +├── create_hash/ # Chunk hash computation +├── create_chunk_assets/ # Asset creation +└── process_assets/ # Asset processing hooks +``` From ee2706872f8ba75d409423013d3292ebfaea5c5d Mon Sep 17 00:00:00 2001 From: hardfist Date: Thu, 15 Jan 2026 00:20:43 +0800 Subject: [PATCH 2/6] refactor: move pass into separate folder and drive by run_passes --- .../src/compilation/after_seal/mod.rs | 20 + .../src/compilation/assign_runtime_ids/mod.rs | 44 + .../src/compilation/build_chunk_graph/mod.rs | 1 + .../src/compilation/build_chunk_graph/pass.rs | 26 + .../src/compilation/chunk_ids/mod.rs | 37 + .../src/compilation/code_generation/mod.rs | 203 ++ .../compilation/create_chunk_assets/mod.rs | 171 ++ .../src/compilation/create_hash/mod.rs | 449 +++++ .../compilation/create_module_assets/mod.rs | 53 + .../compilation/create_module_hashes/mod.rs | 135 ++ .../src/compilation/finish_module/mod.rs | 175 ++ crates/rspack_core/src/compilation/mod.rs | 1783 +---------------- .../src/compilation/module_ids/mod.rs | 30 + .../compilation/optimize_chunk_modules/mod.rs | 16 + .../src/compilation/optimize_chunks/mod.rs | 19 + .../optimize_code_generation/mod.rs | 20 + .../compilation/optimize_dependencies/mod.rs | 41 + .../src/compilation/optimize_modules/mod.rs | 27 + .../src/compilation/optimize_tree/mod.rs | 15 + .../src/compilation/process_assets/mod.rs | 43 + .../src/compilation/rspack_passes.md | 60 +- .../rspack_core/src/compilation/run_passes.rs | 55 + .../compilation/runtime_requirements/mod.rs | 395 ++++ 23 files changed, 2035 insertions(+), 1783 deletions(-) create mode 100644 crates/rspack_core/src/compilation/after_seal/mod.rs create mode 100644 crates/rspack_core/src/compilation/assign_runtime_ids/mod.rs create mode 100644 crates/rspack_core/src/compilation/build_chunk_graph/pass.rs create mode 100644 crates/rspack_core/src/compilation/chunk_ids/mod.rs create 
mode 100644 crates/rspack_core/src/compilation/code_generation/mod.rs create mode 100644 crates/rspack_core/src/compilation/create_chunk_assets/mod.rs create mode 100644 crates/rspack_core/src/compilation/create_hash/mod.rs create mode 100644 crates/rspack_core/src/compilation/create_module_assets/mod.rs create mode 100644 crates/rspack_core/src/compilation/create_module_hashes/mod.rs create mode 100644 crates/rspack_core/src/compilation/finish_module/mod.rs create mode 100644 crates/rspack_core/src/compilation/module_ids/mod.rs create mode 100644 crates/rspack_core/src/compilation/optimize_chunk_modules/mod.rs create mode 100644 crates/rspack_core/src/compilation/optimize_chunks/mod.rs create mode 100644 crates/rspack_core/src/compilation/optimize_code_generation/mod.rs create mode 100644 crates/rspack_core/src/compilation/optimize_dependencies/mod.rs create mode 100644 crates/rspack_core/src/compilation/optimize_modules/mod.rs create mode 100644 crates/rspack_core/src/compilation/optimize_tree/mod.rs create mode 100644 crates/rspack_core/src/compilation/process_assets/mod.rs create mode 100644 crates/rspack_core/src/compilation/run_passes.rs create mode 100644 crates/rspack_core/src/compilation/runtime_requirements/mod.rs diff --git a/crates/rspack_core/src/compilation/after_seal/mod.rs b/crates/rspack_core/src/compilation/after_seal/mod.rs new file mode 100644 index 000000000000..529bb632c315 --- /dev/null +++ b/crates/rspack_core/src/compilation/after_seal/mod.rs @@ -0,0 +1,20 @@ +use super::*; +use crate::logger::Logger; + +impl Compilation { + pub async fn after_seal_pass( + &mut self, + plugin_driver: SharedPluginDriver, + ) -> Result<()> { + let logger = self.get_logger("rspack.Compilation"); + let start = logger.time("after seal"); + self.after_seal(plugin_driver).await?; + logger.time_end(start); + Ok(()) + } + + #[instrument("Compilation:after_seal", target=TRACING_BENCH_TARGET,skip_all)] + async fn after_seal(&mut self, plugin_driver: 
SharedPluginDriver) -> Result<()> { + plugin_driver.compilation_hooks.after_seal.call(self).await + } +} diff --git a/crates/rspack_core/src/compilation/assign_runtime_ids/mod.rs b/crates/rspack_core/src/compilation/assign_runtime_ids/mod.rs new file mode 100644 index 000000000000..4ca5cbba70c4 --- /dev/null +++ b/crates/rspack_core/src/compilation/assign_runtime_ids/mod.rs @@ -0,0 +1,44 @@ +use super::*; + +impl Compilation { + pub fn assign_runtime_ids(&mut self) { + fn process_entrypoint( + entrypoint_ukey: &ChunkGroupUkey, + chunk_group_by_ukey: &ChunkGroupByUkey, + chunk_by_ukey: &ChunkByUkey, + chunk_graph: &mut ChunkGraph, + ) { + let entrypoint = chunk_group_by_ukey.expect_get(entrypoint_ukey); + let runtime = entrypoint + .kind + .get_entry_options() + .and_then(|o| match &o.runtime { + Some(EntryRuntime::String(s)) => Some(s.to_owned()), + _ => None, + }) + .or(entrypoint.name().map(|n| n.to_string())); + if let (Some(runtime), Some(chunk)) = ( + runtime, + chunk_by_ukey.get(&entrypoint.get_runtime_chunk(chunk_group_by_ukey)), + ) { + chunk_graph.set_runtime_id(runtime, chunk.id().map(|id| id.to_string())); + } + } + for i in self.entrypoints.iter() { + process_entrypoint( + i.1, + &self.chunk_group_by_ukey, + &self.chunk_by_ukey, + &mut self.chunk_graph, + ) + } + for i in self.async_entrypoints.iter() { + process_entrypoint( + i, + &self.chunk_group_by_ukey, + &self.chunk_by_ukey, + &mut self.chunk_graph, + ) + } + } +} diff --git a/crates/rspack_core/src/compilation/build_chunk_graph/mod.rs b/crates/rspack_core/src/compilation/build_chunk_graph/mod.rs index a2a60e24b443..e64652e1f4e7 100644 --- a/crates/rspack_core/src/compilation/build_chunk_graph/mod.rs +++ b/crates/rspack_core/src/compilation/build_chunk_graph/mod.rs @@ -7,6 +7,7 @@ use crate::{Compilation, incremental::IncrementalPasses}; pub(crate) mod artifact; pub(crate) mod code_splitter; pub(crate) mod incremental; +pub(crate) mod pass; #[instrument("Compilation:build_chunk_graph", skip_all)] 
pub fn build_chunk_graph(compilation: &mut Compilation) -> rspack_error::Result<()> { diff --git a/crates/rspack_core/src/compilation/build_chunk_graph/pass.rs b/crates/rspack_core/src/compilation/build_chunk_graph/pass.rs new file mode 100644 index 000000000000..ea6660f342d7 --- /dev/null +++ b/crates/rspack_core/src/compilation/build_chunk_graph/pass.rs @@ -0,0 +1,26 @@ +use crate::compilation::build_chunk_graph::{ + artifact::use_code_splitting_cache, + build_chunk_graph, +}; +use crate::compilation::Compilation; +use crate::logger::Logger; +use rspack_error::Result; + +impl Compilation { + pub async fn build_chunk_graph_pass(&mut self) -> Result<()> { + let logger = self.get_logger("rspack.Compilation"); + self.module_graph_cache_artifact.freeze(); + use_code_splitting_cache(self, |compilation| async { + let start = logger.time("rebuild chunk graph"); + build_chunk_graph(compilation)?; + compilation + .chunk_graph + .generate_dot(compilation, "after-code-splitting") + .await; + logger.time_end(start); + Ok(compilation) + }) + .await?; + Ok(()) + } +} diff --git a/crates/rspack_core/src/compilation/chunk_ids/mod.rs b/crates/rspack_core/src/compilation/chunk_ids/mod.rs new file mode 100644 index 000000000000..7a8b7cda7a07 --- /dev/null +++ b/crates/rspack_core/src/compilation/chunk_ids/mod.rs @@ -0,0 +1,37 @@ +use super::*; +use crate::logger::Logger; + +impl Compilation { + pub async fn chunk_ids_pass(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { + let logger = self.get_logger("rspack.Compilation"); + let start = logger.time("chunk ids"); + + // Check if CHUNK_IDS pass is disabled, and clear artifact if needed + if !self + .incremental + .passes_enabled(IncrementalPasses::CHUNK_IDS) + { + self.named_chunk_ids_artifact.clear(); + } + + let mut diagnostics = vec![]; + let mut chunk_by_ukey = mem::take(&mut self.chunk_by_ukey); + let mut named_chunk_ids_artifact = mem::take(&mut self.named_chunk_ids_artifact); + plugin_driver + .compilation_hooks + 
.chunk_ids + .call( + self, + &mut chunk_by_ukey, + &mut named_chunk_ids_artifact, + &mut diagnostics, + ) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.chunkIds"))?; + self.chunk_by_ukey = chunk_by_ukey; + self.named_chunk_ids_artifact = named_chunk_ids_artifact; + self.extend_diagnostics(diagnostics); + logger.time_end(start); + Ok(()) + } +} diff --git a/crates/rspack_core/src/compilation/code_generation/mod.rs b/crates/rspack_core/src/compilation/code_generation/mod.rs new file mode 100644 index 000000000000..8cd5fc5e972f --- /dev/null +++ b/crates/rspack_core/src/compilation/code_generation/mod.rs @@ -0,0 +1,203 @@ +use super::*; +use crate::logger::Logger; + +impl Compilation { + pub async fn code_generation_pass( + &mut self, + plugin_driver: SharedPluginDriver, + ) -> Result<()> { + let logger = self.get_logger("rspack.Compilation"); + let start = logger.time("code generation"); + let code_generation_modules = if let Some(mutations) = self + .incremental + .mutations_read(IncrementalPasses::MODULES_CODEGEN) + && !self.code_generation_results.is_empty() + { + let revoked_modules = mutations.iter().filter_map(|mutation| match mutation { + Mutation::ModuleRemove { module } => Some(*module), + _ => None, + }); + for revoked_module in revoked_modules { + self.code_generation_results.remove(&revoked_module); + } + let modules: IdentifierSet = mutations + .iter() + .filter_map(|mutation| match mutation { + Mutation::ModuleSetHashes { module } => Some(*module), + _ => None, + }) + .collect(); + // also cleanup for updated modules, for `insert(); insert();` the second insert() won't override the first insert() on code_generation_results + for module in &modules { + self.code_generation_results.remove(module); + } + tracing::debug!(target: incremental::TRACING_TARGET, passes = %IncrementalPasses::MODULES_CODEGEN, %mutations); + let logger = self.get_logger("rspack.incremental.modulesCodegen"); + logger.log(format!( + "{} modules are 
affected, {} in total", + modules.len(), + self.get_module_graph().modules().len() + )); + modules + } else { + self.code_generation_results = Default::default(); + self.get_module_graph().modules().keys().copied().collect() + }; + self.code_generation(code_generation_modules).await?; + + let mut diagnostics = vec![]; + plugin_driver + .compilation_hooks + .after_code_generation + .call(self, &mut diagnostics) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.afterCodeGeneration"))?; + self.extend_diagnostics(diagnostics); + + logger.time_end(start); + Ok(()) + } + + #[instrument("Compilation:code_generation",target=TRACING_BENCH_TARGET, skip_all)] + async fn code_generation(&mut self, modules: IdentifierSet) -> Result<()> { + let logger = self.get_logger("rspack.Compilation"); + let mut codegen_cache_counter = match self.options.cache { + CacheOptions::Disabled => None, + _ => Some(logger.cache("module code generation cache")), + }; + + let module_graph = self.get_module_graph(); + let mut no_codegen_dependencies_modules = IdentifierSet::default(); + let mut has_codegen_dependencies_modules = IdentifierSet::default(); + for module_identifier in modules { + let module = module_graph + .module_by_identifier(&module_identifier) + .expect("should have module"); + if module.get_code_generation_dependencies().is_none() { + no_codegen_dependencies_modules.insert(module_identifier); + } else { + has_codegen_dependencies_modules.insert(module_identifier); + } + } + + self + .code_generation_modules(&mut codegen_cache_counter, no_codegen_dependencies_modules) + .await?; + self + .code_generation_modules(&mut codegen_cache_counter, has_codegen_dependencies_modules) + .await?; + + if let Some(counter) = codegen_cache_counter { + logger.cache_end(counter); + } + + Ok(()) + } + + pub(crate) async fn code_generation_modules( + &mut self, + cache_counter: &mut Option, + modules: IdentifierSet, + ) -> Result<()> { + let chunk_graph = &self.chunk_graph; + 
let module_graph = self.get_module_graph(); + let mut jobs = Vec::new(); + for module in modules { + let mut map: HashMap = HashMap::default(); + for runtime in chunk_graph.get_module_runtimes_iter(module, &self.chunk_by_ukey) { + let hash = ChunkGraph::get_module_hash(self, module, runtime) + .expect("should have cgm.hash in code generation"); + let scope = self + .plugin_driver + .compilation_hooks + .concatenation_scope + .call(self, module) + .await?; + if let Some(job) = map.get_mut(hash) { + job.runtimes.push(runtime.clone()); + } else { + map.insert( + hash.clone(), + CodeGenerationJob { + module, + hash: hash.clone(), + runtime: runtime.clone(), + runtimes: vec![runtime.clone()], + scope, + }, + ); + } + } + jobs.extend(map.into_values()); + } + + let results = rspack_futures::scope::<_, _>(|token| { + jobs.into_iter().for_each(|job| { + // SAFETY: await immediately and trust caller to poll future entirely + let s = unsafe { token.used((&self, &module_graph, job)) }; + + s.spawn(|(this, module_graph, job)| async { + let options = &this.options; + let old_cache = &this.old_cache; + + let module = module_graph + .module_by_identifier(&job.module) + .expect("should have module"); + let codegen_res = old_cache + .code_generate_occasion + .use_cache(&job, || async { + module + .code_generation(this, Some(&job.runtime), job.scope.clone()) + .await + .map(|mut codegen_res| { + codegen_res.set_hash( + &options.output.hash_function, + &options.output.hash_digest, + &options.output.hash_salt, + ); + codegen_res + }) + }) + .await; + + (job.module, job.runtimes, codegen_res) + }) + }) + }) + .await; + let results = results + .into_iter() + .map(|res| res.to_rspack_result()) + .collect::>>()?; + + for (module, runtimes, (codegen_res, from_cache)) in results { + if let Some(counter) = cache_counter { + if from_cache { + counter.hit(); + } else { + counter.miss(); + } + } + let codegen_res = match codegen_res { + Ok(codegen_res) => codegen_res, + Err(err) => { + let mut 
diagnostic = Diagnostic::from(err); + diagnostic.module_identifier = Some(module); + self.push_diagnostic(diagnostic); + let mut codegen_res = CodeGenerationResult::default(); + codegen_res.set_hash( + &self.options.output.hash_function, + &self.options.output.hash_digest, + &self.options.output.hash_salt, + ); + codegen_res + } + }; + self + .code_generation_results + .insert(module, codegen_res, runtimes); + self.code_generated_modules.insert(module); + } + Ok(()) + } +} diff --git a/crates/rspack_core/src/compilation/create_chunk_assets/mod.rs b/crates/rspack_core/src/compilation/create_chunk_assets/mod.rs new file mode 100644 index 000000000000..d8d7d4074e41 --- /dev/null +++ b/crates/rspack_core/src/compilation/create_chunk_assets/mod.rs @@ -0,0 +1,171 @@ +use super::*; +use crate::logger::Logger; + +impl Compilation { + pub async fn create_chunk_assets_pass( + &mut self, + plugin_driver: SharedPluginDriver, + ) -> Result<()> { + let logger = self.get_logger("rspack.Compilation"); + let start = logger.time("create chunk assets"); + self.create_chunk_assets(plugin_driver).await?; + logger.time_end(start); + Ok(()) + } + + #[instrument("Compilation::create_chunk_assets",target=TRACING_BENCH_TARGET, skip_all)] + async fn create_chunk_assets(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { + if (self.options.output.filename.has_hash_placeholder() + || self.options.output.chunk_filename.has_hash_placeholder() + || self.options.output.css_filename.has_hash_placeholder() + || self + .options + .output + .css_chunk_filename + .has_hash_placeholder()) + && let Some(diagnostic) = self.incremental.disable_passes( + IncrementalPasses::CHUNKS_RENDER, + "Chunk filename that dependent on full hash", + "chunk filename that dependent on full hash is not supported in incremental compilation", + ) + && let Some(diagnostic) = diagnostic + { + self.push_diagnostic(diagnostic); + } + + // Check if CHUNKS_RENDER pass is disabled, and clear artifact if needed + if !self 
+ .incremental + .passes_enabled(IncrementalPasses::CHUNKS_RENDER) + { + self.chunk_render_artifact.clear(); + } + + let chunks = if let Some(mutations) = self + .incremental + .mutations_read(IncrementalPasses::CHUNKS_RENDER) + && !self.chunk_render_artifact.is_empty() + { + let removed_chunks = mutations.iter().filter_map(|mutation| match mutation { + Mutation::ChunkRemove { chunk } => Some(*chunk), + _ => None, + }); + for removed_chunk in removed_chunks { + self.chunk_render_artifact.remove(&removed_chunk); + } + self + .chunk_render_artifact + .retain(|chunk, _| self.chunk_by_ukey.contains(chunk)); + let chunks: UkeySet = mutations + .iter() + .filter_map(|mutation| match mutation { + Mutation::ChunkSetHashes { chunk } => Some(*chunk), + _ => None, + }) + .collect(); + tracing::debug!(target: incremental::TRACING_TARGET, passes = %IncrementalPasses::CHUNKS_RENDER, %mutations); + let logger = self.get_logger("rspack.incremental.chunksRender"); + logger.log(format!( + "{} chunks are affected, {} in total", + chunks.len(), + self.chunk_by_ukey.len() + )); + chunks + } else { + self.chunk_by_ukey.keys().copied().collect() + }; + let results = rspack_futures::scope::<_, Result<_>>(|token| { + chunks.iter().for_each(|chunk| { + // SAFETY: await immediately and trust caller to poll future entirely + let s = unsafe { token.used((&self, &plugin_driver, chunk)) }; + + s.spawn(|(this, plugin_driver, chunk)| async { + let mut manifests = Vec::new(); + let mut diagnostics = Vec::new(); + plugin_driver + .compilation_hooks + .render_manifest + .call(this, chunk, &mut manifests, &mut diagnostics) + .await?; + + rspack_error::Result::Ok(( + *chunk, + ChunkRenderResult { + manifests, + diagnostics, + }, + )) + }); + }) + }) + .await; + + let mut chunk_render_results: UkeyMap = Default::default(); + for result in results { + let item = result.to_rspack_result()?; + let (key, value) = item?; + chunk_render_results.insert(key, value); + } + let chunk_ukey_and_manifest = if self + 
.incremental + .passes_enabled(IncrementalPasses::CHUNKS_RENDER) + { + self.chunk_render_artifact.extend(chunk_render_results); + self.chunk_render_artifact.clone() + } else { + chunk_render_results + }; + + for ( + chunk_ukey, + ChunkRenderResult { + manifests, + diagnostics, + }, + ) in chunk_ukey_and_manifest + { + self.extend_diagnostics(diagnostics); + + for file_manifest in manifests { + let filename = file_manifest.filename; + let current_chunk = self.chunk_by_ukey.expect_get_mut(&chunk_ukey); + + current_chunk.set_rendered(true); + if file_manifest.auxiliary { + current_chunk.add_auxiliary_file(filename.clone()); + } else { + current_chunk.add_file(filename.clone()); + } + + self.emit_asset( + filename.clone(), + CompilationAsset::new(Some(file_manifest.source), file_manifest.info), + ); + + _ = self + .chunk_asset(chunk_ukey, &filename, plugin_driver.clone()) + .await; + } + } + + Ok(()) + } + + // #[instrument( + // name = "Compilation:chunk_asset", + // skip(self, plugin_driver, chunk_ukey) + // )] + async fn chunk_asset( + &self, + chunk_ukey: ChunkUkey, + filename: &str, + plugin_driver: SharedPluginDriver, + ) -> Result<()> { + plugin_driver + .compilation_hooks + .chunk_asset + .call(self, &chunk_ukey, filename) + .await?; + Ok(()) + } +} diff --git a/crates/rspack_core/src/compilation/create_hash/mod.rs b/crates/rspack_core/src/compilation/create_hash/mod.rs new file mode 100644 index 000000000000..3c3b3abea142 --- /dev/null +++ b/crates/rspack_core/src/compilation/create_hash/mod.rs @@ -0,0 +1,449 @@ +use super::*; +use crate::logger::Logger; + +pub struct ChunkHashResult { + pub hash: RspackHashDigest, + pub content_hash: ChunkContentHash, +} + +impl Compilation { + pub async fn create_hash_pass(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { + let logger = self.get_logger("rspack.Compilation"); + let start = logger.time("hashing"); + self.create_hash(plugin_driver).await?; + self.runtime_modules_code_generation().await?; + 
logger.time_end(start); + Ok(()) + } + + #[instrument(name = "Compilation:create_hash",target=TRACING_BENCH_TARGET, skip_all)] + pub async fn create_hash(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { + let logger = self.get_logger("rspack.Compilation"); + + // Check if there are any chunks that depend on full hash, usually only runtime chunks are + // possible to depend on full hash, but for library type commonjs/module, it's possible to + // have non-runtime chunks depend on full hash, the library format plugin is using + // dependent_full_hash hook to declare it. + let mut full_hash_chunks = UkeySet::default(); + for chunk_ukey in self.chunk_by_ukey.keys() { + let chunk_dependent_full_hash = plugin_driver + .compilation_hooks + .dependent_full_hash + .call(self, chunk_ukey) + .await? + .unwrap_or_default(); + if chunk_dependent_full_hash { + full_hash_chunks.insert(*chunk_ukey); + } + } + if !full_hash_chunks.is_empty() + && let Some(diagnostic) = self.incremental.disable_passes( + IncrementalPasses::CHUNKS_HASHES, + "Chunk content that dependent on full hash", + "it requires calculating the hashes of all the chunks, which is a global effect", + ) + && let Some(diagnostic) = diagnostic + { + self.push_diagnostic(diagnostic); + } + if !self + .incremental + .passes_enabled(IncrementalPasses::CHUNKS_HASHES) + { + self.chunk_hashes_artifact.clear(); + } + + let create_hash_chunks = if let Some(mutations) = self + .incremental + .mutations_read(IncrementalPasses::CHUNKS_HASHES) + && !self.chunk_hashes_artifact.is_empty() + { + let removed_chunks = mutations.iter().filter_map(|mutation| match mutation { + Mutation::ChunkRemove { chunk } => Some(*chunk), + _ => None, + }); + for removed_chunk in removed_chunks { + self.chunk_hashes_artifact.remove(&removed_chunk); + } + self + .chunk_hashes_artifact + .retain(|chunk, _| self.chunk_by_ukey.contains(chunk)); + let chunks = mutations.get_affected_chunks_with_chunk_graph(self); + tracing::debug!(target: 
incremental::TRACING_TARGET, passes = %IncrementalPasses::CHUNKS_HASHES, %mutations, ?chunks); + let logger = self.get_logger("rspack.incremental.chunksHashes"); + logger.log(format!( + "{} chunks are affected, {} in total", + chunks.len(), + self.chunk_by_ukey.len(), + )); + chunks + } else { + self.chunk_by_ukey.keys().copied().collect() + }; + + let mut compilation_hasher = RspackHash::from(&self.options.output); + + fn try_process_chunk_hash_results( + compilation: &mut Compilation, + chunk_hash_results: Vec>, + ) -> Result<()> { + for hash_result in chunk_hash_results { + let (chunk_ukey, chunk_hash_result) = hash_result?; + let chunk = compilation.chunk_by_ukey.expect_get(&chunk_ukey); + let chunk_hashes_changed = chunk.set_hashes( + &mut compilation.chunk_hashes_artifact, + chunk_hash_result.hash, + chunk_hash_result.content_hash, + ); + if chunk_hashes_changed + && let Some(mut mutations) = compilation.incremental.mutations_write() + { + mutations.add(Mutation::ChunkSetHashes { chunk: chunk_ukey }); + } + } + Ok(()) + } + + let unordered_runtime_chunks: UkeySet = self.get_chunk_graph_entries().collect(); + let start = logger.time("hashing: hash chunks"); + let other_chunks: Vec<_> = create_hash_chunks + .iter() + .filter(|key| !unordered_runtime_chunks.contains(key)) + .collect(); + + // create hash for runtime modules in other chunks + let other_chunk_runtime_module_hashes = rspack_futures::scope::<_, Result<_>>(|token| { + other_chunks + .iter() + .flat_map(|chunk| self.chunk_graph.get_chunk_runtime_modules_iterable(chunk)) + .for_each(|runtime_module_identifier| { + let s = unsafe { token.used((&self, runtime_module_identifier)) }; + s.spawn(|(compilation, runtime_module_identifier)| async { + let runtime_module = &compilation.runtime_modules[runtime_module_identifier]; + let digest = runtime_module.get_runtime_hash(compilation, None).await?; + Ok((*runtime_module_identifier, digest)) + }); + }) + }) + .await + .into_iter() + .map(|res| 
res.to_rspack_result()) + .collect::>>()?; + + for res in other_chunk_runtime_module_hashes { + let (runtime_module_identifier, digest) = res?; + self + .runtime_modules_hash + .insert(runtime_module_identifier, digest); + } + + // create hash for other chunks + let other_chunks_hash_results = rspack_futures::scope::<_, Result<_>>(|token| { + for chunk in other_chunks { + let s = unsafe { token.used((&self, chunk, &plugin_driver)) }; + s.spawn(|(compilation, chunk, plugin_driver)| async { + let hash_result = compilation + .process_chunk_hash(*chunk, plugin_driver) + .await?; + Ok((*chunk, hash_result)) + }); + } + }) + .await + .into_iter() + .map(|res| res.to_rspack_result()) + .collect::>>()?; + + try_process_chunk_hash_results(self, other_chunks_hash_results)?; + logger.time_end(start); + + // collect references for runtime chunks + let mut runtime_chunks_map: HashMap, u32)> = + unordered_runtime_chunks + .into_iter() + .map(|runtime_chunk| (runtime_chunk, (Vec::new(), 0))) + .collect(); + let mut remaining: u32 = 0; + for runtime_chunk_ukey in runtime_chunks_map.keys().copied().collect::>() { + let runtime_chunk = self.chunk_by_ukey.expect_get(&runtime_chunk_ukey); + let groups = runtime_chunk.get_all_referenced_async_entrypoints(&self.chunk_group_by_ukey); + for other in groups + .into_iter() + .map(|group| self.chunk_group_by_ukey.expect_get(&group)) + .map(|group| group.get_runtime_chunk(&self.chunk_group_by_ukey)) + { + let (other_referenced_by, _) = runtime_chunks_map + .get_mut(&other) + .expect("should in runtime_chunks_map"); + other_referenced_by.push(runtime_chunk_ukey); + let info = runtime_chunks_map + .get_mut(&runtime_chunk_ukey) + .expect("should in runtime_chunks_map"); + info.1 += 1; + remaining += 1; + } + } + // sort runtime chunks by its references + let mut runtime_chunks = Vec::with_capacity(runtime_chunks_map.len()); + for (runtime_chunk, (_, remaining)) in &runtime_chunks_map { + if *remaining == 0 { + 
runtime_chunks.push(*runtime_chunk); + } + } + let mut ready_chunks = Vec::new(); + + let mut i = 0; + while i < runtime_chunks.len() { + let chunk_ukey = runtime_chunks[i]; + let has_full_hash_modules = full_hash_chunks.contains(&chunk_ukey) + || self + .chunk_graph + .has_chunk_full_hash_modules(&chunk_ukey, &self.runtime_modules); + if has_full_hash_modules { + full_hash_chunks.insert(chunk_ukey); + } + let referenced_by = runtime_chunks_map + .get(&chunk_ukey) + .expect("should in runtime_chunks_map") + .0 + .clone(); + for other in referenced_by { + if has_full_hash_modules { + for runtime_module in self.chunk_graph.get_chunk_runtime_modules_iterable(&other) { + let runtime_module = self + .runtime_modules + .get(runtime_module) + .expect("should have runtime_module"); + if runtime_module.dependent_hash() { + full_hash_chunks.insert(other); + break; + } + } + } + remaining -= 1; + let (_, other_remaining) = runtime_chunks_map + .get_mut(&other) + .expect("should in runtime_chunks_map"); + *other_remaining -= 1; + if *other_remaining == 0 { + ready_chunks.push(other); + } + } + if !ready_chunks.is_empty() { + runtime_chunks.append(&mut ready_chunks); + } + i += 1; + } + // create warning for remaining circular references + if remaining > 0 { + let mut circular: Vec<_> = runtime_chunks_map + .iter() + .filter(|(_, (_, remaining))| *remaining != 0) + .map(|(chunk_ukey, _)| self.chunk_by_ukey.expect_get(chunk_ukey)) + .collect(); + circular.sort_unstable_by(|a, b| a.id().cmp(&b.id())); + runtime_chunks.extend(circular.iter().map(|chunk| chunk.ukey())); + let circular_names = circular + .iter() + .map(|chunk| { + chunk + .name() + .or(chunk.id().map(|id| id.as_str())) + .unwrap_or("no id chunk") + }) + .join(", "); + let error = rspack_error::Error::warning(format!( + "Circular dependency between chunks with runtime ({circular_names})\nThis prevents using hashes of each other and should be avoided." 
+ )); + self.push_diagnostic(error.into()); + } + + // create hash for runtime chunks and the runtime modules within them + // The subsequent runtime chunks and runtime modules will depend on + // the hash results of the previous runtime chunks and runtime modules. + // Therefore, create hashes one by one in sequence. + let start = logger.time("hashing: hash runtime chunks"); + for runtime_chunk_ukey in runtime_chunks { + let runtime_module_hashes = rspack_futures::scope::<_, Result<_>>(|token| { + self + .chunk_graph + .get_chunk_runtime_modules_iterable(&runtime_chunk_ukey) + .for_each(|runtime_module_identifier| { + let s = unsafe { token.used((&self, runtime_module_identifier)) }; + s.spawn(|(compilation, runtime_module_identifier)| async { + let runtime_module = &compilation.runtime_modules[runtime_module_identifier]; + let digest = runtime_module.get_runtime_hash(compilation, None).await?; + Ok((*runtime_module_identifier, digest)) + }); + }) + }) + .await + .into_iter() + .map(|res| res.to_rspack_result()) + .collect::>>()?; + + for res in runtime_module_hashes { + let (mid, digest) = res?; + self.runtime_modules_hash.insert(mid, digest); + } + + let chunk_hash_result = self + .process_chunk_hash(runtime_chunk_ukey, &plugin_driver) + .await?; + let chunk = self.chunk_by_ukey.expect_get(&runtime_chunk_ukey); + let chunk_hashes_changed = chunk.set_hashes( + &mut self.chunk_hashes_artifact, + chunk_hash_result.hash, + chunk_hash_result.content_hash, + ); + if chunk_hashes_changed && let Some(mut mutations) = self.incremental.mutations_write() { + mutations.add(Mutation::ChunkSetHashes { + chunk: runtime_chunk_ukey, + }); + } + } + logger.time_end(start); + + // create full hash + self + .chunk_by_ukey + .values() + .sorted_unstable_by_key(|chunk| chunk.ukey()) + .filter_map(|chunk| chunk.hash(&self.chunk_hashes_artifact)) + .for_each(|hash| { + hash.hash(&mut compilation_hasher); + }); + self.hot_index.hash(&mut compilation_hasher); + self.hash = 
Some(compilation_hasher.digest(&self.options.output.hash_digest)); + + // re-create runtime chunk hash that depend on full hash + let start = logger.time("hashing: process full hash chunks"); + for chunk_ukey in full_hash_chunks { + for runtime_module_identifier in self + .chunk_graph + .get_chunk_runtime_modules_iterable(&chunk_ukey) + { + let runtime_module = &self.runtime_modules[runtime_module_identifier]; + if runtime_module.full_hash() || runtime_module.dependent_hash() { + let digest = runtime_module.get_runtime_hash(self, None).await?; + self + .runtime_modules_hash + .insert(*runtime_module_identifier, digest); + } + } + let chunk = self.chunk_by_ukey.expect_get(&chunk_ukey); + let new_chunk_hash = { + let chunk_hash = chunk + .hash(&self.chunk_hashes_artifact) + .expect("should have chunk hash"); + let mut hasher = RspackHash::from(&self.options.output); + chunk_hash.hash(&mut hasher); + self.hash.hash(&mut hasher); + hasher.digest(&self.options.output.hash_digest) + }; + let new_content_hash = { + let content_hash = chunk + .content_hash(&self.chunk_hashes_artifact) + .expect("should have content hash"); + content_hash + .iter() + .map(|(source_type, content_hash)| { + let mut hasher = RspackHash::from(&self.options.output); + content_hash.hash(&mut hasher); + self.hash.hash(&mut hasher); + ( + *source_type, + hasher.digest(&self.options.output.hash_digest), + ) + }) + .collect() + }; + let chunk_hashes_changed = chunk.set_hashes( + &mut self.chunk_hashes_artifact, + new_chunk_hash, + new_content_hash, + ); + if chunk_hashes_changed && let Some(mut mutations) = self.incremental.mutations_write() { + mutations.add(Mutation::ChunkSetHashes { chunk: chunk_ukey }); + } + } + logger.time_end(start); + Ok(()) + } + + #[instrument(skip_all)] + pub async fn runtime_modules_code_generation(&mut self) -> Result<()> { + let results = rspack_futures::scope::<_, Result<_>>(|token| { + self + .runtime_modules + .iter() + .for_each(|(runtime_module_identifier, 
runtime_module)| { + let s = unsafe { token.used((&self, runtime_module_identifier, runtime_module)) }; + s.spawn( + |(compilation, runtime_module_identifier, runtime_module)| async { + let result = runtime_module + .code_generation(compilation, None, None) + .await?; + let source = result + .get(&SourceType::Runtime) + .expect("should have source"); + Ok((*runtime_module_identifier, source.clone())) + }, + ) + }) + }) + .await + .into_iter() + .map(|res| res.to_rspack_result()) + .collect::>>()?; + + let mut runtime_module_sources = IdentifierMap::::default(); + for result in results { + let (runtime_module_identifier, source) = result?; + runtime_module_sources.insert(runtime_module_identifier, source); + } + + self.runtime_modules_code_generation_source = runtime_module_sources; + self + .code_generated_modules + .extend(self.runtime_modules.keys().copied()); + Ok(()) + } + + async fn process_chunk_hash( + &self, + chunk_ukey: ChunkUkey, + plugin_driver: &SharedPluginDriver, + ) -> Result { + let mut hasher = RspackHash::from(&self.options.output); + if let Some(chunk) = self.chunk_by_ukey.get(&chunk_ukey) { + chunk.update_hash(&mut hasher, self); + } + plugin_driver + .compilation_hooks + .chunk_hash + .call(self, &chunk_ukey, &mut hasher) + .await?; + let chunk_hash = hasher.digest(&self.options.output.hash_digest); + + let mut content_hashes: HashMap = HashMap::default(); + plugin_driver + .compilation_hooks + .content_hash + .call(self, &chunk_ukey, &mut content_hashes) + .await?; + + let content_hashes = content_hashes + .into_iter() + .map(|(t, mut hasher)| { + chunk_hash.hash(&mut hasher); + (t, hasher.digest(&self.options.output.hash_digest)) + }) + .collect(); + + Ok(ChunkHashResult { + hash: chunk_hash, + content_hash: content_hashes, + }) + } +} diff --git a/crates/rspack_core/src/compilation/create_module_assets/mod.rs b/crates/rspack_core/src/compilation/create_module_assets/mod.rs new file mode 100644 index 000000000000..e4d08d8d1f48 --- /dev/null 
+++ b/crates/rspack_core/src/compilation/create_module_assets/mod.rs @@ -0,0 +1,53 @@ +use super::*; +use crate::logger::Logger; + +impl Compilation { + pub async fn create_module_assets_pass( + &mut self, + plugin_driver: SharedPluginDriver, + ) -> Result<()> { + let logger = self.get_logger("rspack.Compilation"); + let start = logger.time("create module assets"); + self.create_module_assets(plugin_driver).await; + logger.time_end(start); + Ok(()) + } + + #[instrument("Compilation:create_module_assets",target=TRACING_BENCH_TARGET, skip_all)] + async fn create_module_assets(&mut self, _plugin_driver: SharedPluginDriver) { + let mut chunk_asset_map = vec![]; + let mut module_assets = vec![]; + let mg = self.get_module_graph(); + for (identifier, module) in mg.modules() { + let assets = &module.build_info().assets; + if assets.is_empty() { + continue; + } + + for (name, asset) in assets.as_ref() { + module_assets.push((name.clone(), asset.clone())); + } + // assets of executed modules are not in this compilation + if self + .chunk_graph + .chunk_graph_module_by_module_identifier + .contains_key(&identifier) + { + for chunk in self.chunk_graph.get_module_chunks(identifier).iter() { + for name in assets.keys() { + chunk_asset_map.push((*chunk, name.clone())) + } + } + } + } + + for (name, asset) in module_assets { + self.emit_asset(name, asset); + } + + for (chunk, asset_name) in chunk_asset_map { + let chunk = self.chunk_by_ukey.expect_get_mut(&chunk); + chunk.add_auxiliary_file(asset_name); + } + } +} diff --git a/crates/rspack_core/src/compilation/create_module_hashes/mod.rs b/crates/rspack_core/src/compilation/create_module_hashes/mod.rs new file mode 100644 index 000000000000..6e5554625027 --- /dev/null +++ b/crates/rspack_core/src/compilation/create_module_hashes/mod.rs @@ -0,0 +1,135 @@ +use super::*; + +impl Compilation { + pub async fn create_module_hashes_pass(&mut self) -> Result<()> { + // Check if MODULES_HASHES pass is disabled, and clear artifact if 
needed + if !self + .incremental + .passes_enabled(IncrementalPasses::MODULES_HASHES) + { + self.cgm_hash_artifact.clear(); + } + + let create_module_hashes_modules = if let Some(mutations) = self + .incremental + .mutations_read(IncrementalPasses::MODULES_HASHES) + && !self.cgm_hash_artifact.is_empty() + { + let revoked_modules = mutations.iter().filter_map(|mutation| match mutation { + Mutation::ModuleRemove { module } => Some(*module), + _ => None, + }); + for revoked_module in revoked_modules { + self.cgm_hash_artifact.remove(&revoked_module); + } + let mut modules = mutations.get_affected_modules_with_chunk_graph(self); + + // check if module runtime changes + let mg = self.get_module_graph(); + for mi in mg.modules().keys() { + let module_runtimes = self + .chunk_graph + .get_module_runtimes(*mi, &self.chunk_by_ukey); + let module_runtime_keys = module_runtimes + .values() + .map(get_runtime_key) + .collect::>(); + + if let Some(runtime_map) = self.cgm_hash_artifact.get_runtime_map(mi) { + if module_runtimes.is_empty() { + // module has no runtime, skip + continue; + } + if module_runtimes.len() == 1 { + // single runtime + if !matches!(runtime_map.mode, RuntimeMode::SingleEntry) + || runtime_map + .single_runtime + .as_ref() + .expect("should have single runtime for single entry") + != module_runtimes + .values() + .next() + .expect("should have at least one runtime") + { + modules.insert(*mi); + } + } else { + // multiple runtimes + if matches!(runtime_map.mode, RuntimeMode::SingleEntry) { + modules.insert(*mi); + continue; + } + + if runtime_map.map.len() != module_runtimes.len() { + modules.insert(*mi); + continue; + } + + for runtime_key in runtime_map.map.keys() { + if !module_runtime_keys.contains(runtime_key) { + modules.insert(*mi); + break; + } + } + } + } + } + + tracing::debug!(target: incremental::TRACING_TARGET, passes = %IncrementalPasses::MODULES_HASHES, %mutations, ?modules); + let logger = self.get_logger("rspack.incremental.modulesHashes"); 
+ logger.log(format!( + "{} modules are affected, {} in total", + modules.len(), + mg.modules().len() + )); + + modules + } else { + self.get_module_graph().modules().keys().copied().collect() + }; + self + .create_module_hashes(create_module_hashes_modules) + .await + } + + #[instrument("Compilation:create_module_hashes", skip_all)] + pub async fn create_module_hashes(&mut self, modules: IdentifierSet) -> Result<()> { + let mg = self.get_module_graph(); + let chunk_graph = &self.chunk_graph; + let chunk_by_ukey = &self.chunk_by_ukey; + + let results = rspack_futures::scope::<_, Result<_>>(|token| { + for module_identifier in modules { + let s = unsafe { token.used((&*self, &mg, chunk_graph, chunk_by_ukey)) }; + s.spawn( + move |(compilation, mg, chunk_graph, chunk_by_ukey)| async move { + let mut hashes = RuntimeSpecMap::new(); + let module = mg + .module_by_identifier(&module_identifier) + .expect("should have module"); + for runtime in chunk_graph.get_module_runtimes_iter(module_identifier, chunk_by_ukey) { + let hash = module.get_runtime_hash(compilation, Some(runtime)).await?; + hashes.set(runtime.clone(), hash); + } + Ok((module_identifier, hashes)) + }, + ); + } + }) + .await + .into_iter() + .map(|r| r.to_rspack_result()) + .collect::>>()?; + + for result in results { + let (module, hashes) = result?; + if ChunkGraph::set_module_hashes(self, module, hashes) + && let Some(mut mutations) = self.incremental.mutations_write() + { + mutations.add(Mutation::ModuleSetHashes { module }); + } + } + Ok(()) + } +} diff --git a/crates/rspack_core/src/compilation/finish_module/mod.rs b/crates/rspack_core/src/compilation/finish_module/mod.rs new file mode 100644 index 000000000000..23549acbc18b --- /dev/null +++ b/crates/rspack_core/src/compilation/finish_module/mod.rs @@ -0,0 +1,175 @@ +use super::*; +use crate::logger::Logger; + +impl Compilation { + #[instrument("Compilation:finish",target=TRACING_BENCH_TARGET, skip_all)] + pub async fn finish_build_module_graph(&mut 
self) -> Result<()> { + self.in_finish_make.store(false, Ordering::Release); + // clean up the entry deps + let make_artifact = self.build_module_graph_artifact.take(); + self + .build_module_graph_artifact + .replace(finish_build_module_graph(self, make_artifact).await?); + // sync assets to module graph from module_executor + if let Some(module_executor) = &mut self.module_executor { + let mut module_executor = std::mem::take(module_executor); + module_executor.hook_after_finish_modules(self).await?; + self.module_executor = Some(module_executor); + } + // make finished, make artifact should be readonly thereafter. + Ok(()) + } + + #[tracing::instrument("Compilation:collect_build_module_graph_effects", skip_all)] + pub async fn collect_build_module_graph_effects( + &mut self, + dependencies_diagnostics_artifact: &mut DependenciesDiagnosticsArtifact, + async_modules_artifact: &mut AsyncModulesArtifact, + ) -> Result> { + let logger = self.get_logger("rspack.Compilation"); + if let Some(mut mutations) = self.incremental.mutations_write() { + mutations.extend( + self + .build_module_graph_artifact + .affected_dependencies + .updated() + .iter() + .map(|&dependency| Mutation::DependencyUpdate { dependency }), + ); + mutations.extend( + self + .build_module_graph_artifact + .affected_modules + .removed() + .iter() + .map(|&module| Mutation::ModuleRemove { module }), + ); + mutations.extend( + self + .build_module_graph_artifact + .affected_modules + .updated() + .iter() + .map(|&module| Mutation::ModuleUpdate { module }), + ); + mutations.extend( + self + .build_module_graph_artifact + .affected_modules + .added() + .iter() + .map(|&module| Mutation::ModuleAdd { module }), + ); + tracing::debug!(target: incremental::TRACING_TARGET, passes = %IncrementalPasses::MAKE, %mutations); + } + + let start = logger.time("finish modules"); + // finish_modules means the module graph (modules, connections, dependencies) are + // frozen and start to optimize (provided exports, 
infer async, etc.) based on the + // module graph, so any kind of change that affect these should be done before the + // finish_modules + self + .plugin_driver + .clone() + .compilation_hooks + .finish_modules + .call(self, async_modules_artifact) + .await?; + + logger.time_end(start); + + // https://github.com/webpack/webpack/blob/19ca74127f7668aaf60d59f4af8fcaee7924541a/lib/Compilation.js#L2988 + self.module_graph_cache_artifact.freeze(); + // Collect dependencies diagnostics at here to make sure: + // 1. after finish_modules: has provide exports info + // 2. before optimize dependencies: side effects free module hasn't been skipped + let mut all_diagnostics = + self.collect_dependencies_diagnostics(dependencies_diagnostics_artifact); + self.module_graph_cache_artifact.unfreeze(); + + // take make diagnostics + let diagnostics = self.build_module_graph_artifact.diagnostics(); + all_diagnostics.extend(diagnostics); + Ok(all_diagnostics) + } + + #[tracing::instrument("Compilation:collect_dependencies_diagnostics", skip_all)] + fn collect_dependencies_diagnostics( + &self, + dependencies_diagnostics_artifact: &mut DependenciesDiagnosticsArtifact, + ) -> Vec { + // Compute modules while holding the lock, then release it + let (modules, has_mutations) = { + let mutations = self + .incremental + .mutations_read(IncrementalPasses::DEPENDENCIES_DIAGNOSTICS); + + // TODO move diagnostic collect to make + if let Some(mutations) = mutations { + if !dependencies_diagnostics_artifact.is_empty() { + let revoked_modules = mutations.iter().filter_map(|mutation| match mutation { + Mutation::ModuleRemove { module } => Some(*module), + _ => None, + }); + for revoked_module in revoked_modules { + dependencies_diagnostics_artifact.remove(&revoked_module); + } + let modules = mutations.get_affected_modules_with_module_graph(self.get_module_graph()); + let logger = self.get_logger("rspack.incremental.dependenciesDiagnostics"); + logger.log(format!( + "{} modules are affected, {} in 
total", + modules.len(), + self.get_module_graph().modules().len() + )); + (modules, true) + } else { + ( + self.get_module_graph().modules().keys().copied().collect(), + true, + ) + } + } else { + ( + self.get_module_graph().modules().keys().copied().collect(), + false, + ) + } + }; + + let module_graph = self.get_module_graph(); + let module_graph_cache = &self.module_graph_cache_artifact; + let dependencies_diagnostics: DependenciesDiagnosticsArtifact = modules + .par_iter() + .map(|module_identifier| { + let mgm = module_graph + .module_graph_module_by_identifier(module_identifier) + .expect("should have mgm"); + let diagnostics = mgm + .all_dependencies + .iter() + .filter_map(|dependency_id| { + let dependency = module_graph.dependency_by_id(dependency_id); + dependency + .get_diagnostics(module_graph, module_graph_cache) + .map(|diagnostics| { + diagnostics.into_iter().map(|mut diagnostic| { + diagnostic.module_identifier = Some(*module_identifier); + diagnostic.loc = dependency.loc(); + diagnostic + }) + }) + }) + .flatten() + .collect::>(); + (*module_identifier, diagnostics) + }) + .collect(); + let all_modules_diagnostics = if has_mutations { + dependencies_diagnostics_artifact.extend(dependencies_diagnostics); + dependencies_diagnostics_artifact.clone() + } else { + dependencies_diagnostics + }; + all_modules_diagnostics.into_values().flatten().collect() + } +} diff --git a/crates/rspack_core/src/compilation/mod.rs b/crates/rspack_core/src/compilation/mod.rs index f124be2ee07b..8a0b8c887b6e 100644 --- a/crates/rspack_core/src/compilation/mod.rs +++ b/crates/rspack_core/src/compilation/mod.rs @@ -1,5 +1,24 @@ pub mod build_chunk_graph; pub mod build_module_graph; +mod assign_runtime_ids; +mod create_module_assets; +mod create_chunk_assets; +mod process_assets; +mod after_seal; +mod code_generation; +mod runtime_requirements; +mod create_hash; +mod create_module_hashes; +mod finish_module; +mod optimize_dependencies; +mod optimize_modules; +mod 
optimize_chunks; +mod optimize_tree; +mod optimize_chunk_modules; +mod module_ids; +mod chunk_ids; +mod optimize_code_generation; +mod run_passes; use std::{ collections::{VecDeque, hash_map}, fmt::{self, Debug}, @@ -12,7 +31,6 @@ use std::{ }; use atomic_refcell::AtomicRefCell; -use build_chunk_graph::{artifact::use_code_splitting_cache, build_chunk_graph}; use dashmap::DashSet; use futures::future::BoxFuture; use indexmap::IndexMap; @@ -163,49 +181,6 @@ static COMPILATION_ID: AtomicU32 = AtomicU32::new(0); /// Use macro to prevent cargo shear from failing and reporting errors /// due to the inability to parse the async closure syntax /// https://github.com/Boshen/cargo-shear/issues/143 -macro_rules! process_runtime_requirement_hook_macro { - ($name: ident, $s: ty, $c: ty) => { - async fn $name( - self: $s, - requirements: &mut RuntimeGlobals, - call_hook: impl for<'a> Fn( - $c, - &'a RuntimeGlobals, - &'a RuntimeGlobals, - &'a mut RuntimeGlobals, - ) -> BoxFuture<'a, Result<()>>, - ) -> Result<()> { - let mut runtime_requirements_mut = *requirements; - let mut runtime_requirements; - - loop { - runtime_requirements = runtime_requirements_mut; - runtime_requirements_mut = RuntimeGlobals::default(); - // runtime_requirements: rt_requirements of last time - // runtime_requirements_mut: changed rt_requirements - // requirements: all rt_requirements - call_hook( - self, - requirements, - &runtime_requirements, - &mut runtime_requirements_mut, - ) - .await?; - - // check if we have changes to runtime_requirements - runtime_requirements_mut = - runtime_requirements_mut.difference(requirements.intersection(runtime_requirements_mut)); - if runtime_requirements_mut.is_empty() { - break; - } else { - requirements.insert(runtime_requirements_mut); - } - } - Ok(()) - } - }; -} - #[derive(Debug)] pub struct Compilation { /// get_compilation_hooks(compilation.id) @@ -1060,372 +1035,6 @@ impl Compilation { .collect::>())) } - 
#[instrument("Compilation:code_generation",target=TRACING_BENCH_TARGET, skip_all)] - async fn code_generation(&mut self, modules: IdentifierSet) -> Result<()> { - let logger = self.get_logger("rspack.Compilation"); - let mut codegen_cache_counter = match self.options.cache { - CacheOptions::Disabled => None, - _ => Some(logger.cache("module code generation cache")), - }; - - let module_graph = self.get_module_graph(); - let mut no_codegen_dependencies_modules = IdentifierSet::default(); - let mut has_codegen_dependencies_modules = IdentifierSet::default(); - for module_identifier in modules { - let module = module_graph - .module_by_identifier(&module_identifier) - .expect("should have module"); - if module.get_code_generation_dependencies().is_none() { - no_codegen_dependencies_modules.insert(module_identifier); - } else { - has_codegen_dependencies_modules.insert(module_identifier); - } - } - - self - .code_generation_modules(&mut codegen_cache_counter, no_codegen_dependencies_modules) - .await?; - self - .code_generation_modules(&mut codegen_cache_counter, has_codegen_dependencies_modules) - .await?; - - if let Some(counter) = codegen_cache_counter { - logger.cache_end(counter); - } - - Ok(()) - } - - pub(crate) async fn code_generation_modules( - &mut self, - cache_counter: &mut Option, - modules: IdentifierSet, - ) -> Result<()> { - let chunk_graph = &self.chunk_graph; - let module_graph = self.get_module_graph(); - let mut jobs = Vec::new(); - for module in modules { - let mut map: HashMap = HashMap::default(); - for runtime in chunk_graph.get_module_runtimes_iter(module, &self.chunk_by_ukey) { - let hash = ChunkGraph::get_module_hash(self, module, runtime) - .expect("should have cgm.hash in code generation"); - let scope = self - .plugin_driver - .compilation_hooks - .concatenation_scope - .call(self, module) - .await?; - if let Some(job) = map.get_mut(hash) { - job.runtimes.push(runtime.clone()); - } else { - map.insert( - hash.clone(), - CodeGenerationJob 
{ - module, - hash: hash.clone(), - runtime: runtime.clone(), - runtimes: vec![runtime.clone()], - scope, - }, - ); - } - } - jobs.extend(map.into_values()); - } - - let results = rspack_futures::scope::<_, _>(|token| { - jobs.into_iter().for_each(|job| { - // SAFETY: await immediately and trust caller to poll future entirely - let s = unsafe { token.used((&self, &module_graph, job)) }; - - s.spawn(|(this, module_graph, job)| async { - let options = &this.options; - let old_cache = &this.old_cache; - - let module = module_graph - .module_by_identifier(&job.module) - .expect("should have module"); - let codegen_res = old_cache - .code_generate_occasion - .use_cache(&job, || async { - module - .code_generation(this, Some(&job.runtime), job.scope.clone()) - .await - .map(|mut codegen_res| { - codegen_res.set_hash( - &options.output.hash_function, - &options.output.hash_digest, - &options.output.hash_salt, - ); - codegen_res - }) - }) - .await; - - (job.module, job.runtimes, codegen_res) - }) - }) - }) - .await; - let results = results - .into_iter() - .map(|res| res.to_rspack_result()) - .collect::>>()?; - - for (module, runtimes, (codegen_res, from_cache)) in results { - if let Some(counter) = cache_counter { - if from_cache { - counter.hit(); - } else { - counter.miss(); - } - } - let codegen_res = match codegen_res { - Ok(codegen_res) => codegen_res, - Err(err) => { - let mut diagnostic = Diagnostic::from(err); - diagnostic.module_identifier = Some(module); - self.push_diagnostic(diagnostic); - let mut codegen_res = CodeGenerationResult::default(); - codegen_res.set_hash( - &self.options.output.hash_function, - &self.options.output.hash_digest, - &self.options.output.hash_salt, - ); - codegen_res - } - }; - self - .code_generation_results - .insert(module, codegen_res, runtimes); - self.code_generated_modules.insert(module); - } - Ok(()) - } - - #[instrument("Compilation:create_module_assets",target=TRACING_BENCH_TARGET, skip_all)] - async fn 
create_module_assets(&mut self, _plugin_driver: SharedPluginDriver) { - let mut chunk_asset_map = vec![]; - let mut module_assets = vec![]; - let mg = self.get_module_graph(); - for (identifier, module) in mg.modules() { - let assets = &module.build_info().assets; - if assets.is_empty() { - continue; - } - - for (name, asset) in assets.as_ref() { - module_assets.push((name.clone(), asset.clone())); - } - // assets of executed modules are not in this compilation - if self - .chunk_graph - .chunk_graph_module_by_module_identifier - .contains_key(&identifier) - { - for chunk in self.chunk_graph.get_module_chunks(identifier).iter() { - for name in assets.keys() { - chunk_asset_map.push((*chunk, name.clone())) - } - } - } - } - - for (name, asset) in module_assets { - self.emit_asset(name, asset); - } - - for (chunk, asset_name) in chunk_asset_map { - let chunk = self.chunk_by_ukey.expect_get_mut(&chunk); - chunk.add_auxiliary_file(asset_name); - } - } - - #[instrument("Compilation::create_chunk_assets",target=TRACING_BENCH_TARGET, skip_all)] - async fn create_chunk_assets(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { - if (self.options.output.filename.has_hash_placeholder() - || self.options.output.chunk_filename.has_hash_placeholder() - || self.options.output.css_filename.has_hash_placeholder() - || self - .options - .output - .css_chunk_filename - .has_hash_placeholder()) - && let Some(diagnostic) = self.incremental.disable_passes( - IncrementalPasses::CHUNKS_RENDER, - "Chunk filename that dependent on full hash", - "chunk filename that dependent on full hash is not supported in incremental compilation", - ) - && let Some(diagnostic) = diagnostic - { - self.push_diagnostic(diagnostic); - } - - // Check if CHUNKS_RENDER pass is disabled, and clear artifact if needed - if !self - .incremental - .passes_enabled(IncrementalPasses::CHUNKS_RENDER) - { - self.chunk_render_artifact.clear(); - } - - let chunks = if let Some(mutations) = self - .incremental - 
.mutations_read(IncrementalPasses::CHUNKS_RENDER) - && !self.chunk_render_artifact.is_empty() - { - let removed_chunks = mutations.iter().filter_map(|mutation| match mutation { - Mutation::ChunkRemove { chunk } => Some(*chunk), - _ => None, - }); - for removed_chunk in removed_chunks { - self.chunk_render_artifact.remove(&removed_chunk); - } - self - .chunk_render_artifact - .retain(|chunk, _| self.chunk_by_ukey.contains(chunk)); - let chunks: UkeySet = mutations - .iter() - .filter_map(|mutation| match mutation { - Mutation::ChunkSetHashes { chunk } => Some(*chunk), - _ => None, - }) - .collect(); - tracing::debug!(target: incremental::TRACING_TARGET, passes = %IncrementalPasses::CHUNKS_RENDER, %mutations); - let logger = self.get_logger("rspack.incremental.chunksRender"); - logger.log(format!( - "{} chunks are affected, {} in total", - chunks.len(), - self.chunk_by_ukey.len() - )); - chunks - } else { - self.chunk_by_ukey.keys().copied().collect() - }; - let results = rspack_futures::scope::<_, Result<_>>(|token| { - chunks.iter().for_each(|chunk| { - // SAFETY: await immediately and trust caller to poll future entirely - let s = unsafe { token.used((&self, &plugin_driver, chunk)) }; - - s.spawn(|(this, plugin_driver, chunk)| async { - let mut manifests = Vec::new(); - let mut diagnostics = Vec::new(); - plugin_driver - .compilation_hooks - .render_manifest - .call(this, chunk, &mut manifests, &mut diagnostics) - .await?; - - rspack_error::Result::Ok(( - *chunk, - ChunkRenderResult { - manifests, - diagnostics, - }, - )) - }); - }) - }) - .await; - - let mut chunk_render_results: UkeyMap = Default::default(); - for result in results { - let item = result.to_rspack_result()?; - let (key, value) = item?; - chunk_render_results.insert(key, value); - } - let chunk_ukey_and_manifest = if self - .incremental - .passes_enabled(IncrementalPasses::CHUNKS_RENDER) - { - self.chunk_render_artifact.extend(chunk_render_results); - self.chunk_render_artifact.clone() - } else { 
- chunk_render_results - }; - - for ( - chunk_ukey, - ChunkRenderResult { - manifests, - diagnostics, - }, - ) in chunk_ukey_and_manifest - { - self.extend_diagnostics(diagnostics); - - for file_manifest in manifests { - let filename = file_manifest.filename; - let current_chunk = self.chunk_by_ukey.expect_get_mut(&chunk_ukey); - - current_chunk.set_rendered(true); - if file_manifest.auxiliary { - current_chunk.add_auxiliary_file(filename.clone()); - } else { - current_chunk.add_file(filename.clone()); - } - - self.emit_asset( - filename.clone(), - CompilationAsset::new(Some(file_manifest.source), file_manifest.info), - ); - - _ = self - .chunk_asset(chunk_ukey, &filename, plugin_driver.clone()) - .await; - } - } - - Ok(()) - } - - #[instrument("Compilation:process_assets",target=TRACING_BENCH_TARGET, skip_all)] - async fn process_assets(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { - plugin_driver - .compilation_hooks - .process_assets - .call(self) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.processAssets")) - } - - #[instrument("Compilation:after_process_assets", skip_all)] - async fn after_process_assets(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { - let mut diagnostics: Vec = vec![]; - - let res = plugin_driver - .compilation_hooks - .after_process_assets - .call(self, &mut diagnostics) - .await; - - self.extend_diagnostics(diagnostics); - res - } - - #[instrument("Compilation:after_seal", target=TRACING_BENCH_TARGET,skip_all)] - async fn after_seal(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { - plugin_driver.compilation_hooks.after_seal.call(self).await - } - - // #[instrument( - // name = "Compilation:chunk_asset", - // skip(self, plugin_driver, chunk_ukey) - // )] - async fn chunk_asset( - &self, - chunk_ukey: ChunkUkey, - filename: &str, - plugin_driver: SharedPluginDriver, - ) -> Result<()> { - plugin_driver - .compilation_hooks - .chunk_asset - .call(self, &chunk_ukey, 
filename) - .await?; - Ok(()) - } - pub fn entry_modules(&self) -> IdentifierSet { let module_graph = self.get_module_graph(); self @@ -1453,176 +1062,6 @@ impl Compilation { self.chunk_group_by_ukey.expect_get_mut(ukey) } - #[instrument("Compilation:finish",target=TRACING_BENCH_TARGET, skip_all)] - pub async fn finish_build_module_graph(&mut self) -> Result<()> { - self.in_finish_make.store(false, Ordering::Release); - // clean up the entry deps - let make_artifact = self.build_module_graph_artifact.take(); - self - .build_module_graph_artifact - .replace(finish_build_module_graph(self, make_artifact).await?); - // sync assets to module graph from module_executor - if let Some(module_executor) = &mut self.module_executor { - let mut module_executor = std::mem::take(module_executor); - module_executor.hook_after_finish_modules(self).await?; - self.module_executor = Some(module_executor); - } - // make finished, make artifact should be readonly thereafter. - Ok(()) - } - // collect build module graph effects for incremental compilation - #[tracing::instrument("Compilation:collect_build_module_graph_effects", skip_all)] - pub async fn collect_build_module_graph_effects( - &mut self, - dependencies_diagnostics_artifact: &mut DependenciesDiagnosticsArtifact, - async_modules_artifact: &mut AsyncModulesArtifact, - ) -> Result> { - let logger = self.get_logger("rspack.Compilation"); - if let Some(mut mutations) = self.incremental.mutations_write() { - mutations.extend( - self - .build_module_graph_artifact - .affected_dependencies - .updated() - .iter() - .map(|&dependency| Mutation::DependencyUpdate { dependency }), - ); - mutations.extend( - self - .build_module_graph_artifact - .affected_modules - .removed() - .iter() - .map(|&module| Mutation::ModuleRemove { module }), - ); - mutations.extend( - self - .build_module_graph_artifact - .affected_modules - .updated() - .iter() - .map(|&module| Mutation::ModuleUpdate { module }), - ); - mutations.extend( - self - 
.build_module_graph_artifact - .affected_modules - .added() - .iter() - .map(|&module| Mutation::ModuleAdd { module }), - ); - tracing::debug!(target: incremental::TRACING_TARGET, passes = %IncrementalPasses::MAKE, %mutations); - } - - let start = logger.time("finish modules"); - // finish_modules means the module graph (modules, connections, dependencies) are - // frozen and start to optimize (provided exports, infer async, etc.) based on the - // module graph, so any kind of change that affect these should be done before the - // finish_modules - self - .plugin_driver - .clone() - .compilation_hooks - .finish_modules - .call(self, async_modules_artifact) - .await?; - - logger.time_end(start); - - // https://github.com/webpack/webpack/blob/19ca74127f7668aaf60d59f4af8fcaee7924541a/lib/Compilation.js#L2988 - self.module_graph_cache_artifact.freeze(); - // Collect dependencies diagnostics at here to make sure: - // 1. after finish_modules: has provide exports info - // 2. before optimize dependencies: side effects free module hasn't been skipped - let mut all_diagnostics = - self.collect_dependencies_diagnostics(dependencies_diagnostics_artifact); - self.module_graph_cache_artifact.unfreeze(); - - // take make diagnostics - let diagnostics = self.build_module_graph_artifact.diagnostics(); - all_diagnostics.extend(diagnostics); - Ok(all_diagnostics) - } - #[tracing::instrument("Compilation:collect_dependencies_diagnostics", skip_all)] - fn collect_dependencies_diagnostics( - &self, - dependencies_diagnostics_artifact: &mut DependenciesDiagnosticsArtifact, - ) -> Vec { - // Compute modules while holding the lock, then release it - let (modules, has_mutations) = { - let mutations = self - .incremental - .mutations_read(IncrementalPasses::DEPENDENCIES_DIAGNOSTICS); - - // TODO move diagnostic collect to make - if let Some(mutations) = mutations { - if !dependencies_diagnostics_artifact.is_empty() { - let revoked_modules = mutations.iter().filter_map(|mutation| match 
mutation { - Mutation::ModuleRemove { module } => Some(*module), - _ => None, - }); - for revoked_module in revoked_modules { - dependencies_diagnostics_artifact.remove(&revoked_module); - } - let modules = mutations.get_affected_modules_with_module_graph(self.get_module_graph()); - let logger = self.get_logger("rspack.incremental.dependenciesDiagnostics"); - logger.log(format!( - "{} modules are affected, {} in total", - modules.len(), - self.get_module_graph().modules().len() - )); - (modules, true) - } else { - ( - self.get_module_graph().modules().keys().copied().collect(), - true, - ) - } - } else { - ( - self.get_module_graph().modules().keys().copied().collect(), - false, - ) - } - }; - - let module_graph = self.get_module_graph(); - let module_graph_cache = &self.module_graph_cache_artifact; - let dependencies_diagnostics: DependenciesDiagnosticsArtifact = modules - .par_iter() - .map(|module_identifier| { - let mgm = module_graph - .module_graph_module_by_identifier(module_identifier) - .expect("should have mgm"); - let diagnostics = mgm - .all_dependencies - .iter() - .filter_map(|dependency_id| { - let dependency = module_graph.dependency_by_id(dependency_id); - dependency - .get_diagnostics(module_graph, module_graph_cache) - .map(|diagnostics| { - diagnostics.into_iter().map(|mut diagnostic| { - diagnostic.module_identifier = Some(*module_identifier); - diagnostic.loc = dependency.loc(); - diagnostic - }) - }) - }) - .flatten() - .collect::>(); - (*module_identifier, diagnostics) - }) - .collect(); - let all_modules_diagnostics = if has_mutations { - dependencies_diagnostics_artifact.extend(dependencies_diagnostics); - dependencies_diagnostics_artifact.clone() - } else { - dependencies_diagnostics - }; - all_modules_diagnostics.into_values().flatten().collect() - } - #[instrument("Compilation:seal", skip_all)] pub async fn seal(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { // add a checkpoint here since we may modify module graph later 
in incremental compilation @@ -1635,8 +1074,6 @@ impl Compilation { self.module_static_cache_artifact.freeze(); } - let logger = self.get_logger("rspack.Compilation"); - // https://github.com/webpack/webpack/blob/main/lib/Compilation.js#L2809 plugin_driver .compilation_hooks @@ -1645,420 +1082,7 @@ impl Compilation { .await .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.seal"))?; - let start = logger.time("optimize dependencies"); - // https://github.com/webpack/webpack/blob/d15c73469fd71cf98734685225250148b68ddc79/lib/Compilation.js#L2812-L2814 - - let mut diagnostics: Vec = vec![]; - let mut side_effects_optimize_artifact = self.side_effects_optimize_artifact.take(); - let mut build_module_graph_artifact = self.build_module_graph_artifact.take(); - while matches!( - plugin_driver - .compilation_hooks - .optimize_dependencies - .call( - self, - &mut side_effects_optimize_artifact, - &mut build_module_graph_artifact, - &mut diagnostics - ) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeDependencies"))?, - Some(true) - ) {} - self - .side_effects_optimize_artifact - .replace(side_effects_optimize_artifact); - self - .build_module_graph_artifact - .replace(build_module_graph_artifact); - self.extend_diagnostics(diagnostics); - - logger.time_end(start); - - // ModuleGraph is frozen for now on, we have a module graph that won't change - // so now we can start to create a chunk graph based on the module graph - - let start = logger.time("create chunks"); - self.module_graph_cache_artifact.freeze(); - use_code_splitting_cache(self, |compilation| async { - let start = logger.time("rebuild chunk graph"); - build_chunk_graph(compilation)?; - compilation - .chunk_graph - .generate_dot(compilation, "after-code-splitting") - .await; - logger.time_end(start); - Ok(compilation) - }) - .await?; - - let mut diagnostics = vec![]; - while matches!( - plugin_driver - .compilation_hooks - .optimize_modules - .call(self, &mut 
diagnostics) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeModules"))?, - Some(true) - ) {} - self.extend_diagnostics(diagnostics); - - plugin_driver - .compilation_hooks - .after_optimize_modules - .call(self) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.afterOptimizeModules"))?; - - while matches!( - plugin_driver - .compilation_hooks - .optimize_chunks - .call(self) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeChunks"))?, - Some(true) - ) {} - - logger.time_end(start); - - let start = logger.time("optimize"); - plugin_driver - .compilation_hooks - .optimize_tree - .call(self) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeTree"))?; - - plugin_driver - .compilation_hooks - .optimize_chunk_modules - .call(self) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeChunkModules"))?; - logger.time_end(start); - - // ChunkGraph is frozen for now on, we have a chunk graph that won't change - // so now we can start to generate assets based on the chunk graph - - let start = logger.time("module ids"); - - // Check if MODULE_IDS pass is disabled, and clear artifact if needed - if !self - .incremental - .passes_enabled(IncrementalPasses::MODULE_IDS) - { - self.module_ids_artifact.clear(); - } - - let mut diagnostics = vec![]; - let mut module_ids_artifact = mem::take(&mut self.module_ids_artifact); - plugin_driver - .compilation_hooks - .module_ids - .call(self, &mut module_ids_artifact, &mut diagnostics) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.moduleIds"))?; - self.module_ids_artifact = module_ids_artifact; - self.extend_diagnostics(diagnostics); - logger.time_end(start); - - let start = logger.time("chunk ids"); - - // Check if CHUNK_IDS pass is disabled, and clear artifact if needed - if !self - .incremental - .passes_enabled(IncrementalPasses::CHUNK_IDS) - { - 
self.named_chunk_ids_artifact.clear(); - } - - let mut diagnostics = vec![]; - let mut chunk_by_ukey = mem::take(&mut self.chunk_by_ukey); - let mut named_chunk_ids_artifact = mem::take(&mut self.named_chunk_ids_artifact); - plugin_driver - .compilation_hooks - .chunk_ids - .call( - self, - &mut chunk_by_ukey, - &mut named_chunk_ids_artifact, - &mut diagnostics, - ) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.chunkIds"))?; - self.chunk_by_ukey = chunk_by_ukey; - self.named_chunk_ids_artifact = named_chunk_ids_artifact; - self.extend_diagnostics(diagnostics); - logger.time_end(start); - - self.assign_runtime_ids(); - - let start = logger.time("optimize code generation"); - plugin_driver - .compilation_hooks - .optimize_code_generation - .call(self) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeCodeGeneration"))?; - logger.time_end(start); - - // Check if MODULES_HASHES pass is disabled, and clear artifact if needed - if !self - .incremental - .passes_enabled(IncrementalPasses::MODULES_HASHES) - { - self.cgm_hash_artifact.clear(); - } - - let create_module_hashes_modules = if let Some(mutations) = self - .incremental - .mutations_read(IncrementalPasses::MODULES_HASHES) - && !self.cgm_hash_artifact.is_empty() - { - let revoked_modules = mutations.iter().filter_map(|mutation| match mutation { - Mutation::ModuleRemove { module } => Some(*module), - _ => None, - }); - for revoked_module in revoked_modules { - self.cgm_hash_artifact.remove(&revoked_module); - } - let mut modules = mutations.get_affected_modules_with_chunk_graph(self); - - // check if module runtime changes - let mg = self.get_module_graph(); - for mi in mg.modules().keys() { - let module_runtimes = self - .chunk_graph - .get_module_runtimes(*mi, &self.chunk_by_ukey); - let module_runtime_keys = module_runtimes - .values() - .map(get_runtime_key) - .collect::>(); - - if let Some(runtime_map) = self.cgm_hash_artifact.get_runtime_map(mi) { - 
if module_runtimes.is_empty() { - // module has no runtime, skip - continue; - } - if module_runtimes.len() == 1 { - // single runtime - if !matches!(runtime_map.mode, RuntimeMode::SingleEntry) - || runtime_map - .single_runtime - .as_ref() - .expect("should have single runtime for single entry") - != module_runtimes - .values() - .next() - .expect("should have at least one runtime") - { - modules.insert(*mi); - } - } else { - // multiple runtimes - if matches!(runtime_map.mode, RuntimeMode::SingleEntry) { - modules.insert(*mi); - continue; - } - - if runtime_map.map.len() != module_runtimes.len() { - modules.insert(*mi); - continue; - } - - for runtime_key in runtime_map.map.keys() { - if !module_runtime_keys.contains(runtime_key) { - modules.insert(*mi); - break; - } - } - } - } - } - - tracing::debug!(target: incremental::TRACING_TARGET, passes = %IncrementalPasses::MODULES_HASHES, %mutations, ?modules); - let logger = self.get_logger("rspack.incremental.modulesHashes"); - logger.log(format!( - "{} modules are affected, {} in total", - modules.len(), - mg.modules().len() - )); - - modules - } else { - self.get_module_graph().modules().keys().copied().collect() - }; - self - .create_module_hashes(create_module_hashes_modules) - .await?; - - let start = logger.time("code generation"); - let code_generation_modules = if let Some(mutations) = self - .incremental - .mutations_read(IncrementalPasses::MODULES_CODEGEN) - && !self.code_generation_results.is_empty() - { - let revoked_modules = mutations.iter().filter_map(|mutation| match mutation { - Mutation::ModuleRemove { module } => Some(*module), - _ => None, - }); - for revoked_module in revoked_modules { - self.code_generation_results.remove(&revoked_module); - } - let modules: IdentifierSet = mutations - .iter() - .filter_map(|mutation| match mutation { - Mutation::ModuleSetHashes { module } => Some(*module), - _ => None, - }) - .collect(); - // also cleanup for updated modules, for `insert(); insert();` the 
second insert() won't override the first insert() on code_generation_results - for module in &modules { - self.code_generation_results.remove(module); - } - tracing::debug!(target: incremental::TRACING_TARGET, passes = %IncrementalPasses::MODULES_CODEGEN, %mutations); - let logger = self.get_logger("rspack.incremental.modulesCodegen"); - logger.log(format!( - "{} modules are affected, {} in total", - modules.len(), - self.get_module_graph().modules().len() - )); - modules - } else { - self.code_generation_results = Default::default(); - self.get_module_graph().modules().keys().copied().collect() - }; - self.code_generation(code_generation_modules).await?; - - let mut diagnostics = vec![]; - plugin_driver - .compilation_hooks - .after_code_generation - .call(self, &mut diagnostics) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.afterCodeGeneration"))?; - self.extend_diagnostics(diagnostics); - - logger.time_end(start); - - let start = logger.time("runtime requirements"); - let process_runtime_requirements_modules = if let Some(mutations) = self - .incremental - .mutations_read(IncrementalPasses::MODULES_RUNTIME_REQUIREMENTS) - && !self.cgm_runtime_requirements_artifact.is_empty() - { - let revoked_modules = mutations.iter().filter_map(|mutation| match mutation { - Mutation::ModuleRemove { module } => Some(*module), - _ => None, - }); - for revoked_module in revoked_modules { - self - .cgm_runtime_requirements_artifact - .remove(&revoked_module); - } - let modules: IdentifierSet = mutations - .iter() - .filter_map(|mutation| match mutation { - Mutation::ModuleSetHashes { module } => Some(*module), - _ => None, - }) - .collect(); - let logger = self.get_logger("rspack.incremental.modulesRuntimeRequirements"); - logger.log(format!( - "{} modules are affected, {} in total", - modules.len(), - self.get_module_graph().modules().len() - )); - modules - } else { - self.cgm_runtime_requirements_artifact = Default::default(); - 
self.get_module_graph().modules().keys().copied().collect() - }; - self - .process_modules_runtime_requirements( - process_runtime_requirements_modules, - plugin_driver.clone(), - ) - .await?; - let runtime_chunks = self.get_chunk_graph_entries().collect(); - - // Check if CHUNKS_RUNTIME_REQUIREMENTS pass is disabled, and clear artifact if needed - if !self - .incremental - .passes_enabled(IncrementalPasses::CHUNKS_RUNTIME_REQUIREMENTS) - { - self.cgc_runtime_requirements_artifact.clear(); - } - - let process_runtime_requirements_chunks = if let Some(mutations) = self - .incremental - .mutations_read(IncrementalPasses::CHUNKS_RUNTIME_REQUIREMENTS) - && !self.cgc_runtime_requirements_artifact.is_empty() - { - let removed_chunks = mutations.iter().filter_map(|mutation| match mutation { - Mutation::ChunkRemove { chunk } => Some(chunk), - _ => None, - }); - for removed_chunk in removed_chunks { - self.cgc_runtime_requirements_artifact.remove(removed_chunk); - } - let affected_chunks = mutations.get_affected_chunks_with_chunk_graph(self); - for affected_chunk in &affected_chunks { - self - .cgc_runtime_requirements_artifact - .remove(affected_chunk); - } - for runtime_chunk in &runtime_chunks { - self.cgc_runtime_requirements_artifact.remove(runtime_chunk); - } - self - .cgc_runtime_requirements_artifact - .retain(|chunk, _| self.chunk_by_ukey.contains(chunk)); - let logger = self.get_logger("rspack.incremental.chunksRuntimeRequirements"); - logger.log(format!( - "{} chunks are affected, {} in total", - affected_chunks.len(), - self.chunk_by_ukey.len() - )); - affected_chunks - } else { - self.chunk_by_ukey.keys().copied().collect() - }; - self - .process_chunks_runtime_requirements( - process_runtime_requirements_chunks, - runtime_chunks, - plugin_driver.clone(), - ) - .await?; - logger.time_end(start); - - let start = logger.time("hashing"); - self.create_hash(plugin_driver.clone()).await?; - self.runtime_modules_code_generation().await?; - logger.time_end(start); - - 
let start = logger.time("create module assets"); - self.create_module_assets(plugin_driver.clone()).await; - logger.time_end(start); - - let start = logger.time("create chunk assets"); - self.create_chunk_assets(plugin_driver.clone()).await?; - logger.time_end(start); - - let start = logger.time("process assets"); - self.process_assets(plugin_driver.clone()).await?; - logger.time_end(start); - - let start = logger.time("after process assets"); - self.after_process_assets(plugin_driver.clone()).await?; - logger.time_end(start); - - let start = logger.time("after seal"); - self.after_seal(plugin_driver).await?; - logger.time_end(start); + self.run_passes(plugin_driver).await?; if !self.options.mode.is_development() { self.module_static_cache_artifact.unfreeze(); @@ -2066,47 +1090,6 @@ impl Compilation { Ok(()) } - pub fn assign_runtime_ids(&mut self) { - fn process_entrypoint( - entrypoint_ukey: &ChunkGroupUkey, - chunk_group_by_ukey: &ChunkGroupByUkey, - chunk_by_ukey: &ChunkByUkey, - chunk_graph: &mut ChunkGraph, - ) { - let entrypoint = chunk_group_by_ukey.expect_get(entrypoint_ukey); - let runtime = entrypoint - .kind - .get_entry_options() - .and_then(|o| match &o.runtime { - Some(EntryRuntime::String(s)) => Some(s.to_owned()), - _ => None, - }) - .or(entrypoint.name().map(|n| n.to_string())); - if let (Some(runtime), Some(chunk)) = ( - runtime, - chunk_by_ukey.get(&entrypoint.get_runtime_chunk(chunk_group_by_ukey)), - ) { - chunk_graph.set_runtime_id(runtime, chunk.id().map(|id| id.to_string())); - } - } - for i in self.entrypoints.iter() { - process_entrypoint( - i.1, - &self.chunk_group_by_ukey, - &self.chunk_by_ukey, - &mut self.chunk_graph, - ) - } - for i in self.async_entrypoints.iter() { - process_entrypoint( - i, - &self.chunk_group_by_ukey, - &self.chunk_by_ukey, - &mut self.chunk_graph, - ) - } - } - pub fn get_chunk_graph_entries(&self) -> impl Iterator + use<'_> { let entries = self.entrypoints.values().map(|entrypoint_ukey| { let entrypoint = 
self.chunk_group_by_ukey.expect_get(entrypoint_ukey); @@ -2118,727 +1101,6 @@ impl Compilation { }); entries.chain(async_entries) } - - #[instrument("Compilation:process_modules_runtime_requirements", skip_all)] - pub async fn process_modules_runtime_requirements( - &mut self, - modules: IdentifierSet, - plugin_driver: SharedPluginDriver, - ) -> Result<()> { - let logger = self.get_logger("rspack.Compilation"); - let start = logger.time("runtime requirements.modules"); - - let module_results = rspack_futures::scope::<_, Result<_>>(|token| { - modules - .into_iter() - .filter(|module| self.chunk_graph.get_number_of_module_chunks(*module) > 0) - .for_each(|module| { - let s = unsafe { token.used((&self, &plugin_driver)) }; - s.spawn(move |(compilation, plugin_driver)| async move { - let mut map = RuntimeSpecMap::new(); - let runtimes = compilation - .chunk_graph - .get_module_runtimes_iter(module, &compilation.chunk_by_ukey); - for runtime in runtimes { - let runtime_requirements = compilation - .old_cache - .process_runtime_requirements_occasion - .use_cache(module, runtime, compilation, || async { - let mut runtime_requirements = compilation - .code_generation_results - .get_runtime_requirements(&module, Some(runtime)); - - plugin_driver - .compilation_hooks - .additional_module_runtime_requirements - .call(compilation, &module, &mut runtime_requirements) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.additionalModuleRuntimeRequirements"))?; - - compilation - .process_runtime_requirement_hook(&mut runtime_requirements, { - let plugin_driver = plugin_driver.clone(); - move |compilation, - all_runtime_requirements, - runtime_requirements, - runtime_requirements_mut| { - Box::pin({ - let plugin_driver = plugin_driver.clone(); - async move { - plugin_driver - .compilation_hooks - .runtime_requirement_in_module - .call( - compilation, - &module, - all_runtime_requirements, - runtime_requirements, - runtime_requirements_mut, - ) - .await - 
.map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.runtimeRequirementInModule"))?; - Ok(()) - }}) - } - }) - .await?; - Ok(runtime_requirements) - }) - .await?; - map.set(runtime.clone(), runtime_requirements); - } - Ok((module, map)) - }); - }); - }) - .await - .into_iter() - .map(|r| r.to_rspack_result()) - .collect::>>()?; - - for entry in module_results { - let (module, map) = entry?; - ChunkGraph::set_module_runtime_requirements(self, module, map); - } - logger.time_end(start); - Ok(()) - } - - #[instrument(name = "Compilation:process_chunks_runtime_requirements", target=TRACING_BENCH_TARGET skip_all)] - pub async fn process_chunks_runtime_requirements( - &mut self, - chunks: UkeySet, - entries: UkeySet, - plugin_driver: SharedPluginDriver, - ) -> Result<()> { - let logger = self.get_logger("rspack.Compilation"); - let start = logger.time("runtime requirements.chunks"); - let chunk_requirements = chunks - .iter() - .chain(entries.iter()) - .par_bridge() - .map(|chunk_ukey| { - let mut set = RuntimeGlobals::default(); - for mid in self.chunk_graph.get_chunk_modules_identifier(chunk_ukey) { - let chunk = self.chunk_by_ukey.expect_get(chunk_ukey); - if let Some(runtime_requirements) = - ChunkGraph::get_module_runtime_requirements(self, *mid, chunk.runtime()) - { - set.insert(*runtime_requirements); - } - } - - (*chunk_ukey, set) - }) - .collect::>(); - - for (chunk_ukey, mut set) in chunk_requirements { - plugin_driver - .compilation_hooks - .additional_chunk_runtime_requirements - .call(self, &chunk_ukey, &mut set) - .await - .map_err(|e| { - e.wrap_err("caused by plugins in Compilation.hooks.additionalChunkRuntimeRequirements") - })?; - - self - .process_runtime_requirement_hook_mut(&mut set, { - let plugin_driver = plugin_driver.clone(); - move |compilation, - all_runtime_requirements, - runtime_requirements, - runtime_requirements_mut| { - Box::pin({ - let plugin_driver = plugin_driver.clone(); - async move { - plugin_driver - .compilation_hooks 
- .runtime_requirement_in_chunk - .call( - compilation, - &chunk_ukey, - all_runtime_requirements, - runtime_requirements, - runtime_requirements_mut, - ) - .await - .map_err(|e| { - e.wrap_err("caused by plugins in Compilation.hooks.runtimeRequirementInChunk") - })?; - Ok(()) - } - }) - } - }) - .await?; - - ChunkGraph::set_chunk_runtime_requirements(self, chunk_ukey, set); - } - logger.time_end(start); - - let start = logger.time("runtime requirements.entries"); - for &entry_ukey in &entries { - let entry = self.chunk_by_ukey.expect_get(&entry_ukey); - let mut set = RuntimeGlobals::default(); - for chunk_ukey in entry - .get_all_referenced_chunks(&self.chunk_group_by_ukey) - .iter() - { - let runtime_requirements = ChunkGraph::get_chunk_runtime_requirements(self, chunk_ukey); - set.insert(*runtime_requirements); - } - - plugin_driver - .compilation_hooks - .additional_tree_runtime_requirements - .call(self, &entry_ukey, &mut set) - .await - .map_err(|e| { - e.wrap_err("caused by plugins in Compilation.hooks.additionalTreeRuntimeRequirements") - })?; - - self - .process_runtime_requirement_hook_mut(&mut set, { - let plugin_driver = plugin_driver.clone(); - move |compilation, - all_runtime_requirements, - runtime_requirements, - runtime_requirements_mut| { - Box::pin({ - let plugin_driver = plugin_driver.clone(); - async move { - plugin_driver - .compilation_hooks - .runtime_requirement_in_tree - .call( - compilation, - &entry_ukey, - all_runtime_requirements, - runtime_requirements, - runtime_requirements_mut, - ) - .await - .map_err(|e| { - e.wrap_err("caused by plugins in Compilation.hooks.runtimeRequirementInTree") - })?; - Ok(()) - } - }) - } - }) - .await?; - - ChunkGraph::set_tree_runtime_requirements(self, entry_ukey, set); - } - - // NOTE: webpack runs hooks.runtime_module in compilation.add_runtime_module - // and overwrite the runtime_module.generate() to get new source in create_chunk_assets - // this needs full runtime requirements, so run 
hooks.runtime_module after runtime_requirements_in_tree - let mut runtime_modules = mem::take(&mut self.runtime_modules); - for entry_ukey in &entries { - let runtime_module_ids: Vec<_> = self - .chunk_graph - .get_chunk_runtime_modules_iterable(entry_ukey) - .copied() - .collect(); - for runtime_module_id in runtime_module_ids { - plugin_driver - .compilation_hooks - .runtime_module - .call(self, &runtime_module_id, entry_ukey, &mut runtime_modules) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.runtimeModule"))?; - } - } - self.runtime_modules = runtime_modules; - - logger.time_end(start); - Ok(()) - } - - process_runtime_requirement_hook_macro!( - process_runtime_requirement_hook, - &Compilation, - &'a Compilation - ); - process_runtime_requirement_hook_macro!( - process_runtime_requirement_hook_mut, - &mut Compilation, - &'a mut Compilation - ); - - #[instrument(name = "Compilation:create_hash",target=TRACING_BENCH_TARGET, skip_all)] - pub async fn create_hash(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { - let logger = self.get_logger("rspack.Compilation"); - - // Check if there are any chunks that depend on full hash, usually only runtime chunks are - // possible to depend on full hash, but for library type commonjs/module, it's possible to - // have non-runtime chunks depend on full hash, the library format plugin is using - // dependent_full_hash hook to declare it. - let mut full_hash_chunks = UkeySet::default(); - for chunk_ukey in self.chunk_by_ukey.keys() { - let chunk_dependent_full_hash = plugin_driver - .compilation_hooks - .dependent_full_hash - .call(self, chunk_ukey) - .await? 
- .unwrap_or_default(); - if chunk_dependent_full_hash { - full_hash_chunks.insert(*chunk_ukey); - } - } - if !full_hash_chunks.is_empty() - && let Some(diagnostic) = self.incremental.disable_passes( - IncrementalPasses::CHUNKS_HASHES, - "Chunk content that dependent on full hash", - "it requires calculating the hashes of all the chunks, which is a global effect", - ) - && let Some(diagnostic) = diagnostic - { - self.push_diagnostic(diagnostic); - } - if !self - .incremental - .passes_enabled(IncrementalPasses::CHUNKS_HASHES) - { - self.chunk_hashes_artifact.clear(); - } - - let create_hash_chunks = if let Some(mutations) = self - .incremental - .mutations_read(IncrementalPasses::CHUNKS_HASHES) - && !self.chunk_hashes_artifact.is_empty() - { - let removed_chunks = mutations.iter().filter_map(|mutation| match mutation { - Mutation::ChunkRemove { chunk } => Some(*chunk), - _ => None, - }); - for removed_chunk in removed_chunks { - self.chunk_hashes_artifact.remove(&removed_chunk); - } - self - .chunk_hashes_artifact - .retain(|chunk, _| self.chunk_by_ukey.contains(chunk)); - let chunks = mutations.get_affected_chunks_with_chunk_graph(self); - tracing::debug!(target: incremental::TRACING_TARGET, passes = %IncrementalPasses::CHUNKS_HASHES, %mutations, ?chunks); - let logger = self.get_logger("rspack.incremental.chunksHashes"); - logger.log(format!( - "{} chunks are affected, {} in total", - chunks.len(), - self.chunk_by_ukey.len(), - )); - chunks - } else { - self.chunk_by_ukey.keys().copied().collect() - }; - - let mut compilation_hasher = RspackHash::from(&self.options.output); - - fn try_process_chunk_hash_results( - compilation: &mut Compilation, - chunk_hash_results: Vec>, - ) -> Result<()> { - for hash_result in chunk_hash_results { - let (chunk_ukey, chunk_hash_result) = hash_result?; - let chunk = compilation.chunk_by_ukey.expect_get(&chunk_ukey); - let chunk_hashes_changed = chunk.set_hashes( - &mut compilation.chunk_hashes_artifact, - chunk_hash_result.hash, 
- chunk_hash_result.content_hash, - ); - if chunk_hashes_changed - && let Some(mut mutations) = compilation.incremental.mutations_write() - { - mutations.add(Mutation::ChunkSetHashes { chunk: chunk_ukey }); - } - } - Ok(()) - } - - let unordered_runtime_chunks: UkeySet = self.get_chunk_graph_entries().collect(); - let start = logger.time("hashing: hash chunks"); - let other_chunks: Vec<_> = create_hash_chunks - .iter() - .filter(|key| !unordered_runtime_chunks.contains(key)) - .collect(); - - // create hash for runtime modules in other chunks - let other_chunk_runtime_module_hashes = rspack_futures::scope::<_, Result<_>>(|token| { - other_chunks - .iter() - .flat_map(|chunk| self.chunk_graph.get_chunk_runtime_modules_iterable(chunk)) - .for_each(|runtime_module_identifier| { - let s = unsafe { token.used((&self, runtime_module_identifier)) }; - s.spawn(|(compilation, runtime_module_identifier)| async { - let runtime_module = &compilation.runtime_modules[runtime_module_identifier]; - let digest = runtime_module.get_runtime_hash(compilation, None).await?; - Ok((*runtime_module_identifier, digest)) - }); - }) - }) - .await - .into_iter() - .map(|res| res.to_rspack_result()) - .collect::>>()?; - - for res in other_chunk_runtime_module_hashes { - let (runtime_module_identifier, digest) = res?; - self - .runtime_modules_hash - .insert(runtime_module_identifier, digest); - } - - // create hash for other chunks - let other_chunks_hash_results = rspack_futures::scope::<_, Result<_>>(|token| { - for chunk in other_chunks { - let s = unsafe { token.used((&self, chunk, &plugin_driver)) }; - s.spawn(|(compilation, chunk, plugin_driver)| async { - let hash_result = compilation - .process_chunk_hash(*chunk, plugin_driver) - .await?; - Ok((*chunk, hash_result)) - }); - } - }) - .await - .into_iter() - .map(|res| res.to_rspack_result()) - .collect::>>()?; - - try_process_chunk_hash_results(self, other_chunks_hash_results)?; - logger.time_end(start); - - // collect references for 
runtime chunks - let mut runtime_chunks_map: HashMap, u32)> = - unordered_runtime_chunks - .into_iter() - .map(|runtime_chunk| (runtime_chunk, (Vec::new(), 0))) - .collect(); - let mut remaining: u32 = 0; - for runtime_chunk_ukey in runtime_chunks_map.keys().copied().collect::>() { - let runtime_chunk = self.chunk_by_ukey.expect_get(&runtime_chunk_ukey); - let groups = runtime_chunk.get_all_referenced_async_entrypoints(&self.chunk_group_by_ukey); - for other in groups - .into_iter() - .map(|group| self.chunk_group_by_ukey.expect_get(&group)) - .map(|group| group.get_runtime_chunk(&self.chunk_group_by_ukey)) - { - let (other_referenced_by, _) = runtime_chunks_map - .get_mut(&other) - .expect("should in runtime_chunks_map"); - other_referenced_by.push(runtime_chunk_ukey); - let info = runtime_chunks_map - .get_mut(&runtime_chunk_ukey) - .expect("should in runtime_chunks_map"); - info.1 += 1; - remaining += 1; - } - } - // sort runtime chunks by its references - let mut runtime_chunks = Vec::with_capacity(runtime_chunks_map.len()); - for (runtime_chunk, (_, remaining)) in &runtime_chunks_map { - if *remaining == 0 { - runtime_chunks.push(*runtime_chunk); - } - } - let mut ready_chunks = Vec::new(); - - let mut i = 0; - while i < runtime_chunks.len() { - let chunk_ukey = runtime_chunks[i]; - let has_full_hash_modules = full_hash_chunks.contains(&chunk_ukey) - || self - .chunk_graph - .has_chunk_full_hash_modules(&chunk_ukey, &self.runtime_modules); - if has_full_hash_modules { - full_hash_chunks.insert(chunk_ukey); - } - let referenced_by = runtime_chunks_map - .get(&chunk_ukey) - .expect("should in runtime_chunks_map") - .0 - .clone(); - for other in referenced_by { - if has_full_hash_modules { - for runtime_module in self.chunk_graph.get_chunk_runtime_modules_iterable(&other) { - let runtime_module = self - .runtime_modules - .get(runtime_module) - .expect("should have runtime_module"); - if runtime_module.dependent_hash() { - full_hash_chunks.insert(other); - break; 
- } - } - } - remaining -= 1; - let (_, other_remaining) = runtime_chunks_map - .get_mut(&other) - .expect("should in runtime_chunks_map"); - *other_remaining -= 1; - if *other_remaining == 0 { - ready_chunks.push(other); - } - } - if !ready_chunks.is_empty() { - runtime_chunks.append(&mut ready_chunks); - } - i += 1; - } - // create warning for remaining circular references - if remaining > 0 { - let mut circular: Vec<_> = runtime_chunks_map - .iter() - .filter(|(_, (_, remaining))| *remaining != 0) - .map(|(chunk_ukey, _)| self.chunk_by_ukey.expect_get(chunk_ukey)) - .collect(); - circular.sort_unstable_by(|a, b| a.id().cmp(&b.id())); - runtime_chunks.extend(circular.iter().map(|chunk| chunk.ukey())); - let circular_names = circular - .iter() - .map(|chunk| { - chunk - .name() - .or(chunk.id().map(|id| id.as_str())) - .unwrap_or("no id chunk") - }) - .join(", "); - let error = rspack_error::Error::warning(format!( - "Circular dependency between chunks with runtime ({circular_names})\nThis prevents using hashes of each other and should be avoided." - )); - self.push_diagnostic(error.into()); - } - - // create hash for runtime chunks and the runtime modules within them - // The subsequent runtime chunks and runtime modules will depend on - // the hash results of the previous runtime chunks and runtime modules. - // Therefore, create hashes one by one in sequence. 
- let start = logger.time("hashing: hash runtime chunks"); - for runtime_chunk_ukey in runtime_chunks { - let runtime_module_hashes = rspack_futures::scope::<_, Result<_>>(|token| { - self - .chunk_graph - .get_chunk_runtime_modules_iterable(&runtime_chunk_ukey) - .for_each(|runtime_module_identifier| { - let s = unsafe { token.used((&self, runtime_module_identifier)) }; - s.spawn(|(compilation, runtime_module_identifier)| async { - let runtime_module = &compilation.runtime_modules[runtime_module_identifier]; - let digest = runtime_module.get_runtime_hash(compilation, None).await?; - Ok((*runtime_module_identifier, digest)) - }); - }) - }) - .await - .into_iter() - .map(|res| res.to_rspack_result()) - .collect::>>()?; - - for res in runtime_module_hashes { - let (mid, digest) = res?; - self.runtime_modules_hash.insert(mid, digest); - } - - let chunk_hash_result = self - .process_chunk_hash(runtime_chunk_ukey, &plugin_driver) - .await?; - let chunk = self.chunk_by_ukey.expect_get(&runtime_chunk_ukey); - let chunk_hashes_changed = chunk.set_hashes( - &mut self.chunk_hashes_artifact, - chunk_hash_result.hash, - chunk_hash_result.content_hash, - ); - if chunk_hashes_changed && let Some(mut mutations) = self.incremental.mutations_write() { - mutations.add(Mutation::ChunkSetHashes { - chunk: runtime_chunk_ukey, - }); - } - } - logger.time_end(start); - - // create full hash - self - .chunk_by_ukey - .values() - .sorted_unstable_by_key(|chunk| chunk.ukey()) - .filter_map(|chunk| chunk.hash(&self.chunk_hashes_artifact)) - .for_each(|hash| { - hash.hash(&mut compilation_hasher); - }); - self.hot_index.hash(&mut compilation_hasher); - self.hash = Some(compilation_hasher.digest(&self.options.output.hash_digest)); - - // re-create runtime chunk hash that depend on full hash - let start = logger.time("hashing: process full hash chunks"); - for chunk_ukey in full_hash_chunks { - for runtime_module_identifier in self - .chunk_graph - 
impl Compilation {
  /// Generates code for every runtime module in the compilation, concurrently,
  /// and stores the rendered `SourceType::Runtime` source per module.
  /// Also marks all runtime modules as code-generated.
  #[instrument(skip_all)]
  pub async fn runtime_modules_code_generation(&mut self) -> Result<()> {
    // Spawn one codegen task per runtime module inside a structured scope.
    let results = rspack_futures::scope::<_, Result<_>>(|token| {
      self
        .runtime_modules
        .iter()
        .for_each(|(runtime_module_identifier, runtime_module)| {
          // `token.used` extends the borrows into the spawned task; the scope
          // joins all tasks before the borrows end — TODO confirm against
          // `rspack_futures::scope` contract.
          let s = unsafe { token.used((&self, runtime_module_identifier, runtime_module)) };
          s.spawn(
            |(compilation, runtime_module_identifier, runtime_module)| async {
              let result = runtime_module
                .code_generation(compilation, None, None)
                .await?;
              // Runtime modules always emit a `Runtime` source; absence is a bug.
              let source = result
                .get(&SourceType::Runtime)
                .expect("should have source");
              Ok((*runtime_module_identifier, source.clone()))
            },
          )
        })
    })
    .await
    .into_iter()
    .map(|res| res.to_rspack_result())
    .collect::<Result<Vec<_>>>()?;

    // NOTE(review): element type of this map was elided in this view
    // (generics stripped); inferred from the assignment below — confirm.
    let mut runtime_module_sources = IdentifierMap::default();
    for result in results {
      let (runtime_module_identifier, source) = result?;
      runtime_module_sources.insert(runtime_module_identifier, source);
    }

    self.runtime_modules_code_generation_source = runtime_module_sources;
    self
      .code_generated_modules
      .extend(self.runtime_modules.keys().copied());
    Ok(())
  }

  /// Computes the hash of a single chunk plus its per-source-type content
  /// hashes. Plugins contribute via the `chunkHash` and `contentHash` hooks;
  /// the overall chunk hash is mixed into each content hash before digesting.
  async fn process_chunk_hash(
    &self,
    chunk_ukey: ChunkUkey,
    plugin_driver: &SharedPluginDriver,
  ) -> Result<ChunkHashResult> {
    let mut hasher = RspackHash::from(&self.options.output);
    if let Some(chunk) = self.chunk_by_ukey.get(&chunk_ukey) {
      chunk.update_hash(&mut hasher, self);
    }
    plugin_driver
      .compilation_hooks
      .chunk_hash
      .call(self, &chunk_ukey, &mut hasher)
      .await?;
    let chunk_hash = hasher.digest(&self.options.output.hash_digest);

    // NOTE(review): map generics were stripped in this view; keyed by source
    // type with an in-progress hasher as value, per the usage below — confirm.
    let mut content_hashes: HashMap<SourceType, RspackHash> = HashMap::default();
    plugin_driver
      .compilation_hooks
      .content_hash
      .call(self, &chunk_ukey, &mut content_hashes)
      .await?;

    // Fold the chunk hash into every per-type hasher, then digest.
    let content_hashes = content_hashes
      .into_iter()
      .map(|(t, mut hasher)| {
        chunk_hash.hash(&mut hasher);
        (t, hasher.digest(&self.options.output.hash_digest))
      })
      .collect();

    Ok(ChunkHashResult {
      hash: chunk_hash,
      content_hash: content_hashes,
    })
  }

  /// Computes per-runtime hashes for the given modules in parallel and records
  /// them on the chunk graph. Emits a `ModuleSetHashes` mutation for
  /// incremental rebuilds whenever a module's hashes actually changed.
  #[instrument("Compilation:create_module_hashes", skip_all)]
  pub async fn create_module_hashes(&mut self, modules: IdentifierSet) -> Result<()> {
    let mg = self.get_module_graph();
    let chunk_graph = &self.chunk_graph;
    let chunk_by_ukey = &self.chunk_by_ukey;

    let results = rspack_futures::scope::<_, Result<_>>(|token| {
      for module_identifier in modules {
        // Borrows are extended into the task; the scope joins before return.
        let s = unsafe { token.used((&*self, &mg, chunk_graph, chunk_by_ukey)) };
        s.spawn(
          move |(compilation, mg, chunk_graph, chunk_by_ukey)| async move {
            let mut hashes = RuntimeSpecMap::new();
            let module = mg
              .module_by_identifier(&module_identifier)
              .expect("should have module");
            // One hash per runtime the module participates in.
            for runtime in chunk_graph.get_module_runtimes_iter(module_identifier, chunk_by_ukey) {
              let hash = module.get_runtime_hash(compilation, Some(runtime)).await?;
              hashes.set(runtime.clone(), hash);
            }
            Ok((module_identifier, hashes))
          },
        );
      }
    })
    .await
    .into_iter()
    .map(|r| r.to_rspack_result())
    .collect::<Result<Vec<_>>>()?;

    for result in results {
      let (module, hashes) = result?;
      // Record the mutation only when the hashes changed, so downstream
      // incremental passes (codegen, runtime requirements) can narrow work.
      if ChunkGraph::set_module_hashes(self, module, hashes)
        && let Some(mut mutations) = self.incremental.mutations_write()
      {
        mutations.add(Mutation::ModuleSetHashes { module });
      }
    }
    Ok(())
  }
}
&mut module_ids_artifact, &mut diagnostics) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.moduleIds"))?; + self.module_ids_artifact = module_ids_artifact; + self.extend_diagnostics(diagnostics); + logger.time_end(start); + Ok(()) + } +} diff --git a/crates/rspack_core/src/compilation/optimize_chunk_modules/mod.rs b/crates/rspack_core/src/compilation/optimize_chunk_modules/mod.rs new file mode 100644 index 000000000000..27d243cf089c --- /dev/null +++ b/crates/rspack_core/src/compilation/optimize_chunk_modules/mod.rs @@ -0,0 +1,16 @@ +use super::*; + +impl Compilation { + pub async fn optimize_chunk_modules_pass( + &mut self, + plugin_driver: SharedPluginDriver, + ) -> Result<()> { + plugin_driver + .compilation_hooks + .optimize_chunk_modules + .call(self) + .await + .map(|_| ()) + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeChunkModules")) + } +} diff --git a/crates/rspack_core/src/compilation/optimize_chunks/mod.rs b/crates/rspack_core/src/compilation/optimize_chunks/mod.rs new file mode 100644 index 000000000000..15340763b8d4 --- /dev/null +++ b/crates/rspack_core/src/compilation/optimize_chunks/mod.rs @@ -0,0 +1,19 @@ +use super::*; + +impl Compilation { + pub async fn optimize_chunks_pass( + &mut self, + plugin_driver: SharedPluginDriver, + ) -> Result<()> { + while matches!( + plugin_driver + .compilation_hooks + .optimize_chunks + .call(self) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeChunks"))?, + Some(true) + ) {} + Ok(()) + } +} diff --git a/crates/rspack_core/src/compilation/optimize_code_generation/mod.rs b/crates/rspack_core/src/compilation/optimize_code_generation/mod.rs new file mode 100644 index 000000000000..d92cfa60d44d --- /dev/null +++ b/crates/rspack_core/src/compilation/optimize_code_generation/mod.rs @@ -0,0 +1,20 @@ +use super::*; +use crate::logger::Logger; + +impl Compilation { + pub async fn optimize_code_generation_pass( + &mut self, + 
plugin_driver: SharedPluginDriver, + ) -> Result<()> { + let logger = self.get_logger("rspack.Compilation"); + let start = logger.time("optimize code generation"); + plugin_driver + .compilation_hooks + .optimize_code_generation + .call(self) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeCodeGeneration"))?; + logger.time_end(start); + Ok(()) + } +} diff --git a/crates/rspack_core/src/compilation/optimize_dependencies/mod.rs b/crates/rspack_core/src/compilation/optimize_dependencies/mod.rs new file mode 100644 index 000000000000..8d9bdb23ea0d --- /dev/null +++ b/crates/rspack_core/src/compilation/optimize_dependencies/mod.rs @@ -0,0 +1,41 @@ +use super::*; +use crate::logger::Logger; + +impl Compilation { + pub async fn optimize_dependencies_pass( + &mut self, + plugin_driver: SharedPluginDriver, + ) -> Result<()> { + let logger = self.get_logger("rspack.Compilation"); + let start = logger.time("optimize dependencies"); + // https://github.com/webpack/webpack/blob/d15c73469fd71cf98734685225250148b68ddc79/lib/Compilation.js#L2812-L2814 + + let mut diagnostics: Vec = vec![]; + let mut side_effects_optimize_artifact = self.side_effects_optimize_artifact.take(); + let mut build_module_graph_artifact = self.build_module_graph_artifact.take(); + while matches!( + plugin_driver + .compilation_hooks + .optimize_dependencies + .call( + self, + &mut side_effects_optimize_artifact, + &mut build_module_graph_artifact, + &mut diagnostics + ) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeDependencies"))?, + Some(true) + ) {} + self + .side_effects_optimize_artifact + .replace(side_effects_optimize_artifact); + self + .build_module_graph_artifact + .replace(build_module_graph_artifact); + self.extend_diagnostics(diagnostics); + + logger.time_end(start); + Ok(()) + } +} diff --git a/crates/rspack_core/src/compilation/optimize_modules/mod.rs b/crates/rspack_core/src/compilation/optimize_modules/mod.rs new file 
mode 100644 index 000000000000..e162e9512ac4 --- /dev/null +++ b/crates/rspack_core/src/compilation/optimize_modules/mod.rs @@ -0,0 +1,27 @@ +use super::*; + +impl Compilation { + pub async fn optimize_modules_pass( + &mut self, + plugin_driver: SharedPluginDriver, + ) -> Result<()> { + let mut diagnostics = vec![]; + while matches!( + plugin_driver + .compilation_hooks + .optimize_modules + .call(self, &mut diagnostics) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeModules"))?, + Some(true) + ) {} + self.extend_diagnostics(diagnostics); + + plugin_driver + .compilation_hooks + .after_optimize_modules + .call(self) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.afterOptimizeModules")) + } +} diff --git a/crates/rspack_core/src/compilation/optimize_tree/mod.rs b/crates/rspack_core/src/compilation/optimize_tree/mod.rs new file mode 100644 index 000000000000..f127f8b716ac --- /dev/null +++ b/crates/rspack_core/src/compilation/optimize_tree/mod.rs @@ -0,0 +1,15 @@ +use super::*; + +impl Compilation { + pub async fn optimize_tree_pass( + &mut self, + plugin_driver: SharedPluginDriver, + ) -> Result<()> { + plugin_driver + .compilation_hooks + .optimize_tree + .call(self) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeTree")) + } +} diff --git a/crates/rspack_core/src/compilation/process_assets/mod.rs b/crates/rspack_core/src/compilation/process_assets/mod.rs new file mode 100644 index 000000000000..009d3057b868 --- /dev/null +++ b/crates/rspack_core/src/compilation/process_assets/mod.rs @@ -0,0 +1,43 @@ +use super::*; +use crate::logger::Logger; + +impl Compilation { + pub async fn process_assets_pass( + &mut self, + plugin_driver: SharedPluginDriver, + ) -> Result<()> { + let logger = self.get_logger("rspack.Compilation"); + let start = logger.time("process assets"); + self.process_assets(plugin_driver.clone()).await?; + logger.time_end(start); + + let start = 
logger.time("after process assets"); + self.after_process_assets(plugin_driver).await?; + logger.time_end(start); + Ok(()) + } + + #[instrument("Compilation:process_assets",target=TRACING_BENCH_TARGET, skip_all)] + async fn process_assets(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { + plugin_driver + .compilation_hooks + .process_assets + .call(self) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.processAssets")) + } + + #[instrument("Compilation:after_process_assets", skip_all)] + async fn after_process_assets(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { + let mut diagnostics: Vec = vec![]; + + let res = plugin_driver + .compilation_hooks + .after_process_assets + .call(self, &mut diagnostics) + .await; + + self.extend_diagnostics(diagnostics); + res + } +} diff --git a/crates/rspack_core/src/compilation/rspack_passes.md b/crates/rspack_core/src/compilation/rspack_passes.md index 49658e37d716..e1ab2125ddfe 100644 --- a/crates/rspack_core/src/compilation/rspack_passes.md +++ b/crates/rspack_core/src/compilation/rspack_passes.md @@ -10,24 +10,44 @@ The compilation process is organized into independent modules, each responsible ``` compilation/ -├── mod.rs # Main Compilation struct which expose compilation api -|-- run_passes.rs # passes driver which calls other pass -├── build_module_graph/ # Module graph construction -|-- finish_module/ # Finish Module Graph Construction collect async_modules and dependencies_diagnostics -├── optimize_dependencies/ # optimization dependencies which includes collect side_effect_optimization info -├── build_chunk_graph/ # Chunk graph construction (code splitting) -|── optimize_modules/ # which includes optimize_modules and after_optimize_modules hooks -├── optimize_chunks/ # which includes optimize_chunks hooks -├── optimize_tree/ # which includes optimize_tree hooks -├── optimize_chunk_modules # which includes optimize_chunk_modules hooks -├── module_ids/ # Module ID 
assignment -├── chunk_ids/ # Chunk ID assignment -├── assign_runtime_ids/ # Runtime ID assignment -├── optimize_code_generation/ # Code generation optimization -├── create_module_hashes/ # Module hash computation -├── code_generation/ # Code generation for modules -├── runtime_requirements/ # Runtime requirements processing -├── create_hash/ # Chunk hash computation -├── create_chunk_assets/ # Asset creation -└── process_assets/ # Asset processing hooks +├── mod.rs # Main Compilation struct which exposes the public API +├── run_passes.rs # Pass driver which calls all passes (including make and seal) +├── build_module_graph/ # Module graph construction +│ └── finish_module/ # Finalize module graph, async modules, dependency diagnostics +├── optimize_dependencies/ # optimizeDependencies hook + side effects artifact +├── build_chunk_graph/ # Chunk graph construction (code splitting cache + pass wrapper) +├── optimize_modules/ # optimizeModules + afterOptimizeModules hooks +├── optimize_chunks/ # optimizeChunks hook +├── optimize_tree/ # optimizeTree hook +├── optimize_chunk_modules/ # optimizeChunkModules hook +├── module_ids/ # Module ID assignment + diagnostics +├── chunk_ids/ # Chunk ID assignment + diagnostics +├── assign_runtime_ids/ # Runtime ID assignment for runtime chunks +├── optimize_code_generation/ # optimizeCodeGeneration hook +├── create_module_hashes/ # Module hash computation (incremental aware) +├── code_generation/ # Module codegen + afterCodeGeneration hook +├── runtime_requirements/ # Module/chunk/tree runtime requirements + runtime modules +├── create_hash/ # Chunk hashing, runtime module hashes, full hash + runtime module codegen +├── create_module_assets/ # Emit module-declared assets and mark chunk auxiliary files +├── create_chunk_assets/ # Render manifests and emit chunk assets +├── process_assets/ # processAssets + afterProcessAssets hooks +└── after_seal/ # afterSeal hook ``` + +## Pass Order + +`run_passes` orchestrates passes after
`CompilationHooks::seal` in this order: + +1. `optimize_dependencies_pass` +2. `build_chunk_graph_pass` → `optimize_modules_pass` → `optimize_chunks_pass` +3. `optimize_tree_pass` → `optimize_chunk_modules_pass` +4. `module_ids_pass` → `chunk_ids_pass` → `assign_runtime_ids` +5. `optimize_code_generation_pass` +6. `create_module_hashes_pass` +7. `code_generation_pass` +8. `runtime_requirements_pass` +9. `create_hash_pass` (also runs runtime module code generation) +10. `create_module_assets_pass` +11. `create_chunk_assets_pass` +12. `process_assets_pass` +13. `after_seal_pass` diff --git a/crates/rspack_core/src/compilation/run_passes.rs b/crates/rspack_core/src/compilation/run_passes.rs new file mode 100644 index 000000000000..c84e6baea21b --- /dev/null +++ b/crates/rspack_core/src/compilation/run_passes.rs @@ -0,0 +1,55 @@ +use super::*; +use crate::logger::Logger; + +impl Compilation { + pub async fn run_passes(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { + self + .optimize_dependencies_pass(plugin_driver.clone()) + .await?; + + let logger = self.get_logger("rspack.Compilation"); + + let create_chunks_start = logger.time("create chunks"); + self.build_chunk_graph_pass().await?; + self + .optimize_modules_pass(plugin_driver.clone()) + .await?; + self + .optimize_chunks_pass(plugin_driver.clone()) + .await?; + logger.time_end(create_chunks_start); + + let optimize_start = logger.time("optimize"); + self + .optimize_tree_pass(plugin_driver.clone()) + .await?; + self + .optimize_chunk_modules_pass(plugin_driver.clone()) + .await?; + logger.time_end(optimize_start); + + self.module_ids_pass(plugin_driver.clone()).await?; + self.chunk_ids_pass(plugin_driver.clone()).await?; + self.assign_runtime_ids(); + + self + .optimize_code_generation_pass(plugin_driver.clone()) + .await?; + self.create_module_hashes_pass().await?; + self.code_generation_pass(plugin_driver.clone()).await?; + self + .runtime_requirements_pass(plugin_driver.clone()) + .await?; + 
self.create_hash_pass(plugin_driver.clone()).await?; + self + .create_module_assets_pass(plugin_driver.clone()) + .await?; + self + .create_chunk_assets_pass(plugin_driver.clone()) + .await?; + self.process_assets_pass(plugin_driver.clone()).await?; + self.after_seal_pass(plugin_driver).await?; + + Ok(()) + } +} diff --git a/crates/rspack_core/src/compilation/runtime_requirements/mod.rs b/crates/rspack_core/src/compilation/runtime_requirements/mod.rs new file mode 100644 index 000000000000..c06fca20224d --- /dev/null +++ b/crates/rspack_core/src/compilation/runtime_requirements/mod.rs @@ -0,0 +1,395 @@ +use super::*; +use crate::logger::Logger; + +macro_rules! process_runtime_requirement_hook_macro { + ($name: ident, $s: ty, $c: ty) => { + async fn $name( + self: $s, + requirements: &mut RuntimeGlobals, + call_hook: impl for<'a> Fn( + $c, + &'a RuntimeGlobals, + &'a RuntimeGlobals, + &'a mut RuntimeGlobals, + ) -> BoxFuture<'a, Result<()>>, + ) -> Result<()> { + let mut runtime_requirements_mut = *requirements; + let mut runtime_requirements; + + loop { + runtime_requirements = runtime_requirements_mut; + runtime_requirements_mut = RuntimeGlobals::default(); + // runtime_requirements: rt_requirements of last time + // runtime_requirements_mut: changed rt_requirements + // requirements: all rt_requirements + call_hook( + self, + requirements, + &runtime_requirements, + &mut runtime_requirements_mut, + ) + .await?; + + // check if we have changes to runtime_requirements + runtime_requirements_mut = + runtime_requirements_mut.difference(requirements.intersection(runtime_requirements_mut)); + if runtime_requirements_mut.is_empty() { + break; + } else { + requirements.insert(runtime_requirements_mut); + } + } + Ok(()) + } + }; +} + +impl Compilation { + pub async fn runtime_requirements_pass( + &mut self, + plugin_driver: SharedPluginDriver, + ) -> Result<()> { + let logger = self.get_logger("rspack.Compilation"); + let start = logger.time("runtime requirements"); + let 
process_runtime_requirements_modules = if let Some(mutations) = self + .incremental + .mutations_read(IncrementalPasses::MODULES_RUNTIME_REQUIREMENTS) + && !self.cgm_runtime_requirements_artifact.is_empty() + { + let revoked_modules = mutations.iter().filter_map(|mutation| match mutation { + Mutation::ModuleRemove { module } => Some(*module), + _ => None, + }); + for revoked_module in revoked_modules { + self + .cgm_runtime_requirements_artifact + .remove(&revoked_module); + } + let modules: IdentifierSet = mutations + .iter() + .filter_map(|mutation| match mutation { + Mutation::ModuleSetHashes { module } => Some(*module), + _ => None, + }) + .collect(); + let logger = self.get_logger("rspack.incremental.modulesRuntimeRequirements"); + logger.log(format!( + "{} modules are affected, {} in total", + modules.len(), + self.get_module_graph().modules().len() + )); + modules + } else { + self.cgm_runtime_requirements_artifact = Default::default(); + self.get_module_graph().modules().keys().copied().collect() + }; + self + .process_modules_runtime_requirements( + process_runtime_requirements_modules, + plugin_driver.clone(), + ) + .await?; + let runtime_chunks = self.get_chunk_graph_entries().collect(); + + // Check if CHUNKS_RUNTIME_REQUIREMENTS pass is disabled, and clear artifact if needed + if !self + .incremental + .passes_enabled(IncrementalPasses::CHUNKS_RUNTIME_REQUIREMENTS) + { + self.cgc_runtime_requirements_artifact.clear(); + } + + let process_runtime_requirements_chunks = if let Some(mutations) = self + .incremental + .mutations_read(IncrementalPasses::CHUNKS_RUNTIME_REQUIREMENTS) + && !self.cgc_runtime_requirements_artifact.is_empty() + { + let removed_chunks = mutations.iter().filter_map(|mutation| match mutation { + Mutation::ChunkRemove { chunk } => Some(chunk), + _ => None, + }); + for removed_chunk in removed_chunks { + self.cgc_runtime_requirements_artifact.remove(removed_chunk); + } + let affected_chunks = 
mutations.get_affected_chunks_with_chunk_graph(self); + for affected_chunk in &affected_chunks { + self + .cgc_runtime_requirements_artifact + .remove(affected_chunk); + } + for runtime_chunk in &runtime_chunks { + self.cgc_runtime_requirements_artifact.remove(runtime_chunk); + } + self + .cgc_runtime_requirements_artifact + .retain(|chunk, _| self.chunk_by_ukey.contains(chunk)); + let logger = self.get_logger("rspack.incremental.chunksRuntimeRequirements"); + logger.log(format!( + "{} chunks are affected, {} in total", + affected_chunks.len(), + self.chunk_by_ukey.len() + )); + affected_chunks + } else { + self.chunk_by_ukey.keys().copied().collect() + }; + self + .process_chunks_runtime_requirements( + process_runtime_requirements_chunks, + runtime_chunks, + plugin_driver.clone(), + ) + .await?; + logger.time_end(start); + Ok(()) + } + + #[instrument("Compilation:process_modules_runtime_requirements", skip_all)] + pub async fn process_modules_runtime_requirements( + &mut self, + modules: IdentifierSet, + plugin_driver: SharedPluginDriver, + ) -> Result<()> { + let logger = self.get_logger("rspack.Compilation"); + let start = logger.time("runtime requirements.modules"); + + let module_results = rspack_futures::scope::<_, Result<_>>(|token| { + modules + .into_iter() + .filter(|module| self.chunk_graph.get_number_of_module_chunks(*module) > 0) + .for_each(|module| { + let s = unsafe { token.used((&self, &plugin_driver)) }; + s.spawn(move |(compilation, plugin_driver)| async move { + let mut map = RuntimeSpecMap::new(); + let runtimes = compilation + .chunk_graph + .get_module_runtimes_iter(module, &compilation.chunk_by_ukey); + for runtime in runtimes { + let runtime_requirements = compilation + .old_cache + .process_runtime_requirements_occasion + .use_cache(module, runtime, compilation, || async { + let mut runtime_requirements = compilation + .code_generation_results + .get_runtime_requirements(&module, Some(runtime)); + + plugin_driver + .compilation_hooks + 
.additional_module_runtime_requirements + .call(compilation, &module, &mut runtime_requirements) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.additionalModuleRuntimeRequirements"))?; + + compilation + .process_runtime_requirement_hook(&mut runtime_requirements, { + let plugin_driver = plugin_driver.clone(); + move |compilation, + all_runtime_requirements, + runtime_requirements, + runtime_requirements_mut| { + Box::pin({ + let plugin_driver = plugin_driver.clone(); + async move { + plugin_driver + .compilation_hooks + .runtime_requirement_in_module + .call( + compilation, + &module, + all_runtime_requirements, + runtime_requirements, + runtime_requirements_mut, + ) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.runtimeRequirementInModule"))?; + Ok(()) + }}) + } + }) + .await?; + Ok(runtime_requirements) + }) + .await?; + map.set(runtime.clone(), runtime_requirements); + } + Ok((module, map)) + }); + }); + }) + .await + .into_iter() + .map(|r| r.to_rspack_result()) + .collect::>>()?; + + for entry in module_results { + let (module, map) = entry?; + ChunkGraph::set_module_runtime_requirements(self, module, map); + } + logger.time_end(start); + Ok(()) + } + + #[instrument(name = "Compilation:process_chunks_runtime_requirements", target=TRACING_BENCH_TARGET skip_all)] + pub async fn process_chunks_runtime_requirements( + &mut self, + chunks: UkeySet, + entries: UkeySet, + plugin_driver: SharedPluginDriver, + ) -> Result<()> { + let logger = self.get_logger("rspack.Compilation"); + let start = logger.time("runtime requirements.chunks"); + let chunk_requirements = chunks + .iter() + .chain(entries.iter()) + .par_bridge() + .map(|chunk_ukey| { + let mut set = RuntimeGlobals::default(); + for mid in self.chunk_graph.get_chunk_modules_identifier(chunk_ukey) { + let chunk = self.chunk_by_ukey.expect_get(chunk_ukey); + if let Some(runtime_requirements) = + ChunkGraph::get_module_runtime_requirements(self, *mid, 
chunk.runtime()) + { + set.insert(*runtime_requirements); + } + } + + (*chunk_ukey, set) + }) + .collect::>(); + + for (chunk_ukey, mut set) in chunk_requirements { + plugin_driver + .compilation_hooks + .additional_chunk_runtime_requirements + .call(self, &chunk_ukey, &mut set) + .await + .map_err(|e| { + e.wrap_err("caused by plugins in Compilation.hooks.additionalChunkRuntimeRequirements") + })?; + + self + .process_runtime_requirement_hook_mut(&mut set, { + let plugin_driver = plugin_driver.clone(); + move |compilation, + all_runtime_requirements, + runtime_requirements, + runtime_requirements_mut| { + Box::pin({ + let plugin_driver = plugin_driver.clone(); + async move { + plugin_driver + .compilation_hooks + .runtime_requirement_in_chunk + .call( + compilation, + &chunk_ukey, + all_runtime_requirements, + runtime_requirements, + runtime_requirements_mut, + ) + .await + .map_err(|e| { + e.wrap_err("caused by plugins in Compilation.hooks.runtimeRequirementInChunk") + })?; + Ok(()) + } + }) + } + }) + .await?; + + ChunkGraph::set_chunk_runtime_requirements(self, chunk_ukey, set); + } + logger.time_end(start); + + let start = logger.time("runtime requirements.entries"); + for &entry_ukey in &entries { + let entry = self.chunk_by_ukey.expect_get(&entry_ukey); + let mut set = RuntimeGlobals::default(); + for chunk_ukey in entry + .get_all_referenced_chunks(&self.chunk_group_by_ukey) + .iter() + { + let runtime_requirements = ChunkGraph::get_chunk_runtime_requirements(self, chunk_ukey); + set.insert(*runtime_requirements); + } + + plugin_driver + .compilation_hooks + .additional_tree_runtime_requirements + .call(self, &entry_ukey, &mut set) + .await + .map_err(|e| { + e.wrap_err("caused by plugins in Compilation.hooks.additionalTreeRuntimeRequirements") + })?; + + self + .process_runtime_requirement_hook_mut(&mut set, { + let plugin_driver = plugin_driver.clone(); + move |compilation, + all_runtime_requirements, + runtime_requirements, + runtime_requirements_mut| { 
+ Box::pin({ + let plugin_driver = plugin_driver.clone(); + async move { + plugin_driver + .compilation_hooks + .runtime_requirement_in_tree + .call( + compilation, + &entry_ukey, + all_runtime_requirements, + runtime_requirements, + runtime_requirements_mut, + ) + .await + .map_err(|e| { + e.wrap_err("caused by plugins in Compilation.hooks.runtimeRequirementInTree") + })?; + Ok(()) + } + }) + } + }) + .await?; + + ChunkGraph::set_tree_runtime_requirements(self, entry_ukey, set); + } + + // NOTE: webpack runs hooks.runtime_module in compilation.add_runtime_module + // and overwrite the runtime_module.generate() to get new source in create_chunk_assets + // this needs full runtime requirements, so run hooks.runtime_module after runtime_requirements_in_tree + let mut runtime_modules = mem::take(&mut self.runtime_modules); + for entry_ukey in &entries { + let runtime_module_ids: Vec<_> = self + .chunk_graph + .get_chunk_runtime_modules_iterable(entry_ukey) + .copied() + .collect(); + for runtime_module_id in runtime_module_ids { + plugin_driver + .compilation_hooks + .runtime_module + .call(self, &runtime_module_id, entry_ukey, &mut runtime_modules) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.runtimeModule"))?; + } + } + self.runtime_modules = runtime_modules; + + logger.time_end(start); + Ok(()) + } + + process_runtime_requirement_hook_macro!( + process_runtime_requirement_hook, + &Compilation, + &'a Compilation + ); + process_runtime_requirement_hook_macro!( + process_runtime_requirement_hook_mut, + &mut Compilation, + &'a mut Compilation + ); +} From c3866e5361d6c2731abdcaa8b9001a4b8041d888 Mon Sep 17 00:00:00 2001 From: hardfist Date: Thu, 15 Jan 2026 00:49:38 +0800 Subject: [PATCH 3/6] chore: use function --- .../src/compilation/after_seal/mod.rs | 22 +- .../src/compilation/assign_runtime_ids/mod.rs | 74 +++---- .../src/compilation/build_chunk_graph/pass.rs | 46 ++-- .../src/compilation/chunk_ids/mod.rs | 63 +++--- 
use super::*;
use crate::logger::Logger;

/// Timed wrapper that runs the compilation's `afterSeal` hook (free-function
/// pass form introduced by the pass refactor).
pub async fn after_seal_pass(
  compilation: &mut Compilation,
  plugin_driver: SharedPluginDriver,
) -> Result<()> {
  let logger = compilation.get_logger("rspack.Compilation");
  let start = logger.time("after seal");
  compilation.after_seal(plugin_driver).await?;
  logger.time_end(start);
  Ok(())
}

/// Records each entrypoint's runtime id on the chunk graph, for both sync
/// entrypoints and async entrypoints.
pub fn assign_runtime_ids(compilation: &mut Compilation) {
  // Resolve the runtime name for one entrypoint and store the id of its
  // runtime chunk under that name.
  fn process_entrypoint(
    entrypoint_ukey: &ChunkGroupUkey,
    chunk_group_by_ukey: &ChunkGroupByUkey,
    chunk_by_ukey: &ChunkByUkey,
    chunk_graph: &mut ChunkGraph,
  ) {
    let entrypoint = chunk_group_by_ukey.expect_get(entrypoint_ukey);
    // Prefer an explicit string `runtime` entry option; fall back to the
    // entrypoint's own name.
    let runtime = entrypoint
      .kind
      .get_entry_options()
      .and_then(|o| match &o.runtime {
        Some(EntryRuntime::String(s)) => Some(s.to_owned()),
        _ => None,
      })
      .or(entrypoint.name().map(|n| n.to_string()));
    if let (Some(runtime), Some(chunk)) = (
      runtime,
      chunk_by_ukey.get(&entrypoint.get_runtime_chunk(chunk_group_by_ukey)),
    ) {
      chunk_graph.set_runtime_id(runtime, chunk.id().map(|id| id.to_string()));
    }
  }
  for i in compilation.entrypoints.iter() {
    process_entrypoint(
      i.1,
      &compilation.chunk_group_by_ukey,
      &compilation.chunk_by_ukey,
      &mut compilation.chunk_graph,
    )
  }
  for i in compilation.async_entrypoints.iter() {
    process_entrypoint(
      i,
      &compilation.chunk_group_by_ukey,
      &compilation.chunk_by_ukey,
      &mut compilation.chunk_graph,
    )
  }
}

use rspack_error::Result;

use crate::{
  compilation::{
    Compilation,
    build_chunk_graph::{artifact::use_code_splitting_cache, build_chunk_graph},
  },
  logger::Logger,
};

/// Builds (or incrementally rebuilds) the chunk graph, going through the code
/// splitting cache, and dumps a debug dot graph afterwards.
pub async fn build_chunk_graph_pass(compilation: &mut Compilation) -> Result<()> {
  let logger = compilation.get_logger("rspack.Compilation");
  // Freeze the module graph cache so code splitting sees a stable snapshot.
  compilation.module_graph_cache_artifact.freeze();
  use_code_splitting_cache(compilation, |compilation| async {
    let start = logger.time("rebuild chunk graph");
    build_chunk_graph(compilation)?;
    compilation
      .chunk_graph
      .generate_dot(compilation, "after-code-splitting")
      .await;
    logger.time_end(start);
    Ok(compilation)
  })
  .await?;
  Ok(())
}

/// Runs the `chunkIds` hook to assign ids to all chunks, collecting any
/// diagnostics the plugins report.
pub async fn chunk_ids_pass(
  compilation: &mut Compilation,
  plugin_driver: SharedPluginDriver,
) -> Result<()> {
  let logger = compilation.get_logger("rspack.Compilation");
  let start = logger.time("chunk ids");

  // Check if CHUNK_IDS pass is disabled, and clear artifact if needed
  if !compilation
    .incremental
    .passes_enabled(IncrementalPasses::CHUNK_IDS)
  {
    compilation.named_chunk_ids_artifact.clear();
  }

  let mut diagnostics = vec![];
  // Move the maps out of `compilation` temporarily so the hook can borrow
  // the compilation mutably alongside them.
  let mut chunk_by_ukey = mem::take(&mut compilation.chunk_by_ukey);
  let mut named_chunk_ids_artifact = mem::take(&mut compilation.named_chunk_ids_artifact);
  plugin_driver
    .compilation_hooks
    .chunk_ids
    .call(
      compilation,
      &mut chunk_by_ukey,
      &mut named_chunk_ids_artifact,
      &mut diagnostics,
    )
    .await
    .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.chunkIds"))?;
  compilation.chunk_by_ukey = chunk_by_ukey;
  compilation.named_chunk_ids_artifact = named_chunk_ids_artifact;
  compilation.extend_diagnostics(diagnostics);
  logger.time_end(start);
  Ok(())
}
.mutations_read(IncrementalPasses::MODULES_CODEGEN) + && !compilation.code_generation_results.is_empty() + { + let revoked_modules = mutations.iter().filter_map(|mutation| match mutation { + Mutation::ModuleRemove { module } => Some(*module), + _ => None, + }); + for revoked_module in revoked_modules { + compilation.code_generation_results.remove(&revoked_module); + } + let modules: IdentifierSet = mutations + .iter() + .filter_map(|mutation| match mutation { + Mutation::ModuleSetHashes { module } => Some(*module), _ => None, - }); - for revoked_module in revoked_modules { - self.code_generation_results.remove(&revoked_module); - } - let modules: IdentifierSet = mutations - .iter() - .filter_map(|mutation| match mutation { - Mutation::ModuleSetHashes { module } => Some(*module), - _ => None, - }) - .collect(); - // also cleanup for updated modules, for `insert(); insert();` the second insert() won't override the first insert() on code_generation_results - for module in &modules { - self.code_generation_results.remove(module); - } - tracing::debug!(target: incremental::TRACING_TARGET, passes = %IncrementalPasses::MODULES_CODEGEN, %mutations); - let logger = self.get_logger("rspack.incremental.modulesCodegen"); - logger.log(format!( - "{} modules are affected, {} in total", - modules.len(), - self.get_module_graph().modules().len() - )); - modules - } else { - self.code_generation_results = Default::default(); - self.get_module_graph().modules().keys().copied().collect() - }; - self.code_generation(code_generation_modules).await?; + }) + .collect(); + // also cleanup for updated modules, for `insert(); insert();` the second insert() won't override the first insert() on code_generation_results + for module in &modules { + compilation.code_generation_results.remove(module); + } + tracing::debug!(target: incremental::TRACING_TARGET, passes = %IncrementalPasses::MODULES_CODEGEN, %mutations); + let logger = compilation.get_logger("rspack.incremental.modulesCodegen"); + 
logger.log(format!( + "{} modules are affected, {} in total", + modules.len(), + compilation.get_module_graph().modules().len() + )); + modules + } else { + compilation.code_generation_results = Default::default(); + compilation + .get_module_graph() + .modules() + .keys() + .copied() + .collect() + }; + compilation.code_generation(code_generation_modules).await?; - let mut diagnostics = vec![]; - plugin_driver - .compilation_hooks - .after_code_generation - .call(self, &mut diagnostics) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.afterCodeGeneration"))?; - self.extend_diagnostics(diagnostics); + let mut diagnostics = vec![]; + plugin_driver + .compilation_hooks + .after_code_generation + .call(compilation, &mut diagnostics) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.afterCodeGeneration"))?; + compilation.extend_diagnostics(diagnostics); - logger.time_end(start); - Ok(()) - } + logger.time_end(start); + Ok(()) +} +impl Compilation { #[instrument("Compilation:code_generation",target=TRACING_BENCH_TARGET, skip_all)] async fn code_generation(&mut self, modules: IdentifierSet) -> Result<()> { let logger = self.get_logger("rspack.Compilation"); diff --git a/crates/rspack_core/src/compilation/create_chunk_assets/mod.rs b/crates/rspack_core/src/compilation/create_chunk_assets/mod.rs index d8d7d4074e41..d26ec44ff4b4 100644 --- a/crates/rspack_core/src/compilation/create_chunk_assets/mod.rs +++ b/crates/rspack_core/src/compilation/create_chunk_assets/mod.rs @@ -1,18 +1,18 @@ use super::*; use crate::logger::Logger; -impl Compilation { - pub async fn create_chunk_assets_pass( - &mut self, - plugin_driver: SharedPluginDriver, - ) -> Result<()> { - let logger = self.get_logger("rspack.Compilation"); - let start = logger.time("create chunk assets"); - self.create_chunk_assets(plugin_driver).await?; - logger.time_end(start); - Ok(()) - } +pub async fn create_chunk_assets_pass( + compilation: &mut Compilation, + 
plugin_driver: SharedPluginDriver, +) -> Result<()> { + let logger = compilation.get_logger("rspack.Compilation"); + let start = logger.time("create chunk assets"); + compilation.create_chunk_assets(plugin_driver).await?; + logger.time_end(start); + Ok(()) +} +impl Compilation { #[instrument("Compilation::create_chunk_assets",target=TRACING_BENCH_TARGET, skip_all)] async fn create_chunk_assets(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { if (self.options.output.filename.has_hash_placeholder() diff --git a/crates/rspack_core/src/compilation/create_hash/mod.rs b/crates/rspack_core/src/compilation/create_hash/mod.rs index 3c3b3abea142..25ab646b16d4 100644 --- a/crates/rspack_core/src/compilation/create_hash/mod.rs +++ b/crates/rspack_core/src/compilation/create_hash/mod.rs @@ -6,16 +6,19 @@ pub struct ChunkHashResult { pub content_hash: ChunkContentHash, } -impl Compilation { - pub async fn create_hash_pass(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { - let logger = self.get_logger("rspack.Compilation"); - let start = logger.time("hashing"); - self.create_hash(plugin_driver).await?; - self.runtime_modules_code_generation().await?; - logger.time_end(start); - Ok(()) - } +pub async fn create_hash_pass( + compilation: &mut Compilation, + plugin_driver: SharedPluginDriver, +) -> Result<()> { + let logger = compilation.get_logger("rspack.Compilation"); + let start = logger.time("hashing"); + compilation.create_hash(plugin_driver).await?; + compilation.runtime_modules_code_generation().await?; + logger.time_end(start); + Ok(()) +} +impl Compilation { #[instrument(name = "Compilation:create_hash",target=TRACING_BENCH_TARGET, skip_all)] pub async fn create_hash(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { let logger = self.get_logger("rspack.Compilation"); diff --git a/crates/rspack_core/src/compilation/create_module_assets/mod.rs b/crates/rspack_core/src/compilation/create_module_assets/mod.rs index 
e4d08d8d1f48..1d029caa1138 100644 --- a/crates/rspack_core/src/compilation/create_module_assets/mod.rs +++ b/crates/rspack_core/src/compilation/create_module_assets/mod.rs @@ -1,18 +1,18 @@ use super::*; use crate::logger::Logger; -impl Compilation { - pub async fn create_module_assets_pass( - &mut self, - plugin_driver: SharedPluginDriver, - ) -> Result<()> { - let logger = self.get_logger("rspack.Compilation"); - let start = logger.time("create module assets"); - self.create_module_assets(plugin_driver).await; - logger.time_end(start); - Ok(()) - } +pub async fn create_module_assets_pass( + compilation: &mut Compilation, + plugin_driver: SharedPluginDriver, +) -> Result<()> { + let logger = compilation.get_logger("rspack.Compilation"); + let start = logger.time("create module assets"); + compilation.create_module_assets(plugin_driver).await; + logger.time_end(start); + Ok(()) +} +impl Compilation { #[instrument("Compilation:create_module_assets",target=TRACING_BENCH_TARGET, skip_all)] async fn create_module_assets(&mut self, _plugin_driver: SharedPluginDriver) { let mut chunk_asset_map = vec![]; diff --git a/crates/rspack_core/src/compilation/create_module_hashes/mod.rs b/crates/rspack_core/src/compilation/create_module_hashes/mod.rs index 6e5554625027..282833f8cbb1 100644 --- a/crates/rspack_core/src/compilation/create_module_hashes/mod.rs +++ b/crates/rspack_core/src/compilation/create_module_hashes/mod.rs @@ -1,98 +1,104 @@ use super::*; +use crate::logger::Logger; -impl Compilation { - pub async fn create_module_hashes_pass(&mut self) -> Result<()> { - // Check if MODULES_HASHES pass is disabled, and clear artifact if needed - if !self - .incremental - .passes_enabled(IncrementalPasses::MODULES_HASHES) - { - self.cgm_hash_artifact.clear(); +pub async fn create_module_hashes_pass(compilation: &mut Compilation) -> Result<()> { + // Check if MODULES_HASHES pass is disabled, and clear artifact if needed + if !compilation + .incremental + 
.passes_enabled(IncrementalPasses::MODULES_HASHES) + { + compilation.cgm_hash_artifact.clear(); + } + + let create_module_hashes_modules = if let Some(mutations) = compilation + .incremental + .mutations_read(IncrementalPasses::MODULES_HASHES) + && !compilation.cgm_hash_artifact.is_empty() + { + let revoked_modules = mutations.iter().filter_map(|mutation| match mutation { + Mutation::ModuleRemove { module } => Some(*module), + _ => None, + }); + for revoked_module in revoked_modules { + compilation.cgm_hash_artifact.remove(&revoked_module); } + let mut modules = mutations.get_affected_modules_with_chunk_graph(compilation); - let create_module_hashes_modules = if let Some(mutations) = self - .incremental - .mutations_read(IncrementalPasses::MODULES_HASHES) - && !self.cgm_hash_artifact.is_empty() - { - let revoked_modules = mutations.iter().filter_map(|mutation| match mutation { - Mutation::ModuleRemove { module } => Some(*module), - _ => None, - }); - for revoked_module in revoked_modules { - self.cgm_hash_artifact.remove(&revoked_module); - } - let mut modules = mutations.get_affected_modules_with_chunk_graph(self); + // check if module runtime changes + let mg = compilation.get_module_graph(); + for mi in mg.modules().keys() { + let module_runtimes = compilation + .chunk_graph + .get_module_runtimes(*mi, &compilation.chunk_by_ukey); + let module_runtime_keys = module_runtimes + .values() + .map(get_runtime_key) + .collect::>(); - // check if module runtime changes - let mg = self.get_module_graph(); - for mi in mg.modules().keys() { - let module_runtimes = self - .chunk_graph - .get_module_runtimes(*mi, &self.chunk_by_ukey); - let module_runtime_keys = module_runtimes - .values() - .map(get_runtime_key) - .collect::>(); + if let Some(runtime_map) = compilation.cgm_hash_artifact.get_runtime_map(mi) { + if module_runtimes.is_empty() { + // module has no runtime, skip + continue; + } + if module_runtimes.len() == 1 { + // single runtime + if 
!matches!(runtime_map.mode, RuntimeMode::SingleEntry) + || runtime_map + .single_runtime + .as_ref() + .expect("should have single runtime for single entry") + != module_runtimes + .values() + .next() + .expect("should have at least one runtime") + { + modules.insert(*mi); + } + } else { + // multiple runtimes + if matches!(runtime_map.mode, RuntimeMode::SingleEntry) { + modules.insert(*mi); + continue; + } - if let Some(runtime_map) = self.cgm_hash_artifact.get_runtime_map(mi) { - if module_runtimes.is_empty() { - // module has no runtime, skip + if runtime_map.map.len() != module_runtimes.len() { + modules.insert(*mi); continue; } - if module_runtimes.len() == 1 { - // single runtime - if !matches!(runtime_map.mode, RuntimeMode::SingleEntry) - || runtime_map - .single_runtime - .as_ref() - .expect("should have single runtime for single entry") - != module_runtimes - .values() - .next() - .expect("should have at least one runtime") - { - modules.insert(*mi); - } - } else { - // multiple runtimes - if matches!(runtime_map.mode, RuntimeMode::SingleEntry) { - modules.insert(*mi); - continue; - } - if runtime_map.map.len() != module_runtimes.len() { + for runtime_key in runtime_map.map.keys() { + if !module_runtime_keys.contains(runtime_key) { modules.insert(*mi); - continue; - } - - for runtime_key in runtime_map.map.keys() { - if !module_runtime_keys.contains(runtime_key) { - modules.insert(*mi); - break; - } + break; } } } } + } - tracing::debug!(target: incremental::TRACING_TARGET, passes = %IncrementalPasses::MODULES_HASHES, %mutations, ?modules); - let logger = self.get_logger("rspack.incremental.modulesHashes"); - logger.log(format!( - "{} modules are affected, {} in total", - modules.len(), - mg.modules().len() - )); + tracing::debug!(target: incremental::TRACING_TARGET, passes = %IncrementalPasses::MODULES_HASHES, %mutations, ?modules); + let logger = compilation.get_logger("rspack.incremental.modulesHashes"); + logger.log(format!( + "{} modules are affected, 
{} in total", + modules.len(), + mg.modules().len() + )); - modules - } else { - self.get_module_graph().modules().keys().copied().collect() - }; - self - .create_module_hashes(create_module_hashes_modules) - .await - } + modules + } else { + compilation + .get_module_graph() + .modules() + .keys() + .copied() + .collect() + }; + compilation + .create_module_hashes(create_module_hashes_modules) + .await +} +impl Compilation { #[instrument("Compilation:create_module_hashes", skip_all)] pub async fn create_module_hashes(&mut self, modules: IdentifierSet) -> Result<()> { let mg = self.get_module_graph(); diff --git a/crates/rspack_core/src/compilation/mod.rs b/crates/rspack_core/src/compilation/mod.rs index 8a0b8c887b6e..b1e18cc9bbee 100644 --- a/crates/rspack_core/src/compilation/mod.rs +++ b/crates/rspack_core/src/compilation/mod.rs @@ -1,24 +1,24 @@ +mod after_seal; +mod assign_runtime_ids; pub mod build_chunk_graph; pub mod build_module_graph; -mod assign_runtime_ids; -mod create_module_assets; -mod create_chunk_assets; -mod process_assets; -mod after_seal; +mod chunk_ids; mod code_generation; -mod runtime_requirements; +mod create_chunk_assets; mod create_hash; +mod create_module_assets; mod create_module_hashes; mod finish_module; +mod module_ids; +mod optimize_chunk_modules; +mod optimize_chunks; +mod optimize_code_generation; mod optimize_dependencies; mod optimize_modules; -mod optimize_chunks; mod optimize_tree; -mod optimize_chunk_modules; -mod module_ids; -mod chunk_ids; -mod optimize_code_generation; +mod process_assets; mod run_passes; +mod runtime_requirements; use std::{ collections::{VecDeque, hash_map}, fmt::{self, Debug}, @@ -67,7 +67,7 @@ use crate::{ DependenciesDiagnosticsArtifact, DependencyCodeGeneration, DependencyTemplate, DependencyTemplateType, DependencyType, DerefOption, Entry, EntryData, EntryOptions, EntryRuntime, Entrypoint, ExecuteModuleId, Filename, ImportPhase, ImportVarMap, - ImportedByDeferModulesArtifact, Logger, MemoryGCStorage, 
ModuleFactory, ModuleGraph, + ImportedByDeferModulesArtifact, MemoryGCStorage, ModuleFactory, ModuleGraph, ModuleGraphCacheArtifact, ModuleIdentifier, ModuleIdsArtifact, ModuleStaticCacheArtifact, PathData, ResolverFactory, RuntimeGlobals, RuntimeKeyMap, RuntimeMode, RuntimeModule, RuntimeSpec, RuntimeSpecMap, RuntimeTemplate, SharedPluginDriver, SideEffectsOptimizeArtifact, @@ -1062,34 +1062,6 @@ impl Compilation { self.chunk_group_by_ukey.expect_get_mut(ukey) } - #[instrument("Compilation:seal", skip_all)] - pub async fn seal(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { - // add a checkpoint here since we may modify module graph later in incremental compilation - // and we can recover to this checkpoint in the future - if self.incremental.passes_enabled(IncrementalPasses::MAKE) { - self.build_module_graph_artifact.module_graph.checkpoint(); - } - - if !self.options.mode.is_development() { - self.module_static_cache_artifact.freeze(); - } - - // https://github.com/webpack/webpack/blob/main/lib/Compilation.js#L2809 - plugin_driver - .compilation_hooks - .seal - .call(self) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.seal"))?; - - self.run_passes(plugin_driver).await?; - - if !self.options.mode.is_development() { - self.module_static_cache_artifact.unfreeze(); - } - Ok(()) - } - pub fn get_chunk_graph_entries(&self) -> impl Iterator + use<'_> { let entries = self.entrypoints.values().map(|entrypoint_ukey| { let entrypoint = self.chunk_group_by_ukey.expect_get(entrypoint_ukey); diff --git a/crates/rspack_core/src/compilation/module_ids/mod.rs b/crates/rspack_core/src/compilation/module_ids/mod.rs index 05b31be63129..ecdf22998120 100644 --- a/crates/rspack_core/src/compilation/module_ids/mod.rs +++ b/crates/rspack_core/src/compilation/module_ids/mod.rs @@ -1,30 +1,31 @@ use super::*; use crate::logger::Logger; -impl Compilation { - pub async fn module_ids_pass(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { 
- let logger = self.get_logger("rspack.Compilation"); - let start = logger.time("module ids"); +pub async fn module_ids_pass( + compilation: &mut Compilation, + plugin_driver: SharedPluginDriver, +) -> Result<()> { + let logger = compilation.get_logger("rspack.Compilation"); + let start = logger.time("module ids"); - // Check if MODULE_IDS pass is disabled, and clear artifact if needed - if !self - .incremental - .passes_enabled(IncrementalPasses::MODULE_IDS) - { - self.module_ids_artifact.clear(); - } - - let mut diagnostics = vec![]; - let mut module_ids_artifact = mem::take(&mut self.module_ids_artifact); - plugin_driver - .compilation_hooks - .module_ids - .call(self, &mut module_ids_artifact, &mut diagnostics) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.moduleIds"))?; - self.module_ids_artifact = module_ids_artifact; - self.extend_diagnostics(diagnostics); - logger.time_end(start); - Ok(()) + // Check if MODULE_IDS pass is disabled, and clear artifact if needed + if !compilation + .incremental + .passes_enabled(IncrementalPasses::MODULE_IDS) + { + compilation.module_ids_artifact.clear(); } + + let mut diagnostics = vec![]; + let mut module_ids_artifact = mem::take(&mut compilation.module_ids_artifact); + plugin_driver + .compilation_hooks + .module_ids + .call(compilation, &mut module_ids_artifact, &mut diagnostics) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.moduleIds"))?; + compilation.module_ids_artifact = module_ids_artifact; + compilation.extend_diagnostics(diagnostics); + logger.time_end(start); + Ok(()) } diff --git a/crates/rspack_core/src/compilation/optimize_chunk_modules/mod.rs b/crates/rspack_core/src/compilation/optimize_chunk_modules/mod.rs index 27d243cf089c..15b157d10de0 100644 --- a/crates/rspack_core/src/compilation/optimize_chunk_modules/mod.rs +++ b/crates/rspack_core/src/compilation/optimize_chunk_modules/mod.rs @@ -1,16 +1,14 @@ use super::*; -impl Compilation { - pub async fn 
optimize_chunk_modules_pass( - &mut self, - plugin_driver: SharedPluginDriver, - ) -> Result<()> { - plugin_driver - .compilation_hooks - .optimize_chunk_modules - .call(self) - .await - .map(|_| ()) - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeChunkModules")) - } +pub async fn optimize_chunk_modules_pass( + compilation: &mut Compilation, + plugin_driver: SharedPluginDriver, +) -> Result<()> { + plugin_driver + .compilation_hooks + .optimize_chunk_modules + .call(compilation) + .await + .map(|_| ()) + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeChunkModules")) } diff --git a/crates/rspack_core/src/compilation/optimize_chunks/mod.rs b/crates/rspack_core/src/compilation/optimize_chunks/mod.rs index 15340763b8d4..6bc6a593c054 100644 --- a/crates/rspack_core/src/compilation/optimize_chunks/mod.rs +++ b/crates/rspack_core/src/compilation/optimize_chunks/mod.rs @@ -1,19 +1,17 @@ use super::*; -impl Compilation { - pub async fn optimize_chunks_pass( - &mut self, - plugin_driver: SharedPluginDriver, - ) -> Result<()> { - while matches!( - plugin_driver - .compilation_hooks - .optimize_chunks - .call(self) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeChunks"))?, - Some(true) - ) {} - Ok(()) - } +pub async fn optimize_chunks_pass( + compilation: &mut Compilation, + plugin_driver: SharedPluginDriver, +) -> Result<()> { + while matches!( + plugin_driver + .compilation_hooks + .optimize_chunks + .call(compilation) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeChunks"))?, + Some(true) + ) {} + Ok(()) } diff --git a/crates/rspack_core/src/compilation/optimize_code_generation/mod.rs b/crates/rspack_core/src/compilation/optimize_code_generation/mod.rs index d92cfa60d44d..280b91cbb6ae 100644 --- a/crates/rspack_core/src/compilation/optimize_code_generation/mod.rs +++ b/crates/rspack_core/src/compilation/optimize_code_generation/mod.rs @@ -1,20 +1,18 @@ 
use super::*; use crate::logger::Logger; -impl Compilation { - pub async fn optimize_code_generation_pass( - &mut self, - plugin_driver: SharedPluginDriver, - ) -> Result<()> { - let logger = self.get_logger("rspack.Compilation"); - let start = logger.time("optimize code generation"); - plugin_driver - .compilation_hooks - .optimize_code_generation - .call(self) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeCodeGeneration"))?; - logger.time_end(start); - Ok(()) - } +pub async fn optimize_code_generation_pass( + compilation: &mut Compilation, + plugin_driver: SharedPluginDriver, +) -> Result<()> { + let logger = compilation.get_logger("rspack.Compilation"); + let start = logger.time("optimize code generation"); + plugin_driver + .compilation_hooks + .optimize_code_generation + .call(compilation) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeCodeGeneration"))?; + logger.time_end(start); + Ok(()) } diff --git a/crates/rspack_core/src/compilation/optimize_dependencies/mod.rs b/crates/rspack_core/src/compilation/optimize_dependencies/mod.rs index 8d9bdb23ea0d..79f4ffc52bc5 100644 --- a/crates/rspack_core/src/compilation/optimize_dependencies/mod.rs +++ b/crates/rspack_core/src/compilation/optimize_dependencies/mod.rs @@ -1,41 +1,39 @@ use super::*; use crate::logger::Logger; -impl Compilation { - pub async fn optimize_dependencies_pass( - &mut self, - plugin_driver: SharedPluginDriver, - ) -> Result<()> { - let logger = self.get_logger("rspack.Compilation"); - let start = logger.time("optimize dependencies"); - // https://github.com/webpack/webpack/blob/d15c73469fd71cf98734685225250148b68ddc79/lib/Compilation.js#L2812-L2814 +pub async fn optimize_dependencies_pass( + compilation: &mut Compilation, + plugin_driver: SharedPluginDriver, +) -> Result<()> { + let logger = compilation.get_logger("rspack.Compilation"); + let start = logger.time("optimize dependencies"); + // 
https://github.com/webpack/webpack/blob/d15c73469fd71cf98734685225250148b68ddc79/lib/Compilation.js#L2812-L2814 - let mut diagnostics: Vec = vec![]; - let mut side_effects_optimize_artifact = self.side_effects_optimize_artifact.take(); - let mut build_module_graph_artifact = self.build_module_graph_artifact.take(); - while matches!( - plugin_driver - .compilation_hooks - .optimize_dependencies - .call( - self, - &mut side_effects_optimize_artifact, - &mut build_module_graph_artifact, - &mut diagnostics - ) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeDependencies"))?, - Some(true) - ) {} - self - .side_effects_optimize_artifact - .replace(side_effects_optimize_artifact); - self - .build_module_graph_artifact - .replace(build_module_graph_artifact); - self.extend_diagnostics(diagnostics); + let mut diagnostics: Vec = vec![]; + let mut side_effects_optimize_artifact = compilation.side_effects_optimize_artifact.take(); + let mut build_module_graph_artifact = compilation.build_module_graph_artifact.take(); + while matches!( + plugin_driver + .compilation_hooks + .optimize_dependencies + .call( + compilation, + &mut side_effects_optimize_artifact, + &mut build_module_graph_artifact, + &mut diagnostics + ) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeDependencies"))?, + Some(true) + ) {} + compilation + .side_effects_optimize_artifact + .replace(side_effects_optimize_artifact); + compilation + .build_module_graph_artifact + .replace(build_module_graph_artifact); + compilation.extend_diagnostics(diagnostics); - logger.time_end(start); - Ok(()) - } + logger.time_end(start); + Ok(()) } diff --git a/crates/rspack_core/src/compilation/optimize_modules/mod.rs b/crates/rspack_core/src/compilation/optimize_modules/mod.rs index e162e9512ac4..72a6779165f3 100644 --- a/crates/rspack_core/src/compilation/optimize_modules/mod.rs +++ b/crates/rspack_core/src/compilation/optimize_modules/mod.rs @@ -1,27 +1,25 
@@ use super::*; -impl Compilation { - pub async fn optimize_modules_pass( - &mut self, - plugin_driver: SharedPluginDriver, - ) -> Result<()> { - let mut diagnostics = vec![]; - while matches!( - plugin_driver - .compilation_hooks - .optimize_modules - .call(self, &mut diagnostics) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeModules"))?, - Some(true) - ) {} - self.extend_diagnostics(diagnostics); - +pub async fn optimize_modules_pass( + compilation: &mut Compilation, + plugin_driver: SharedPluginDriver, +) -> Result<()> { + let mut diagnostics = vec![]; + while matches!( plugin_driver .compilation_hooks - .after_optimize_modules - .call(self) + .optimize_modules + .call(compilation, &mut diagnostics) .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.afterOptimizeModules")) - } + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeModules"))?, + Some(true) + ) {} + compilation.extend_diagnostics(diagnostics); + + plugin_driver + .compilation_hooks + .after_optimize_modules + .call(compilation) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.afterOptimizeModules")) } diff --git a/crates/rspack_core/src/compilation/optimize_tree/mod.rs b/crates/rspack_core/src/compilation/optimize_tree/mod.rs index f127f8b716ac..ff62eba916a2 100644 --- a/crates/rspack_core/src/compilation/optimize_tree/mod.rs +++ b/crates/rspack_core/src/compilation/optimize_tree/mod.rs @@ -1,15 +1,13 @@ use super::*; -impl Compilation { - pub async fn optimize_tree_pass( - &mut self, - plugin_driver: SharedPluginDriver, - ) -> Result<()> { - plugin_driver - .compilation_hooks - .optimize_tree - .call(self) - .await - .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeTree")) - } +pub async fn optimize_tree_pass( + compilation: &mut Compilation, + plugin_driver: SharedPluginDriver, +) -> Result<()> { + plugin_driver + .compilation_hooks + .optimize_tree + 
.call(compilation) + .await + .map_err(|e| e.wrap_err("caused by plugins in Compilation.hooks.optimizeTree")) } diff --git a/crates/rspack_core/src/compilation/process_assets/mod.rs b/crates/rspack_core/src/compilation/process_assets/mod.rs index 009d3057b868..3e3887944f9f 100644 --- a/crates/rspack_core/src/compilation/process_assets/mod.rs +++ b/crates/rspack_core/src/compilation/process_assets/mod.rs @@ -1,22 +1,22 @@ use super::*; use crate::logger::Logger; -impl Compilation { - pub async fn process_assets_pass( - &mut self, - plugin_driver: SharedPluginDriver, - ) -> Result<()> { - let logger = self.get_logger("rspack.Compilation"); - let start = logger.time("process assets"); - self.process_assets(plugin_driver.clone()).await?; - logger.time_end(start); +pub async fn process_assets_pass( + compilation: &mut Compilation, + plugin_driver: SharedPluginDriver, +) -> Result<()> { + let logger = compilation.get_logger("rspack.Compilation"); + let start = logger.time("process assets"); + compilation.process_assets(plugin_driver.clone()).await?; + logger.time_end(start); - let start = logger.time("after process assets"); - self.after_process_assets(plugin_driver).await?; - logger.time_end(start); - Ok(()) - } + let start = logger.time("after process assets"); + compilation.after_process_assets(plugin_driver).await?; + logger.time_end(start); + Ok(()) +} +impl Compilation { #[instrument("Compilation:process_assets",target=TRACING_BENCH_TARGET, skip_all)] async fn process_assets(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { plugin_driver diff --git a/crates/rspack_core/src/compilation/rspack_passes.md b/crates/rspack_core/src/compilation/rspack_passes.md index e1ab2125ddfe..b7dc1a3eda9c 100644 --- a/crates/rspack_core/src/compilation/rspack_passes.md +++ b/crates/rspack_core/src/compilation/rspack_passes.md @@ -11,7 +11,7 @@ The compilation process is organized into independent modules, each responsible ``` compilation/ ├── mod.rs # Main Compilation 
struct which exposes the public API -├── run_passes.rs # Pass driver which calls all pass(which includes make and seal) +├── run_passes.rs # Pass driver invoked from Compiler that runs make + seal passes ├── build_module_graph/ # Module graph construction │ └── finish_module/ # Finalize module graph, async modules, dependency diagnostics ├── optimize_dependencies/ # optimizeDependencies hook + side effects artifact @@ -36,18 +36,22 @@ compilation/ ## Pass Order -`run_passes` orchestrates passes after `CompilationHooks::seal` in this order: - -1. `optimize_dependencies_pass` -2. `build_chunk_graph_pass` → `optimize_modules_pass` → `optimize_chunks_pass` -3. `optimize_tree_pass` → `optimize_chunk_modules_pass` -4. `module_ids_pass` → `chunk_ids_pass` → `assign_runtime_ids` -5. `optimize_code_generation_pass` -6. `create_module_hashes_pass` -7. `code_generation_pass` -8. `runtime_requirements_pass` -9. `create_hash_pass` (also runs runtime module code generation) -10. `create_module_assets_pass` -11. `create_chunk_assets_pass` -12. `process_assets_pass` -13. `after_seal_pass` +`run_passes` orchestrates the full pipeline (make + seal) in this order: + +1. Compiler hooks: `thisCompilation` → `compilation` +2. Make: `make` hook → `build_module_graph` → `finish_make` hook → `finish_build_module_graph` +3. Collect make diagnostics (`collect_build_module_graph_effects`) +4. Seal kickoff: `CompilationHooks::seal` (checkpoint + cache freeze) +5. `optimize_dependencies_pass` +6. `build_chunk_graph_pass` → `optimize_modules_pass` → `optimize_chunks_pass` +7. `optimize_tree_pass` → `optimize_chunk_modules_pass` +8. `module_ids_pass` → `chunk_ids_pass` → `assign_runtime_ids` +9. `optimize_code_generation_pass` +10. `create_module_hashes_pass` +11. `code_generation_pass` +12. `runtime_requirements_pass` +13. `create_hash_pass` (also runs runtime module code generation) +14. `create_module_assets_pass` +15. `create_chunk_assets_pass` +16. `process_assets_pass` +17. 
`after_seal_pass` diff --git a/crates/rspack_core/src/compilation/run_passes.rs b/crates/rspack_core/src/compilation/run_passes.rs index c84e6baea21b..7c6bc5777847 100644 --- a/crates/rspack_core/src/compilation/run_passes.rs +++ b/crates/rspack_core/src/compilation/run_passes.rs @@ -1,55 +1,69 @@ -use super::*; -use crate::logger::Logger; +use super::{ + after_seal::after_seal_pass, assign_runtime_ids::assign_runtime_ids, + build_chunk_graph::pass::build_chunk_graph_pass, chunk_ids::chunk_ids_pass, + code_generation::code_generation_pass, create_chunk_assets::create_chunk_assets_pass, + create_hash::create_hash_pass, create_module_assets::create_module_assets_pass, + create_module_hashes::create_module_hashes_pass, module_ids::module_ids_pass, + optimize_chunk_modules::optimize_chunk_modules_pass, optimize_chunks::optimize_chunks_pass, + optimize_code_generation::optimize_code_generation_pass, + optimize_dependencies::optimize_dependencies_pass, optimize_modules::optimize_modules_pass, + optimize_tree::optimize_tree_pass, process_assets::process_assets_pass, + runtime_requirements::runtime_requirements_pass, *, +}; +use crate::{Compilation, SharedPluginDriver, incremental::IncrementalPasses, logger::Logger}; impl Compilation { pub async fn run_passes(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { - self - .optimize_dependencies_pass(plugin_driver.clone()) - .await?; + // add a checkpoint here since we may modify module graph later in incremental compilation + // and we can recover to this checkpoint in the future + if self.incremental.passes_enabled(IncrementalPasses::MAKE) { + self.build_module_graph_artifact.module_graph.checkpoint(); + } + + if !self.options.mode.is_development() { + self.module_static_cache_artifact.freeze(); + } + + // https://github.com/webpack/webpack/blob/main/lib/Compilation.js#L2809 + plugin_driver + .compilation_hooks + .seal + .call(self) + .await + .map_err(|e| e.wrap_err("caused by plugins in 
Compilation.hooks.seal"))?; + + optimize_dependencies_pass(self, plugin_driver.clone()).await?; let logger = self.get_logger("rspack.Compilation"); let create_chunks_start = logger.time("create chunks"); - self.build_chunk_graph_pass().await?; - self - .optimize_modules_pass(plugin_driver.clone()) - .await?; - self - .optimize_chunks_pass(plugin_driver.clone()) - .await?; + build_chunk_graph_pass(self).await?; + optimize_modules_pass(self, plugin_driver.clone()).await?; + optimize_chunks_pass(self, plugin_driver.clone()).await?; logger.time_end(create_chunks_start); let optimize_start = logger.time("optimize"); - self - .optimize_tree_pass(plugin_driver.clone()) - .await?; - self - .optimize_chunk_modules_pass(plugin_driver.clone()) - .await?; + optimize_tree_pass(self, plugin_driver.clone()).await?; + optimize_chunk_modules_pass(self, plugin_driver.clone()).await?; logger.time_end(optimize_start); - self.module_ids_pass(plugin_driver.clone()).await?; - self.chunk_ids_pass(plugin_driver.clone()).await?; - self.assign_runtime_ids(); - - self - .optimize_code_generation_pass(plugin_driver.clone()) - .await?; - self.create_module_hashes_pass().await?; - self.code_generation_pass(plugin_driver.clone()).await?; - self - .runtime_requirements_pass(plugin_driver.clone()) - .await?; - self.create_hash_pass(plugin_driver.clone()).await?; - self - .create_module_assets_pass(plugin_driver.clone()) - .await?; - self - .create_chunk_assets_pass(plugin_driver.clone()) - .await?; - self.process_assets_pass(plugin_driver.clone()).await?; - self.after_seal_pass(plugin_driver).await?; + module_ids_pass(self, plugin_driver.clone()).await?; + chunk_ids_pass(self, plugin_driver.clone()).await?; + assign_runtime_ids(self); + + optimize_code_generation_pass(self, plugin_driver.clone()).await?; + create_module_hashes_pass(self).await?; + code_generation_pass(self, plugin_driver.clone()).await?; + runtime_requirements_pass(self, plugin_driver.clone()).await?; + create_hash_pass(self, 
plugin_driver.clone()).await?; + create_module_assets_pass(self, plugin_driver.clone()).await?; + create_chunk_assets_pass(self, plugin_driver.clone()).await?; + process_assets_pass(self, plugin_driver.clone()).await?; + after_seal_pass(self, plugin_driver).await?; + if !self.options.mode.is_development() { + self.module_static_cache_artifact.unfreeze(); + } Ok(()) } } diff --git a/crates/rspack_core/src/compilation/runtime_requirements/mod.rs b/crates/rspack_core/src/compilation/runtime_requirements/mod.rs index c06fca20224d..7892b242edb0 100644 --- a/crates/rspack_core/src/compilation/runtime_requirements/mod.rs +++ b/crates/rspack_core/src/compilation/runtime_requirements/mod.rs @@ -1,6 +1,114 @@ use super::*; use crate::logger::Logger; +pub async fn runtime_requirements_pass( + compilation: &mut Compilation, + plugin_driver: SharedPluginDriver, +) -> Result<()> { + let logger = compilation.get_logger("rspack.Compilation"); + let start = logger.time("runtime requirements"); + let process_runtime_requirements_modules = if let Some(mutations) = compilation + .incremental + .mutations_read(IncrementalPasses::MODULES_RUNTIME_REQUIREMENTS) + && !compilation.cgm_runtime_requirements_artifact.is_empty() + { + let revoked_modules = mutations.iter().filter_map(|mutation| match mutation { + Mutation::ModuleRemove { module } => Some(*module), + _ => None, + }); + for revoked_module in revoked_modules { + compilation + .cgm_runtime_requirements_artifact + .remove(&revoked_module); + } + let modules: IdentifierSet = mutations + .iter() + .filter_map(|mutation| match mutation { + Mutation::ModuleSetHashes { module } => Some(*module), + _ => None, + }) + .collect(); + let logger = compilation.get_logger("rspack.incremental.modulesRuntimeRequirements"); + logger.log(format!( + "{} modules are affected, {} in total", + modules.len(), + compilation.get_module_graph().modules().len() + )); + modules + } else { + compilation.cgm_runtime_requirements_artifact = Default::default(); + 
compilation + .get_module_graph() + .modules() + .keys() + .copied() + .collect() + }; + compilation + .process_modules_runtime_requirements( + process_runtime_requirements_modules, + plugin_driver.clone(), + ) + .await?; + let runtime_chunks = compilation.get_chunk_graph_entries().collect(); + + // Check if CHUNKS_RUNTIME_REQUIREMENTS pass is disabled, and clear artifact if needed + if !compilation + .incremental + .passes_enabled(IncrementalPasses::CHUNKS_RUNTIME_REQUIREMENTS) + { + compilation.cgc_runtime_requirements_artifact.clear(); + } + + let process_runtime_requirements_chunks = if let Some(mutations) = compilation + .incremental + .mutations_read(IncrementalPasses::CHUNKS_RUNTIME_REQUIREMENTS) + && !compilation.cgc_runtime_requirements_artifact.is_empty() + { + let removed_chunks = mutations.iter().filter_map(|mutation| match mutation { + Mutation::ChunkRemove { chunk } => Some(chunk), + _ => None, + }); + for removed_chunk in removed_chunks { + compilation + .cgc_runtime_requirements_artifact + .remove(removed_chunk); + } + let affected_chunks = mutations.get_affected_chunks_with_chunk_graph(compilation); + for affected_chunk in &affected_chunks { + compilation + .cgc_runtime_requirements_artifact + .remove(affected_chunk); + } + for runtime_chunk in &runtime_chunks { + compilation + .cgc_runtime_requirements_artifact + .remove(runtime_chunk); + } + compilation + .cgc_runtime_requirements_artifact + .retain(|chunk, _| compilation.chunk_by_ukey.contains(chunk)); + let logger = compilation.get_logger("rspack.incremental.chunksRuntimeRequirements"); + logger.log(format!( + "{} chunks are affected, {} in total", + affected_chunks.len(), + compilation.chunk_by_ukey.len() + )); + affected_chunks + } else { + compilation.chunk_by_ukey.keys().copied().collect() + }; + compilation + .process_chunks_runtime_requirements( + process_runtime_requirements_chunks, + runtime_chunks, + plugin_driver.clone(), + ) + .await?; + logger.time_end(start); + Ok(()) +} + 
macro_rules! process_runtime_requirement_hook_macro { ($name: ident, $s: ty, $c: ty) => { async fn $name( @@ -45,105 +153,6 @@ macro_rules! process_runtime_requirement_hook_macro { } impl Compilation { - pub async fn runtime_requirements_pass( - &mut self, - plugin_driver: SharedPluginDriver, - ) -> Result<()> { - let logger = self.get_logger("rspack.Compilation"); - let start = logger.time("runtime requirements"); - let process_runtime_requirements_modules = if let Some(mutations) = self - .incremental - .mutations_read(IncrementalPasses::MODULES_RUNTIME_REQUIREMENTS) - && !self.cgm_runtime_requirements_artifact.is_empty() - { - let revoked_modules = mutations.iter().filter_map(|mutation| match mutation { - Mutation::ModuleRemove { module } => Some(*module), - _ => None, - }); - for revoked_module in revoked_modules { - self - .cgm_runtime_requirements_artifact - .remove(&revoked_module); - } - let modules: IdentifierSet = mutations - .iter() - .filter_map(|mutation| match mutation { - Mutation::ModuleSetHashes { module } => Some(*module), - _ => None, - }) - .collect(); - let logger = self.get_logger("rspack.incremental.modulesRuntimeRequirements"); - logger.log(format!( - "{} modules are affected, {} in total", - modules.len(), - self.get_module_graph().modules().len() - )); - modules - } else { - self.cgm_runtime_requirements_artifact = Default::default(); - self.get_module_graph().modules().keys().copied().collect() - }; - self - .process_modules_runtime_requirements( - process_runtime_requirements_modules, - plugin_driver.clone(), - ) - .await?; - let runtime_chunks = self.get_chunk_graph_entries().collect(); - - // Check if CHUNKS_RUNTIME_REQUIREMENTS pass is disabled, and clear artifact if needed - if !self - .incremental - .passes_enabled(IncrementalPasses::CHUNKS_RUNTIME_REQUIREMENTS) - { - self.cgc_runtime_requirements_artifact.clear(); - } - - let process_runtime_requirements_chunks = if let Some(mutations) = self - .incremental - 
.mutations_read(IncrementalPasses::CHUNKS_RUNTIME_REQUIREMENTS) - && !self.cgc_runtime_requirements_artifact.is_empty() - { - let removed_chunks = mutations.iter().filter_map(|mutation| match mutation { - Mutation::ChunkRemove { chunk } => Some(chunk), - _ => None, - }); - for removed_chunk in removed_chunks { - self.cgc_runtime_requirements_artifact.remove(removed_chunk); - } - let affected_chunks = mutations.get_affected_chunks_with_chunk_graph(self); - for affected_chunk in &affected_chunks { - self - .cgc_runtime_requirements_artifact - .remove(affected_chunk); - } - for runtime_chunk in &runtime_chunks { - self.cgc_runtime_requirements_artifact.remove(runtime_chunk); - } - self - .cgc_runtime_requirements_artifact - .retain(|chunk, _| self.chunk_by_ukey.contains(chunk)); - let logger = self.get_logger("rspack.incremental.chunksRuntimeRequirements"); - logger.log(format!( - "{} chunks are affected, {} in total", - affected_chunks.len(), - self.chunk_by_ukey.len() - )); - affected_chunks - } else { - self.chunk_by_ukey.keys().copied().collect() - }; - self - .process_chunks_runtime_requirements( - process_runtime_requirements_chunks, - runtime_chunks, - plugin_driver.clone(), - ) - .await?; - logger.time_end(start); - Ok(()) - } - #[instrument("Compilation:process_modules_runtime_requirements", skip_all)] pub async fn process_modules_runtime_requirements( &mut self, diff --git a/crates/rspack_core/src/compiler/mod.rs b/crates/rspack_core/src/compiler/mod.rs index 9355e8d1e2b8..884dab6ff2ea 100644 --- a/crates/rspack_core/src/compiler/mod.rs +++ b/crates/rspack_core/src/compiler/mod.rs @@ -248,7 +248,7 @@ impl Compiler { // self.compilation.incremental = Incremental::new_hot(self.options.experiments.incremental); // } - self.compile().await?; + self.run_passes().await?; self.old_cache.begin_idle(); self.compile_done().await?; self.cache.after_compile(&self.compilation).await; @@ -257,7 +257,8 @@ impl Compiler { Ok(()) } - async fn build_module_graph(&mut self) -> 
Result<()> { + #[instrument("Compiler:compile", target=TRACING_BENCH_TARGET,skip_all)] + async fn run_passes(&mut self) -> Result<()> { let mut compilation_params = self.new_compilation_params(); // FOR BINDING SAFETY: // Make sure `thisCompilation` hook was called for each `JsCompilation` update before any access to it. @@ -278,6 +279,7 @@ impl Compiler { .await?; let logger = self.compilation.get_logger("rspack.Compiler"); + let make_start = logger.time("make"); let make_hook_start = logger.time("make hook"); self @@ -312,18 +314,7 @@ impl Compiler { .await; logger.time_end(start); - Ok(()) - } - #[instrument("Compiler:compile", target=TRACING_BENCH_TARGET,skip_all)] - async fn compile(&mut self) -> Result<()> { - let logger = self.compilation.get_logger("rspack.Compiler"); - let start = logger.time("seal compilation"); - #[cfg(feature = "debug_tool")] - { - use rspack_util::debug_tool::wait_for_signal; - wait_for_signal("seal compilation"); - } - self.build_module_graph().await?; + let dependencies_diagnostics_artifact = self.compilation.dependencies_diagnostics_artifact.clone(); let async_modules_artifact = self.compilation.async_modules_artifact.clone(); @@ -335,7 +326,37 @@ impl Compiler { ) .await?; self.compilation.extend_diagnostics(diagnostics); - self.compilation.seal(self.plugin_driver.clone()).await?; + + #[cfg(feature = "debug_tool")] + { + use rspack_util::debug_tool::wait_for_signal; + wait_for_signal("seal compilation"); + } + + self + .compilation + .run_passes(self.plugin_driver.clone()) + .await?; + + // Consume plugin driver diagnostic + let plugin_driver_diagnostics = self.plugin_driver.take_diagnostic(); + self + .compilation + .extend_diagnostics(plugin_driver_diagnostics); + + Ok(()) + } + + #[instrument("Compiler:compile", target=TRACING_BENCH_TARGET,skip_all)] + async fn compile(&mut self) -> Result<()> { + let logger = self.compilation.get_logger("rspack.Compiler"); + let start = logger.time("seal compilation"); + #[cfg(feature = 
"debug_tool")] + { + use rspack_util::debug_tool::wait_for_signal; + wait_for_signal("seal compilation"); + } + self.run_passes().await?; logger.time_end(start); // Consume plugin driver diagnostic From b3e2270e629227fb82806ccdb3849698e0531b6f Mon Sep 17 00:00:00 2001 From: hardfist Date: Thu, 15 Jan 2026 01:20:46 +0800 Subject: [PATCH 4/6] refactor: move more into run_passes --- .../src/compilation/rspack_passes.md | 16 ++-- .../rspack_core/src/compilation/run_passes.rs | 52 ++++++++++- crates/rspack_core/src/compiler/mod.rs | 90 ++----------------- 3 files changed, 66 insertions(+), 92 deletions(-) diff --git a/crates/rspack_core/src/compilation/rspack_passes.md b/crates/rspack_core/src/compilation/rspack_passes.md index b7dc1a3eda9c..b849dd6a8545 100644 --- a/crates/rspack_core/src/compilation/rspack_passes.md +++ b/crates/rspack_core/src/compilation/rspack_passes.md @@ -34,14 +34,19 @@ compilation/ └── after_seal/ # afterSeal hook ``` -## Pass Order +## Pass Entry + +- `Compiler::compile` builds `CompilationParams`, fires `thisCompilation` then `compilation` compiler hooks (binding safety for JS), and delegates to `Compilation::run_passes`. +- `Compilation::run_passes` performs the make and seal stages using the order below. + +## Pass Order (Compilation::run_passes) `run_passes` orchestrates the full pipeline (make + seal) in this order: -1. Compiler hooks: `thisCompilation` → `compilation` -2. Make: `make` hook → `build_module_graph` → `finish_make` hook → `finish_build_module_graph` -3. Collect make diagnostics (`collect_build_module_graph_effects`) -4. Seal kickoff: `CompilationHooks::seal` (checkpoint + cache freeze) +1. Make: `make` hook → `build_module_graph` → `finish_make` hook → `finish_build_module_graph` +2. Collect make diagnostics (`collect_build_module_graph_effects`) +3. Incremental checkpoint (`module_graph`), freeze module static cache in production +4. Seal kickoff: `CompilationHooks::seal` 5. `optimize_dependencies_pass` 6. 
`build_chunk_graph_pass` → `optimize_modules_pass` → `optimize_chunks_pass` 7. `optimize_tree_pass` → `optimize_chunk_modules_pass` @@ -55,3 +60,4 @@ compilation/ 15. `create_chunk_assets_pass` 16. `process_assets_pass` 17. `after_seal_pass` +18. Unfreeze module static cache in production diff --git a/crates/rspack_core/src/compilation/run_passes.rs b/crates/rspack_core/src/compilation/run_passes.rs index 7c6bc5777847..6d2e8f65b189 100644 --- a/crates/rspack_core/src/compilation/run_passes.rs +++ b/crates/rspack_core/src/compilation/run_passes.rs @@ -10,10 +10,50 @@ use super::{ optimize_tree::optimize_tree_pass, process_assets::process_assets_pass, runtime_requirements::runtime_requirements_pass, *, }; -use crate::{Compilation, SharedPluginDriver, incremental::IncrementalPasses, logger::Logger}; +use crate::{ + Compilation, SharedPluginDriver, cache::Cache, incremental::IncrementalPasses, logger::Logger, +}; impl Compilation { - pub async fn run_passes(&mut self, plugin_driver: SharedPluginDriver) -> Result<()> { + pub async fn run_passes( + &mut self, + plugin_driver: SharedPluginDriver, + cache: &mut dyn Cache, + ) -> Result<()> { + let logger = self.get_logger("rspack.Compiler"); + + let make_start = logger.time("make"); + let make_hook_start = logger.time("make hook"); + cache + .before_build_module_graph(&mut self.build_module_graph_artifact) + .await; + + plugin_driver.compiler_hooks.make.call(self).await?; + logger.time_end(make_hook_start); + self.build_module_graph().await?; + logger.time_end(make_start); + + let start = logger.time("finish make hook"); + plugin_driver.compiler_hooks.finish_make.call(self).await?; + logger.time_end(start); + + let start = logger.time("finish compilation"); + self.finish_build_module_graph().await?; + cache + .after_build_module_graph(&self.build_module_graph_artifact) + .await; + logger.time_end(start); + + let dependencies_diagnostics_artifact = self.dependencies_diagnostics_artifact.clone(); + let async_modules_artifact 
= self.async_modules_artifact.clone(); + let diagnostics = self + .collect_build_module_graph_effects( + &mut dependencies_diagnostics_artifact.borrow_mut(), + &mut async_modules_artifact.borrow_mut(), + ) + .await?; + self.extend_diagnostics(diagnostics); + // add a checkpoint here since we may modify module graph later in incremental compilation // and we can recover to this checkpoint in the future if self.incremental.passes_enabled(IncrementalPasses::MAKE) { @@ -24,6 +64,12 @@ impl Compilation { self.module_static_cache_artifact.freeze(); } + #[cfg(feature = "debug_tool")] + { + use rspack_util::debug_tool::wait_for_signal; + wait_for_signal("seal compilation"); + } + // https://github.com/webpack/webpack/blob/main/lib/Compilation.js#L2809 plugin_driver .compilation_hooks @@ -34,8 +80,6 @@ impl Compilation { optimize_dependencies_pass(self, plugin_driver.clone()).await?; - let logger = self.get_logger("rspack.Compilation"); - let create_chunks_start = logger.time("create chunks"); build_chunk_graph_pass(self).await?; optimize_modules_pass(self, plugin_driver.clone()).await?; diff --git a/crates/rspack_core/src/compiler/mod.rs b/crates/rspack_core/src/compiler/mod.rs index 884dab6ff2ea..41284ef7d697 100644 --- a/crates/rspack_core/src/compiler/mod.rs +++ b/crates/rspack_core/src/compiler/mod.rs @@ -15,12 +15,13 @@ use tracing::instrument; pub use self::rebuild::CompilationRecords; use crate::{ BoxPlugin, CleanOptions, Compilation, CompilationAsset, CompilerOptions, CompilerPlatform, - ContextModuleFactory, Filename, KeepPattern, Logger, NormalModuleFactory, PluginDriver, + ContextModuleFactory, Filename, KeepPattern, NormalModuleFactory, PluginDriver, ResolverFactory, SharedPluginDriver, cache::{Cache, new_cache}, compilation::build_module_graph::ModuleExecutor, fast_set, include_hash, incremental::{Incremental, IncrementalPasses}, + logger::Logger, old_cache::Cache as OldCache, trim_dir, }; @@ -248,7 +249,7 @@ impl Compiler { // self.compilation.incremental = 
Incremental::new_hot(self.options.experiments.incremental); // } - self.run_passes().await?; + self.compile().await?; self.old_cache.begin_idle(); self.compile_done().await?; self.cache.after_compile(&self.compilation).await; @@ -258,13 +259,9 @@ impl Compiler { Ok(()) } #[instrument("Compiler:compile", target=TRACING_BENCH_TARGET,skip_all)] - async fn run_passes(&mut self) -> Result<()> { + async fn compile(&mut self) -> Result<()> { let mut compilation_params = self.new_compilation_params(); - // FOR BINDING SAFETY: - // Make sure `thisCompilation` hook was called for each `JsCompilation` update before any access to it. - // `JsCompiler` tapped `thisCompilation` to update the `JsCompilation` on the JavaScript side. - // Otherwise, trying to access the old native `JsCompilation` would cause undefined behavior - // as the previous instance might get dropped. + // Make sure `thisCompilation` is emitted before any JS side access to `JsCompilation`. self .plugin_driver .compiler_hooks @@ -279,84 +276,11 @@ impl Compiler { .await?; let logger = self.compilation.get_logger("rspack.Compiler"); - - let make_start = logger.time("make"); - let make_hook_start = logger.time("make hook"); - self - .cache - .before_build_module_graph(&mut self.compilation.build_module_graph_artifact) - .await; - - self - .plugin_driver - .compiler_hooks - .make - .call(&mut self.compilation) - .await?; - logger.time_end(make_hook_start); - self.compilation.build_module_graph().await?; - logger.time_end(make_start); - - let start = logger.time("finish make hook"); - self - .plugin_driver - .compiler_hooks - .finish_make - .call(&mut self.compilation) - .await?; - logger.time_end(start); - - let start = logger.time("finish compilation"); - self.compilation.finish_build_module_graph().await?; - self - .cache - .after_build_module_graph(&self.compilation.build_module_graph_artifact) - .await; - - logger.time_end(start); - - let dependencies_diagnostics_artifact = - 
self.compilation.dependencies_diagnostics_artifact.clone(); - let async_modules_artifact = self.compilation.async_modules_artifact.clone(); - let diagnostics = self - .compilation - .collect_build_module_graph_effects( - &mut dependencies_diagnostics_artifact.borrow_mut(), - &mut async_modules_artifact.borrow_mut(), - ) - .await?; - self.compilation.extend_diagnostics(diagnostics); - - #[cfg(feature = "debug_tool")] - { - use rspack_util::debug_tool::wait_for_signal; - wait_for_signal("seal compilation"); - } - + let start = logger.time("seal compilation"); self .compilation - .run_passes(self.plugin_driver.clone()) + .run_passes(self.plugin_driver.clone(), &mut *self.cache) .await?; - - // Consume plugin driver diagnostic - let plugin_driver_diagnostics = self.plugin_driver.take_diagnostic(); - self - .compilation - .extend_diagnostics(plugin_driver_diagnostics); - - Ok(()) - } - - #[instrument("Compiler:compile", target=TRACING_BENCH_TARGET,skip_all)] - async fn compile(&mut self) -> Result<()> { - let logger = self.compilation.get_logger("rspack.Compiler"); - let start = logger.time("seal compilation"); - #[cfg(feature = "debug_tool")] - { - use rspack_util::debug_tool::wait_for_signal; - wait_for_signal("seal compilation"); - } - self.run_passes().await?; logger.time_end(start); // Consume plugin driver diagnostic From 9020a32ba21c564caa55048f8dec2da552083dae Mon Sep 17 00:00:00 2001 From: hardfist Date: Thu, 15 Jan 2026 01:37:55 +0800 Subject: [PATCH 5/6] chore: fix stats logging --- crates/rspack_core/src/compilation/run_passes.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/rspack_core/src/compilation/run_passes.rs b/crates/rspack_core/src/compilation/run_passes.rs index 6d2e8f65b189..ae062f7dd3cc 100644 --- a/crates/rspack_core/src/compilation/run_passes.rs +++ b/crates/rspack_core/src/compilation/run_passes.rs @@ -69,7 +69,7 @@ impl Compilation { use rspack_util::debug_tool::wait_for_signal; wait_for_signal("seal compilation"); 
} - + let logger = self.get_logger("rspack.Compiler"); // https://github.com/webpack/webpack/blob/main/lib/Compilation.js#L2809 plugin_driver .compilation_hooks From 99c7bedb66162550197fe999908314394ed876ff Mon Sep 17 00:00:00 2001 From: hardfist Date: Thu, 15 Jan 2026 01:42:04 +0800 Subject: [PATCH 6/6] chore: fix stats.logging --- crates/rspack_core/src/compilation/run_passes.rs | 2 +- crates/rspack_core/src/compiler/mod.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/rspack_core/src/compilation/run_passes.rs b/crates/rspack_core/src/compilation/run_passes.rs index ae062f7dd3cc..d6be8a9c4fba 100644 --- a/crates/rspack_core/src/compilation/run_passes.rs +++ b/crates/rspack_core/src/compilation/run_passes.rs @@ -69,7 +69,7 @@ impl Compilation { use rspack_util::debug_tool::wait_for_signal; wait_for_signal("seal compilation"); } - let logger = self.get_logger("rspack.Compiler"); + let logger = self.get_logger("rspack.Compilation"); // https://github.com/webpack/webpack/blob/main/lib/Compilation.js#L2809 plugin_driver .compilation_hooks diff --git a/crates/rspack_core/src/compiler/mod.rs b/crates/rspack_core/src/compiler/mod.rs index 41284ef7d697..5ae6b5e7021e 100644 --- a/crates/rspack_core/src/compiler/mod.rs +++ b/crates/rspack_core/src/compiler/mod.rs @@ -15,8 +15,8 @@ use tracing::instrument; pub use self::rebuild::CompilationRecords; use crate::{ BoxPlugin, CleanOptions, Compilation, CompilationAsset, CompilerOptions, CompilerPlatform, - ContextModuleFactory, Filename, KeepPattern, NormalModuleFactory, PluginDriver, - ResolverFactory, SharedPluginDriver, + ContextModuleFactory, Filename, KeepPattern, NormalModuleFactory, PluginDriver, ResolverFactory, + SharedPluginDriver, cache::{Cache, new_cache}, compilation::build_module_graph::ModuleExecutor, fast_set, include_hash,