rspack_plugin_limit_chunk_count/lib.rs
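//! Limits the total number of chunks by repeatedly merging the pair of chunks
//! whose integration saves the most size, until at most `max_chunks` remain.
//! This is the rspack counterpart of webpack's `LimitChunkCountPlugin`.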

mod chunk_combination;

use std::collections::HashSet;

use chunk_combination::{ChunkCombination, ChunkCombinationBucket, ChunkCombinationUkey};
use rspack_collections::{UkeyMap, UkeySet};
use rspack_core::{
  ChunkSizeOptions, ChunkUkey, Compilation, CompilationOptimizeChunks, Plugin,
  compare_chunks_with_graph, incremental::Mutation,
};
use rspack_error::Result;
use rspack_hook::{plugin, plugin_hook};

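/// Record that `key` participates in the combination `value`, creating the
/// per-chunk set on first use.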
fn add_to_set_map(
  map: &mut UkeyMap<ChunkUkey, UkeySet<ChunkCombinationUkey>>,
  key: &ChunkUkey,
  value: ChunkCombinationUkey,
) {
  if let Some(set) = map.get_mut(key) {
    set.insert(value);
  } else {
    let mut set = UkeySet::default();
    set.insert(value);
    map.insert(*key, set);
  }
}

#[derive(Debug, Clone, Default)]
pub struct LimitChunkCountPluginOptions {
  // Constant overhead for a chunk.
  pub chunk_overhead: Option<f64>,
  // Multiplier applied to initial (entry) chunks.
  pub entry_chunk_multiplicator: Option<f64>,
  // Limit the maximum number of chunks; must be greater than or equal to 1.
  pub max_chunks: usize,
}

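/// Merges chunks until the compilation contains at most `options.max_chunks`
/// chunks, preferring merges with the smallest size increase.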
#[plugin]
#[derive(Debug)]
pub struct LimitChunkCountPlugin {
  options: LimitChunkCountPluginOptions,
}

impl LimitChunkCountPlugin {
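  /// Create a plugin instance from the given options.
  ///
  /// A minimal usage sketch; how the instance is registered with a compiler is
  /// left out here, since that wiring lives in the embedding code:
  ///
  /// ```ignore
  /// let plugin = LimitChunkCountPlugin::new(LimitChunkCountPluginOptions {
  ///   chunk_overhead: None,
  ///   entry_chunk_multiplicator: None,
  ///   max_chunks: 5,
  /// });
  /// ```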
  pub fn new(options: LimitChunkCountPluginOptions) -> Self {
    Self::new_inner(options)
  }
}

#[plugin_hook(CompilationOptimizeChunks for LimitChunkCountPlugin, stage = Compilation::OPTIMIZE_CHUNKS_STAGE_ADVANCED)]
async fn optimize_chunks(&self, compilation: &mut Compilation) -> Result<Option<bool>> {
  let max_chunks = self.options.max_chunks;
  if max_chunks < 1 {
    return Ok(None);
  }

  let mut chunks_ukeys = compilation
    .chunk_by_ukey
    .keys()
    .copied()
    .collect::<Vec<_>>();
  if chunks_ukeys.len() <= max_chunks {
    return Ok(None);
  }

  let chunk_by_ukey = &compilation.chunk_by_ukey.clone();
  let chunk_group_by_ukey = &compilation.chunk_group_by_ukey.clone();
  let chunk_graph = &compilation.chunk_graph.clone();
  let mut new_chunk_by_ukey = std::mem::take(&mut compilation.chunk_by_ukey);
  let mut new_chunk_group_by_ukey = std::mem::take(&mut compilation.chunk_group_by_ukey);
  let mut new_chunk_graph = std::mem::take(&mut compilation.chunk_graph);

  let module_graph = compilation.get_module_graph();
  let mut remaining_chunks_to_merge = (chunks_ukeys.len() - max_chunks) as i64;

  // order chunks in a deterministic way
  chunks_ukeys.sort_by(|a, b| compare_chunks_with_graph(chunk_graph, &module_graph, a, b));

  // create a lazy sorted data structure to keep all combinations
  // this is large: size = chunks * (chunks - 1) / 2
  // It uses a multi-layer bucket sort plus a normal sort in the last layer
  // It's also lazy, so only accessed buckets are sorted
  let mut combinations = ChunkCombinationBucket::new();

  // we keep a mapping from chunk to all combinations
  // but this mapping is not kept up-to-date with deletions
  // so the `deleted` flag needs to be considered when iterating it
  let mut combinations_by_chunk: UkeyMap<ChunkUkey, UkeySet<ChunkCombinationUkey>> =
    UkeyMap::default();

  let chunk_size_option = ChunkSizeOptions {
    chunk_overhead: self.options.chunk_overhead,
    entry_chunk_multiplicator: self.options.entry_chunk_multiplicator,
  };

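  // enumerate every pair of chunks that can be integrated (with `a` before `b`
  // in the sorted order) and record the size of the merged result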
  for (b_idx, b) in chunks_ukeys.iter().enumerate() {
    for (a_idx, a) in chunks_ukeys.iter().enumerate().take(b_idx) {
      if !chunk_graph.can_chunks_be_integrated(a, b, chunk_by_ukey, chunk_group_by_ukey) {
        continue;
      }

      let integrated_size = chunk_graph.get_integrated_chunks_size(
        a,
        b,
        &chunk_size_option,
        chunk_by_ukey,
        chunk_group_by_ukey,
        &module_graph,
        compilation,
      );
      let a_size = chunk_graph.get_chunk_size(
        a,
        &chunk_size_option,
        chunk_by_ukey,
        chunk_group_by_ukey,
        &module_graph,
        compilation,
      );
      let b_size = chunk_graph.get_chunk_size(
        b,
        &chunk_size_option,
        chunk_by_ukey,
        chunk_group_by_ukey,
        &module_graph,
        compilation,
      );

      let c = ChunkCombination {
        ukey: ChunkCombinationUkey::new(),
        deleted: false,
        size_diff: a_size + b_size - integrated_size,
        integrated_size,
        a: *a,
        b: *b,
        a_idx,
        b_idx,
        a_size,
        b_size,
      };

      add_to_set_map(&mut combinations_by_chunk, a, c.ukey);
      add_to_set_map(&mut combinations_by_chunk, b, c.ukey);
      combinations.add(c);
    }
  }

  let mut removed_chunks: UkeySet<ChunkUkey> = UkeySet::default();
  let mut integrated_chunks: UkeySet<ChunkUkey> = UkeySet::default();
  // list of chunks modified during this run
  // combinations affected by these changes are skipped to allow
  // further optimizations
  let mut modified_chunks: UkeySet<ChunkUkey> = UkeySet::default();

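  // repeatedly take the best remaining combination (largest size saving) and
  // merge it, until enough chunks have been removed or no combination is left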
  'outer: while let Some(combination_ukey) = combinations.pop_first() {
    let combination = combinations.get_mut(&combination_ukey);
    combination.deleted = true;
    let a = combination.a;
    let b = combination.b;
    let integrated_size = combination.integrated_size;

    // skip over the pair when one of the already merged chunks
    // is a parent of one of the chunks
    if !modified_chunks.is_empty() {
      let a_chunk = chunk_by_ukey.expect_get(&a);
      let b_chunk = chunk_by_ukey.expect_get(&b);
      let mut queue = a_chunk.groups().iter().copied().collect::<HashSet<_>>();
      for group_ukey in b_chunk.groups().iter() {
        queue.insert(*group_ukey);
      }
      for group_ukey in queue.clone() {
        for modified_chunk_ukey in modified_chunks.clone() {
          if let Some(m_chunk) = chunk_by_ukey.get(&modified_chunk_ukey)
            && modified_chunk_ukey != a
            && modified_chunk_ukey != b
            && m_chunk.is_in_group(&group_ukey)
          {
            // this pair would need its sizes recalculated, which is not possible
            // here; reserve space for the merge, treat both chunks as modified
            // for the worst case, and skip this combination
            remaining_chunks_to_merge -= 1;
            if remaining_chunks_to_merge <= 0 {
              break 'outer;
            }
            modified_chunks.insert(a);
            modified_chunks.insert(b);
            continue 'outer;
          }
        }
        if let Some(group) = chunk_group_by_ukey.get(&group_ukey) {
          for parent in group.parents_iterable() {
            queue.insert(*parent);
          }
        }
      }
    }

    if chunk_graph.can_chunks_be_integrated(&a, &b, chunk_by_ukey, chunk_group_by_ukey) {
      new_chunk_graph.integrate_chunks(
        &a,
        &b,
        &mut new_chunk_by_ukey,
        &mut new_chunk_group_by_ukey,
        &module_graph,
      );
      integrated_chunks.insert(a);
      new_chunk_by_ukey.remove(&b);
      removed_chunks.insert(b);

      // flag chunk a as modified as further optimizations are possible for all children here
      modified_chunks.insert(a);

      remaining_chunks_to_merge -= 1;
      if remaining_chunks_to_merge <= 0 {
        break;
      }

      // Update all affected combinations
      // delete all combinations with the removed chunk
      // we will use combinations with the kept chunk instead
      let a_combinations = combinations_by_chunk.get_mut(&a);
      if let Some(a_combinations) = a_combinations {
        for ukey in a_combinations.clone() {
          let combination = combinations.get_mut(&ukey);
          if combination.deleted {
            continue;
          }
          combination.deleted = true;
          combinations.delete(&ukey);
        }
      }

      // Update combinations with the kept chunk with new sizes
      let b_combinations = combinations_by_chunk.get(&b);
      if let Some(b_combinations) = b_combinations {
        for ukey in b_combinations {
          let combination = combinations.get_mut(ukey);
          if combination.deleted {
            continue;
          }
          if combination.a == b {
            if !chunk_graph.can_chunks_be_integrated(
              &a,
              &combination.b,
              chunk_by_ukey,
              chunk_group_by_ukey,
            ) {
              combination.deleted = true;
              combinations.delete(ukey);
              continue;
            }
            // Update size
            let new_integrated_size = chunk_graph.get_integrated_chunks_size(
              &a,
              &combination.b,
              &chunk_size_option,
              chunk_by_ukey,
              chunk_group_by_ukey,
              &module_graph,
              compilation,
            );
            combination.a = a;
            combination.integrated_size = new_integrated_size;
            combination.a_size = integrated_size;
            combination.size_diff = combination.b_size + integrated_size - new_integrated_size;
            combinations.update();
          } else if combination.b == b {
            if !chunk_graph.can_chunks_be_integrated(
              &combination.a,
              &a,
              chunk_by_ukey,
              chunk_group_by_ukey,
            ) {
              combination.deleted = true;
              combinations.delete(ukey);
              continue;
            }
            // Update size
            let new_integrated_size = chunk_graph.get_integrated_chunks_size(
              &combination.a,
              &a,
              &chunk_size_option,
              chunk_by_ukey,
              chunk_group_by_ukey,
              &module_graph,
              compilation,
            );
            combination.b = a;
            combination.integrated_size = new_integrated_size;
            combination.b_size = integrated_size;
            combination.size_diff = integrated_size + combination.a_size - new_integrated_size;
            combinations.update();
          }
        }
      }
      let combinations = combinations_by_chunk
        .remove(&b)
        .expect("chunk combination not found");
      combinations_by_chunk.insert(a, combinations);
    }
  }
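
  // put the (possibly modified) chunk structures back into the compilation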
  compilation.chunk_by_ukey = new_chunk_by_ukey;
  compilation.chunk_group_by_ukey = new_chunk_group_by_ukey;
  compilation.chunk_graph = new_chunk_graph;

  if let Some(mutations) = compilation.incremental.mutations_write() {
    // ChunkRemove mutations must be added last because a chunk can be removed after another chunk
    // has been integrated into it
    for chunk in integrated_chunks {
      mutations.add(Mutation::ChunksIntegrate { to: chunk });
    }
    for chunk in removed_chunks {
      mutations.add(Mutation::ChunkRemove { chunk });
    }
  }

  Ok(None)
}

impl Plugin for LimitChunkCountPlugin {
  fn name(&self) -> &'static str {
    "LimitChunkCountPlugin"
  }

  fn apply(&self, ctx: &mut rspack_core::ApplyContext<'_>) -> Result<()> {
    ctx
      .compilation_hooks
      .optimize_chunks
      .tap(optimize_chunks::new(self));
    Ok(())
  }
}