Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions turbopack/crates/turbopack-core/src/chunk/chunking/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -280,6 +280,7 @@ async fn batch_chunk_items_with_info_with_type(
}

/// Creates chunks based on heuristics for the passed `chunk_items`.
#[tracing::instrument(level = Level::TRACE, skip_all)]
pub async fn make_chunks(
module_graph: Vc<ModuleGraph>,
chunking_context: ResolvedVc<Box<dyn ChunkingContext>>,
Expand Down
104 changes: 57 additions & 47 deletions turbopack/crates/turbopack-core/src/chunk/chunking/production.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,14 +5,18 @@ use rustc_hash::FxHasher;
use smallvec::SmallVec;
use tracing::{Instrument, field::Empty};
use turbo_prehash::BuildHasherExt;
use turbo_tasks::{FxIndexMap, FxIndexSet, ResolvedVc, TryJoinIterExt, Vc};
use turbo_tasks::{FxIndexMap, FxIndexSet, MappedReadRef, ReadRef, ResolvedVc, TryJoinIterExt, Vc};

use crate::{
chunk::{
ChunkItemBatchGroup, ChunkItemWithAsyncModuleInfo, ChunkingConfig,
ChunkItemBatchGroup, ChunkItemBatchWithAsyncModuleInfo, ChunkItemWithAsyncModuleInfo,
ChunkingConfig,
chunking::{ChunkItemOrBatchWithInfo, SplitContext, make_chunk},
},
module_graph::{ModuleGraph, chunk_group_info::RoaringBitmapWrapper},
module_graph::{
ModuleGraph,
chunk_group_info::{ModuleToChunkGroups, RoaringBitmapWrapper},
},
};

pub async fn make_production_chunks(
Expand All @@ -32,7 +36,7 @@ pub async fn make_production_chunks(
);
let span = span_outer.clone();
async move {
let chunk_group_info = module_graph.chunk_group_info().await?;
let module_chunk_groups = module_graph.chunk_group_info().module_chunk_groups();
let merged_modules = module_graph.merged_modules().await?;

#[derive(Default)]
Expand All @@ -43,56 +47,58 @@ pub async fn make_production_chunks(

let mut grouped_chunk_items = FxIndexMap::<_, GroupedChunkItems<'_>>::default();

// Pre-resolved chunk-group data for one entry of `chunk_items`, computed
// up front so the grouping loop can borrow into the held ReadRefs.
enum Prepared {
// Chunk-groups bitmap looked up for a single chunk item's module.
ChunkItem(MappedReadRef<ModuleToChunkGroups, RoaringBitmapWrapper>),
// Awaited batch; its `chunk_groups` field is read during grouping.
Batch(ReadRef<ChunkItemBatchWithAsyncModuleInfo>),
// Chunk item without a module — contributes no chunk-group key.
None,
}

// Helper Vec to keep ReadRefs on batches and allow references into them
let batch_read_refs = chunk_items
let prepared = chunk_items
.iter()
.copied()
.map(async |item| {
Ok(
if let ChunkItemOrBatchWithInfo::Batch { batch, .. } = item {
Some(batch.await?)
} else {
None
},
)
Ok(match item {
&ChunkItemOrBatchWithInfo::ChunkItem {
chunk_item:
ChunkItemWithAsyncModuleInfo {
module: Some(module),
..
},
..
} => Prepared::ChunkItem(
if let Some(module_chunk_groups) =
module_chunk_groups.get(&ResolvedVc::upcast(module)).await?
{
module_chunk_groups
} else {
// Merged modules don't have a chunk group in chunk_group_info, so
// lookup using the original module.
let original_module = merged_modules
.get_original_module(ResolvedVc::upcast(module))
.context("every module should have a chunk group")?;
module_chunk_groups
.get(&original_module)
.await?
.context("every module should have a chunk group")?
},
),
&ChunkItemOrBatchWithInfo::ChunkItem {
chunk_item: ChunkItemWithAsyncModuleInfo { module: None, .. },
..
} => Prepared::None,
ChunkItemOrBatchWithInfo::Batch { batch, .. } => Prepared::Batch(batch.await?),
})
})
.try_join()
.await?;

let batch_group_read_refs = batch_groups.iter().try_join().await?;

// Put chunk items into `grouped_chunk_items` based on their chunk groups
for (i, chunk_item) in chunk_items.into_iter().enumerate() {
let chunk_groups = match chunk_item {
&ChunkItemOrBatchWithInfo::ChunkItem {
chunk_item:
ChunkItemWithAsyncModuleInfo {
module: Some(module),
..
},
..
} => Some(
chunk_group_info
.module_chunk_groups
.get(&ResolvedVc::upcast(module))
.or_else(|| {
// Merged modules don't have a chunk group in chunk_group_info, so
// lookup using the original module.
merged_modules
.get_original_module(ResolvedVc::upcast(module))
.and_then(|module| {
chunk_group_info.module_chunk_groups.get(&module)
})
})
.context("every module should have a chunk group")?,
),
&ChunkItemOrBatchWithInfo::ChunkItem {
chunk_item: ChunkItemWithAsyncModuleInfo { module: None, .. },
..
} => None,
ChunkItemOrBatchWithInfo::Batch { .. } => {
batch_read_refs[i].as_ref().unwrap().chunk_groups.as_ref()
}
for (chunk_item, prepared) in chunk_items.into_iter().zip(prepared.iter()) {
let chunk_groups = match prepared {
Prepared::None => None,
Prepared::ChunkItem(data) => Some(&**data),
Prepared::Batch(data) => data.chunk_groups.as_ref(),
};
let key = BuildHasherDefault::<FxHasher>::default().prehash(chunk_groups);
grouped_chunk_items
Expand All @@ -102,8 +108,12 @@ pub async fn make_production_chunks(
.push(chunk_item);
}

for (i, batch_group) in batch_groups.into_iter().enumerate() {
let data = &batch_group_read_refs[i].chunk_groups;
let batch_group_read_refs = batch_groups.iter().try_join().await?;

for (batch_group, batch_group_read_ref) in
batch_groups.into_iter().zip(batch_group_read_refs.iter())
{
let data = &batch_group_read_ref.chunk_groups;
let key = BuildHasherDefault::<FxHasher>::default().prehash(Some(data));
grouped_chunk_items.entry(key).or_default().batch_group = Some(batch_group);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -82,9 +82,12 @@ impl Hash for RoaringBitmapWrapper {
}
}

/// Maps each module to the set of chunk groups (as a bitmap) it belongs to.
///
/// Stored as a separate `keyed` cell so that a single module's entry can be
/// read (see the `.get(&module).await` call sites) without taking a
/// dependency on the entire map or on the rest of `ChunkGroupInfo`.
#[turbo_tasks::value(transparent, cell = "keyed")]
pub struct ModuleToChunkGroups(FxHashMap<ResolvedVc<Box<dyn Module>>, RoaringBitmapWrapper>);

#[turbo_tasks::value]
pub struct ChunkGroupInfo {
pub module_chunk_groups: FxHashMap<ResolvedVc<Box<dyn Module>>, RoaringBitmapWrapper>,
pub module_chunk_groups: ResolvedVc<ModuleToChunkGroups>,
#[turbo_tasks(trace_ignore)]
#[bincode(with = "turbo_bincode::indexset")]
pub chunk_groups: FxIndexSet<ChunkGroup>,
Expand All @@ -95,6 +98,11 @@ pub struct ChunkGroupInfo {

#[turbo_tasks::value_impl]
impl ChunkGroupInfo {
/// Returns just the module → chunk-groups map as its own `Vc`, letting
/// callers depend on it without reading the whole `ChunkGroupInfo` value.
#[turbo_tasks::function]
pub fn module_chunk_groups(&self) -> Vc<ModuleToChunkGroups> {
*self.module_chunk_groups
}

#[turbo_tasks::function]
pub async fn get_index_of(&self, chunk_group: ChunkGroup) -> Result<Vc<usize>> {
if let Some(idx) = self.chunk_groups.get_index_of(&chunk_group) {
Expand Down Expand Up @@ -736,7 +744,7 @@ pub async fn compute_chunk_group_info(graph: &ModuleGraph) -> Result<Vc<ChunkGro
}

Ok(ChunkGroupInfo {
module_chunk_groups,
module_chunk_groups: ResolvedVc::cell(module_chunk_groups),
chunk_group_keys: chunk_groups_map.keys().cloned().collect(),
chunk_groups: chunk_groups_map
.into_iter()
Expand Down
25 changes: 11 additions & 14 deletions turbopack/crates/turbopack-core/src/module_graph/module_batches.rs
Original file line number Diff line number Diff line change
Expand Up @@ -257,18 +257,17 @@ impl PreBatches {
fn ensure_pre_batch_for_module(
&mut self,
module: ResolvedVc<Box<dyn Module>>,
chunk_group_info: &ChunkGroupInfo,
module_chunk_groups: &FxHashMap<ResolvedVc<Box<dyn Module>>, RoaringBitmapWrapper>,
queue: &mut VecDeque<(ResolvedVc<Box<dyn Module>>, PreBatchIndex)>,
) -> Result<PreBatchIndex> {
Ok(match self.entries.entry(module) {
Entry::Vacant(e) => {
let index = self.batches.len();
queue.push_back((module, index));
let chunk_groups = chunk_group_info
.module_chunk_groups
let chunk_groups = module_chunk_groups
.get(&module)
.context("all modules need to have chunk group info")?;
let batch = PreBatch::new(chunk_groups.clone());
let batch = PreBatch::new((*chunk_groups).clone());
self.batches.push(batch);
e.insert(index);
index
Expand All @@ -280,7 +279,7 @@ impl PreBatches {
async fn get_pre_batch_items(
&mut self,
entry: ResolvedVc<Box<dyn Module>>,
chunk_group_info: &ChunkGroupInfo,
module_chunk_groups: &FxHashMap<ResolvedVc<Box<dyn Module>>, RoaringBitmapWrapper>,
module_graph: &ModuleGraph,
queue: &mut VecDeque<(ResolvedVc<Box<dyn Module>>, PreBatchIndex)>,
) -> Result<Vec<PreBatchItem>> {
Expand Down Expand Up @@ -312,7 +311,7 @@ impl PreBatches {
if parent_info.is_some() && state.this.boundary_modules.contains(&module) {
let idx = state.this.ensure_pre_batch_for_module(
module,
chunk_group_info,
module_chunk_groups,
queue,
)?;
state.items.push(PreBatchItem::ParallelReference(idx));
Expand Down Expand Up @@ -349,6 +348,7 @@ pub async fn compute_module_batches(
let span = outer_span.clone();
async move {
let chunk_group_info = module_graph.chunk_group_info().await?;
let module_chunk_groups = chunk_group_info.module_chunk_groups.await?;
let module_graph = module_graph.await?;

let mut pre_batches = PreBatches::new();
Expand All @@ -364,12 +364,10 @@ pub async fn compute_module_batches(
return Ok(());
};
if ty.chunking_type.is_parallel() {
let parent_chunk_groups = chunk_group_info
.module_chunk_groups
let parent_chunk_groups = module_chunk_groups
.get(&parent)
.context("all modules need to have chunk group info")?;
let chunk_groups = chunk_group_info
.module_chunk_groups
let chunk_groups = module_chunk_groups
.get(&node)
.context("all modules need to have chunk group info")?;
if parent_chunk_groups != chunk_groups {
Expand Down Expand Up @@ -415,7 +413,7 @@ pub async fn compute_module_batches(
// Start with the entries
for chunk_group in &chunk_group_info.chunk_groups {
for entry in chunk_group.entries() {
pre_batches.ensure_pre_batch_for_module(entry, &chunk_group_info, &mut queue)?;
pre_batches.ensure_pre_batch_for_module(entry, &module_chunk_groups, &mut queue)?;
}
if let Some(parent) = chunk_group.get_merged_parent() {
chunk_group_indices_with_merged_children.insert(parent);
Expand All @@ -428,7 +426,7 @@ pub async fn compute_module_batches(
let items = pre_batches
.get_pre_batch_items(
chunkable_module,
&chunk_group_info,
&module_chunk_groups,
&module_graph,
&mut queue,
)
Expand Down Expand Up @@ -773,8 +771,7 @@ pub async fn compute_module_batches(
batch_groups.entry(key).or_default().push(batch);
}
for &module in &pre_batches.single_module_entries {
let chunk_groups = chunk_group_info
.module_chunk_groups
let chunk_groups = module_chunk_groups
.get(&module)
.context("all modules need to have chunk group info")?;
let key = BuildHasherDefault::<FxHasher>::default().prehash(chunk_groups);
Expand Down
Loading