Diffstat (limited to 'compiler/rustc_monomorphize/src/partitioning/mod.rs')
-rw-r--r--  compiler/rustc_monomorphize/src/partitioning/mod.rs  197
1 file changed, 136 insertions, 61 deletions
diff --git a/compiler/rustc_monomorphize/src/partitioning/mod.rs b/compiler/rustc_monomorphize/src/partitioning/mod.rs
index 18aa0742c..d0b23ca9e 100644
--- a/compiler/rustc_monomorphize/src/partitioning/mod.rs
+++ b/compiler/rustc_monomorphize/src/partitioning/mod.rs
@@ -93,7 +93,6 @@
//! inlining, even when they are not marked `#[inline]`.
mod default;
-mod merging;
use std::cmp;
use std::fs::{self, File};
@@ -106,8 +105,8 @@ use rustc_hir::def_id::{DefIdSet, LOCAL_CRATE};
use rustc_middle::mir;
use rustc_middle::mir::mono::MonoItem;
use rustc_middle::mir::mono::{CodegenUnit, Linkage};
+use rustc_middle::query::Providers;
use rustc_middle::ty::print::with_no_trimmed_paths;
-use rustc_middle::ty::query::Providers;
use rustc_middle::ty::TyCtxt;
use rustc_session::config::{DumpMonoStatsFormat, SwitchWithOptPath};
use rustc_span::symbol::Symbol;
@@ -118,58 +117,135 @@ use crate::errors::{
CouldntDumpMonoStats, SymbolAlreadyDefined, UnknownCguCollectionMode, UnknownPartitionStrategy,
};
-pub struct PartitioningCx<'a, 'tcx> {
+enum Partitioner {
+ Default(default::DefaultPartitioning),
+ // Other partitioning strategies can go here.
+ Unknown,
+}
+
+impl<'tcx> Partition<'tcx> for Partitioner {
+ fn place_root_mono_items<I>(
+ &mut self,
+ cx: &PartitioningCx<'_, 'tcx>,
+ mono_items: &mut I,
+ ) -> PlacedRootMonoItems<'tcx>
+ where
+ I: Iterator<Item = MonoItem<'tcx>>,
+ {
+ match self {
+ Partitioner::Default(partitioner) => partitioner.place_root_mono_items(cx, mono_items),
+ Partitioner::Unknown => cx.tcx.sess.emit_fatal(UnknownPartitionStrategy),
+ }
+ }
+
+ fn merge_codegen_units(
+ &mut self,
+ cx: &PartitioningCx<'_, 'tcx>,
+ codegen_units: &mut Vec<CodegenUnit<'tcx>>,
+ ) {
+ match self {
+ Partitioner::Default(partitioner) => partitioner.merge_codegen_units(cx, codegen_units),
+ Partitioner::Unknown => cx.tcx.sess.emit_fatal(UnknownPartitionStrategy),
+ }
+ }
+
+ fn place_inlined_mono_items(
+ &mut self,
+ cx: &PartitioningCx<'_, 'tcx>,
+ codegen_units: &mut [CodegenUnit<'tcx>],
+ roots: FxHashSet<MonoItem<'tcx>>,
+ ) -> FxHashMap<MonoItem<'tcx>, MonoItemPlacement> {
+ match self {
+ Partitioner::Default(partitioner) => {
+ partitioner.place_inlined_mono_items(cx, codegen_units, roots)
+ }
+ Partitioner::Unknown => cx.tcx.sess.emit_fatal(UnknownPartitionStrategy),
+ }
+ }
+
+ fn internalize_symbols(
+ &mut self,
+ cx: &PartitioningCx<'_, 'tcx>,
+ codegen_units: &mut [CodegenUnit<'tcx>],
+ mono_item_placements: FxHashMap<MonoItem<'tcx>, MonoItemPlacement>,
+ internalization_candidates: FxHashSet<MonoItem<'tcx>>,
+ ) {
+ match self {
+ Partitioner::Default(partitioner) => partitioner.internalize_symbols(
+ cx,
+ codegen_units,
+ mono_item_placements,
+ internalization_candidates,
+ ),
+ Partitioner::Unknown => cx.tcx.sess.emit_fatal(UnknownPartitionStrategy),
+ }
+ }
+}
+
+struct PartitioningCx<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
target_cgu_count: usize,
inlining_map: &'a InliningMap<'tcx>,
}
-trait Partitioner<'tcx> {
- fn place_root_mono_items(
+pub struct PlacedRootMonoItems<'tcx> {
+ codegen_units: Vec<CodegenUnit<'tcx>>,
+ roots: FxHashSet<MonoItem<'tcx>>,
+ internalization_candidates: FxHashSet<MonoItem<'tcx>>,
+}
+
+trait Partition<'tcx> {
+ fn place_root_mono_items<I>(
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
- mono_items: &mut dyn Iterator<Item = MonoItem<'tcx>>,
- ) -> PreInliningPartitioning<'tcx>;
+ mono_items: &mut I,
+ ) -> PlacedRootMonoItems<'tcx>
+ where
+ I: Iterator<Item = MonoItem<'tcx>>;
fn merge_codegen_units(
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
- initial_partitioning: &mut PreInliningPartitioning<'tcx>,
+ codegen_units: &mut Vec<CodegenUnit<'tcx>>,
);
fn place_inlined_mono_items(
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
- initial_partitioning: PreInliningPartitioning<'tcx>,
- ) -> PostInliningPartitioning<'tcx>;
+ codegen_units: &mut [CodegenUnit<'tcx>],
+ roots: FxHashSet<MonoItem<'tcx>>,
+ ) -> FxHashMap<MonoItem<'tcx>, MonoItemPlacement>;
fn internalize_symbols(
&mut self,
cx: &PartitioningCx<'_, 'tcx>,
- partitioning: &mut PostInliningPartitioning<'tcx>,
+ codegen_units: &mut [CodegenUnit<'tcx>],
+ mono_item_placements: FxHashMap<MonoItem<'tcx>, MonoItemPlacement>,
+ internalization_candidates: FxHashSet<MonoItem<'tcx>>,
);
}
-fn get_partitioner<'tcx>(tcx: TyCtxt<'tcx>) -> Box<dyn Partitioner<'tcx>> {
+fn get_partitioner(tcx: TyCtxt<'_>) -> Partitioner {
let strategy = match &tcx.sess.opts.unstable_opts.cgu_partitioning_strategy {
None => "default",
Some(s) => &s[..],
};
match strategy {
- "default" => Box::new(default::DefaultPartitioning),
- _ => {
- tcx.sess.emit_fatal(UnknownPartitionStrategy);
- }
+ "default" => Partitioner::Default(default::DefaultPartitioning),
+ _ => Partitioner::Unknown,
}
}
-pub fn partition<'tcx>(
+fn partition<'tcx, I>(
tcx: TyCtxt<'tcx>,
- mono_items: &mut dyn Iterator<Item = MonoItem<'tcx>>,
+ mono_items: &mut I,
max_cgu_count: usize,
inlining_map: &InliningMap<'tcx>,
-) -> Vec<CodegenUnit<'tcx>> {
+) -> Vec<CodegenUnit<'tcx>>
+where
+ I: Iterator<Item = MonoItem<'tcx>>,
+{
let _prof_timer = tcx.prof.generic_activity("cgu_partitioning");
let mut partitioner = get_partitioner(tcx);
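
A note on the shape of this change: once `place_root_mono_items` takes a generic `I: Iterator` parameter instead of `&mut dyn Iterator`, the `Partition` trait has a generic method and is no longer object-safe, so the old `Box<dyn Partitioner<'tcx>>` return type of `get_partitioner` would not compile. Dispatching through an enum that matches on its variants is the standard workaround, which is what the new `Partitioner` enum above does. A minimal standalone sketch of that pattern, using illustrative names that do not appear in the compiler:

    // Sketch only: a trait with a generic method cannot be turned into a
    // `dyn Trait` object, so callers dispatch through an enum instead.
    trait Strategy {
        // The generic method is what breaks object safety.
        fn run<I: Iterator<Item = u32>>(&mut self, items: I) -> u32;
    }

    struct SumStrategy;

    impl Strategy for SumStrategy {
        fn run<I: Iterator<Item = u32>>(&mut self, items: I) -> u32 {
            items.sum()
        }
    }

    // Enum dispatch: each variant forwards to its concrete implementation,
    // mirroring how `Partitioner::Default` forwards to `DefaultPartitioning`.
    enum AnyStrategy {
        Sum(SumStrategy),
    }

    impl Strategy for AnyStrategy {
        fn run<I: Iterator<Item = u32>>(&mut self, items: I) -> u32 {
            match self {
                AnyStrategy::Sum(s) => s.run(items),
            }
        }
    }

    fn main() {
        let mut s = AnyStrategy::Sum(SumStrategy);
        assert_eq!(s.run(1..=4), 10);
    }
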
@@ -177,40 +253,51 @@ pub fn partition<'tcx>(
// In the first step, we place all regular monomorphizations into their
// respective 'home' codegen unit. Regular monomorphizations are all
// functions and statics defined in the local crate.
- let mut initial_partitioning = {
+ let PlacedRootMonoItems { mut codegen_units, roots, internalization_candidates } = {
let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_place_roots");
partitioner.place_root_mono_items(cx, mono_items)
};
- initial_partitioning.codegen_units.iter_mut().for_each(|cgu| cgu.create_size_estimate(tcx));
+ for cgu in &mut codegen_units {
+ cgu.create_size_estimate(tcx);
+ }
- debug_dump(tcx, "INITIAL PARTITIONING:", initial_partitioning.codegen_units.iter());
+ debug_dump(tcx, "INITIAL PARTITIONING", &codegen_units);
// Merge until we have at most `max_cgu_count` codegen units.
+ // `merge_codegen_units` is responsible for updating the CGU size
+ // estimates.
{
let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_merge_cgus");
- partitioner.merge_codegen_units(cx, &mut initial_partitioning);
- debug_dump(tcx, "POST MERGING:", initial_partitioning.codegen_units.iter());
+ partitioner.merge_codegen_units(cx, &mut codegen_units);
+ debug_dump(tcx, "POST MERGING", &codegen_units);
}
// In the next step, we use the inlining map to determine which additional
// monomorphizations have to go into each codegen unit. These additional
// monomorphizations can be drop-glue, functions from external crates, and
// local functions the definition of which is marked with `#[inline]`.
- let mut post_inlining = {
+ let mono_item_placements = {
let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_place_inline_items");
- partitioner.place_inlined_mono_items(cx, initial_partitioning)
+ partitioner.place_inlined_mono_items(cx, &mut codegen_units, roots)
};
- post_inlining.codegen_units.iter_mut().for_each(|cgu| cgu.create_size_estimate(tcx));
+ for cgu in &mut codegen_units {
+ cgu.create_size_estimate(tcx);
+ }
- debug_dump(tcx, "POST INLINING:", post_inlining.codegen_units.iter());
+ debug_dump(tcx, "POST INLINING", &codegen_units);
// Next we try to make as many symbols "internal" as possible, so LLVM has
// more freedom to optimize.
if !tcx.sess.link_dead_code() {
let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_internalize_symbols");
- partitioner.internalize_symbols(cx, &mut post_inlining);
+ partitioner.internalize_symbols(
+ cx,
+ &mut codegen_units,
+ mono_item_placements,
+ internalization_candidates,
+ );
}
let instrument_dead_code =
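
The merging step in the hunk above leaves `merge_codegen_units` responsible for both reducing the number of CGUs to at most `max_cgu_count` and keeping their size estimates current. Purely as an illustration of that contract (this is not rustc's actual merging heuristic), here is a standalone sketch that folds the smallest unit into the next-smallest until a target count is reached, using a hypothetical `Unit` type unrelated to rustc's `CodegenUnit`:

    struct Unit {
        name: String,
        size_estimate: usize,
    }

    // Illustrative only: repeatedly merge the smallest unit into the
    // next-smallest until at most `target` units remain, updating the
    // surviving unit's size estimate as part of the merge.
    fn merge_until(units: &mut Vec<Unit>, target: usize) {
        assert!(target >= 1);
        while units.len() > target {
            // Sort descending by size so the two smallest units sit at the end.
            units.sort_by_key(|u| std::cmp::Reverse(u.size_estimate));
            let smallest = units.pop().unwrap();
            let survivor = units.last_mut().unwrap();
            survivor.size_estimate += smallest.size_estimate;
            survivor.name.push('-');
            survivor.name.push_str(&smallest.name);
        }
    }

    fn main() {
        let mut units = vec![
            Unit { name: "a".into(), size_estimate: 30 },
            Unit { name: "b".into(), size_estimate: 10 },
            Unit { name: "c".into(), size_estimate: 5 },
        ];
        merge_until(&mut units, 2);
        assert_eq!(units.len(), 2);
        assert_eq!(units[1].size_estimate, 15); // "b" absorbed "c"
    }
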
@@ -218,7 +305,7 @@ pub fn partition<'tcx>(
if instrument_dead_code {
assert!(
- post_inlining.codegen_units.len() > 0,
+ codegen_units.len() > 0,
"There must be at least one CGU that code coverage data can be generated in."
);
@@ -229,7 +316,7 @@ pub fn partition<'tcx>(
// the object file (CGU) containing the dead function stubs is included
// in the final binary. This will probably require forcing these
// function symbols to be included via `-u` or `/include` linker args.
- let mut cgus: Vec<_> = post_inlining.codegen_units.iter_mut().collect();
+ let mut cgus: Vec<_> = codegen_units.iter_mut().collect();
cgus.sort_by_key(|cgu| cgu.size_estimate());
let dead_code_cgu =
@@ -240,27 +327,17 @@ pub fn partition<'tcx>(
} else {
// If there are no CGUs that have externally linked items,
// then we just pick the first CGU as a fallback.
- &mut post_inlining.codegen_units[0]
+ &mut codegen_units[0]
};
dead_code_cgu.make_code_coverage_dead_code_cgu();
}
// Finally, sort by codegen unit name, so that we get deterministic results.
- let PostInliningPartitioning {
- codegen_units: mut result,
- mono_item_placements: _,
- internalization_candidates: _,
- } = post_inlining;
+ codegen_units.sort_by(|a, b| a.name().as_str().cmp(b.name().as_str()));
- result.sort_by(|a, b| a.name().as_str().cmp(b.name().as_str()));
+ debug_dump(tcx, "FINAL", &codegen_units);
- result
-}
-
-pub struct PreInliningPartitioning<'tcx> {
- codegen_units: Vec<CodegenUnit<'tcx>>,
- roots: FxHashSet<MonoItem<'tcx>>,
- internalization_candidates: FxHashSet<MonoItem<'tcx>>,
+ codegen_units
}
/// For symbol internalization, we need to know whether a symbol/mono-item is
@@ -272,39 +349,37 @@ enum MonoItemPlacement {
MultipleCgus,
}
-struct PostInliningPartitioning<'tcx> {
- codegen_units: Vec<CodegenUnit<'tcx>>,
- mono_item_placements: FxHashMap<MonoItem<'tcx>, MonoItemPlacement>,
- internalization_candidates: FxHashSet<MonoItem<'tcx>>,
-}
-
-fn debug_dump<'a, 'tcx, I>(tcx: TyCtxt<'tcx>, label: &str, cgus: I)
-where
- I: Iterator<Item = &'a CodegenUnit<'tcx>>,
- 'tcx: 'a,
-{
+fn debug_dump<'a, 'tcx: 'a>(tcx: TyCtxt<'tcx>, label: &str, cgus: &[CodegenUnit<'tcx>]) {
let dump = move || {
use std::fmt::Write;
+ let num_cgus = cgus.len();
+ let max = cgus.iter().map(|cgu| cgu.size_estimate()).max().unwrap();
+ let min = cgus.iter().map(|cgu| cgu.size_estimate()).min().unwrap();
+ let ratio = max as f64 / min as f64;
+
let s = &mut String::new();
- let _ = writeln!(s, "{label}");
+ let _ = writeln!(
+ s,
+ "{label} ({num_cgus} CodegenUnits, max={max}, min={min}, max/min={ratio:.1}):"
+ );
for cgu in cgus {
let _ =
- writeln!(s, "CodegenUnit {} estimated size {} :", cgu.name(), cgu.size_estimate());
+ writeln!(s, "CodegenUnit {} estimated size {}:", cgu.name(), cgu.size_estimate());
for (mono_item, linkage) in cgu.items() {
let symbol_name = mono_item.symbol_name(tcx).name;
let symbol_hash_start = symbol_name.rfind('h');
let symbol_hash = symbol_hash_start.map_or("<no hash>", |i| &symbol_name[i..]);
- let _ = writeln!(
+ let _ = with_no_trimmed_paths!(writeln!(
s,
" - {} [{:?}] [{}] estimated size {}",
mono_item,
linkage,
symbol_hash,
mono_item.size_estimate(tcx)
- );
+ ));
}
let _ = writeln!(s);
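
The reworked `debug_dump` takes a slice instead of an iterator so it can compute summary statistics up front: the number of CGUs, the largest and smallest size estimates, and their ratio. A small standalone sketch of just that header calculation, fed with hypothetical size estimates rather than real `CodegenUnit`s:

    use std::fmt::Write;

    fn dump_header(label: &str, size_estimates: &[usize]) -> String {
        // Mirrors the shape of the new header line; panics on an empty slice,
        // just as the `unwrap()` calls in the diff assume at least one CGU.
        let num_cgus = size_estimates.len();
        let max = *size_estimates.iter().max().unwrap();
        let min = *size_estimates.iter().min().unwrap();
        let ratio = max as f64 / min as f64;

        let mut s = String::new();
        let _ = writeln!(
            s,
            "{label} ({num_cgus} CodegenUnits, max={max}, min={min}, max/min={ratio:.1}):"
        );
        s
    }

    fn main() {
        let header = dump_header("POST MERGING", &[1200, 300, 450]);
        assert_eq!(
            header,
            "POST MERGING (3 CodegenUnits, max=1200, min=300, max/min=4.0):\n"
        );
    }
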
@@ -380,7 +455,7 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> (&DefIdSet, &[Co
|| {
let mut codegen_units = partition(
tcx,
- &mut items.iter().cloned(),
+ &mut items.iter().copied(),
tcx.sess.codegen_units(),
&inlining_map,
);
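
The last hunk switches `items.iter().cloned()` to `items.iter().copied()`. Both turn an iterator of references into owned values; `copied()` merely requires the item type to be `Copy`, making explicit that no potentially expensive clone is involved. A trivial illustration with a plain slice (nothing rustc-specific):

    fn main() {
        let items = [1u32, 2, 3];

        // `copied()` only compiles when the item type is `Copy`, so it documents
        // that nothing expensive is hiding here; `cloned()` accepts any `Clone`.
        let a: Vec<u32> = items.iter().copied().collect();
        let b: Vec<u32> = items.iter().cloned().collect();

        assert_eq!(a, b);
    }
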