
Commit 4f1c4e2

don't schedule unnecessary drops when lowering or-patterns
This avoids scheduling drops and immediately unscheduling them. Drops for guard bindings/temporaries are still scheduled and unscheduled as before.
1 parent 2a023bf commit 4f1c4e2
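
For context, a minimal standalone example (hypothetical; not part of this commit) of the kind of arm this lowering handles: an or-pattern with a guard, where each sub-pattern binds the same droppable value and the builder must schedule its drop exactly once.

// Sketch of an or-pattern arm with a guard; the names here are illustrative
// only and do not appear in the compiler change below.
struct Noisy(&'static str);

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

fn main() {
    let pair = (Some(Noisy("a")), None::<Noisy>);
    match pair {
        // One arm, two sub-branches: `x` is bound by either sub-pattern, and
        // the guard reads it through a temporary reference. The MIR builder
        // lowers both sub-branches but only needs to schedule `x`'s drop once.
        (Some(x), _) | (_, Some(x)) if x.0 == "a" => println!("matched {}", x.0),
        _ => println!("no match"),
    }
}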

File tree

  • compiler/rustc_mir_build/src/builder/matches/mod.rs

1 file changed: +29, -28 lines

compiler/rustc_mir_build/src/builder/matches/mod.rs

Lines changed: 29 additions & 28 deletions
@@ -5,11 +5,11 @@
 //! This also includes code for pattern bindings in `let` statements and
 //! function parameters.
 
-use std::assert_matches::assert_matches;
 use std::borrow::Borrow;
 use std::mem;
 use std::sync::Arc;
 
+use itertools::{Itertools, Position};
 use rustc_abi::VariantIdx;
 use rustc_data_structures::fx::FxIndexMap;
 use rustc_data_structures::stack::ensure_sufficient_stack;
@@ -561,16 +561,19 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         // return: it isn't bound by move until right before enter the arm.
         // To handle this we instead unschedule it's drop after each time
         // we lower the guard.
+        // As a result, we end up with the drop order of the last sub-branch we lower. To use
+        // the drop order for the first sub-branch, we lower sub-branches in reverse (#142163).
+        // TODO: I'm saving the breaking change for the next commit. For now, a stopgap:
+        let sub_branch_to_use_the_drops_from =
+            if arm_match_scope.is_some() { Position::Last } else { Position::First };
         let target_block = self.cfg.start_new_block();
-        let mut schedule_drops = ScheduleDrops::Yes;
-        let arm = arm_match_scope.unzip().0;
-        // We keep a stack of all of the bindings and type ascriptions
-        // from the parent candidates that we visit, that also need to
-        // be bound for each candidate.
-        for sub_branch in branch.sub_branches {
-            if let Some(arm) = arm {
-                self.clear_top_scope(arm.scope);
-            }
+        for (pos, sub_branch) in branch.sub_branches.into_iter().with_position() {
+            debug_assert!(pos != Position::Only);
+            let schedule_drops = if pos == sub_branch_to_use_the_drops_from {
+                ScheduleDrops::Yes
+            } else {
+                ScheduleDrops::No
+            };
             let binding_end = self.bind_and_guard_matched_candidate(
                 sub_branch,
                 fake_borrow_temps,
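
The new loop keys drop scheduling off the `Position` tags from `itertools`. A small standalone sketch of `with_position` (assuming itertools 0.11 or newer, which yields `(pos, item)` pairs as destructured in the diff):

// Standalone illustration of Itertools::with_position; the string items
// stand in for sub-branches and are not compiler types.
use itertools::{Itertools, Position};

fn main() {
    for (pos, item) in ["sub-branch 1", "sub-branch 2", "sub-branch 3"].into_iter().with_position() {
        match pos {
            // With a guard (arm_match_scope.is_some()), the diff keeps the drops
            // scheduled while lowering the last sub-branch; without one, the first.
            Position::First => println!("{item}: first"),
            Position::Middle => println!("{item}: middle"),
            Position::Last => println!("{item}: last"),
            Position::Only => println!("{item}: only (the diff debug-asserts this can't happen here)"),
        }
    }
}
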
@@ -579,9 +582,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 schedule_drops,
                 emit_storage_live,
             );
-            if arm.is_none() {
-                schedule_drops = ScheduleDrops::No;
-            }
             self.cfg.goto(binding_end, outer_source_info, target_block);
         }
 
@@ -2453,11 +2453,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 
             // Bindings for guards require some extra handling to automatically
            // insert implicit references/dereferences.
-            self.bind_matched_candidate_for_guard(
-                block,
-                schedule_drops,
-                sub_branch.bindings.iter(),
-            );
+            // This always schedules storage drops, so we may need to unschedule them below.
+            self.bind_matched_candidate_for_guard(block, sub_branch.bindings.iter());
             let guard_frame = GuardFrame {
                 locals: sub_branch
                     .bindings
@@ -2489,6 +2486,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 )
             });
 
+            // If this isn't the final sub-branch being lowered, we need to unschedule drops of
+            // bindings and temporaries created for and by the guard. As a result, the drop order
+            // for the arm will correspond to the binding order of the final sub-branch lowered.
+            if matches!(schedule_drops, ScheduleDrops::No) {
+                self.clear_top_scope(arm.scope);
+            }
+
             let source_info = self.source_info(guard_span);
             let guard_end = self.source_info(tcx.sess.source_map().end_point(guard_span));
             let guard_frame = self.guard_context.pop().unwrap();
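
The comment added above ties the arm's drop order to whichever sub-branch's drops stay scheduled. A hedged, user-level illustration (not from the commit) of a case where the sub-branches bind the same names from different places, so that choice is in principle observable; per the diff's comments, #142163 tracks switching to the first sub-branch's order:

// `a` and `b` are bound from different tuple fields depending on which
// sub-branch matches, so the order in which their drops were scheduled
// determines the drop order at the end of the arm.
struct Loud(&'static str);

impl Drop for Loud {
    fn drop(&mut self) {
        println!("drop {}", self.0);
    }
}

fn main() {
    let triple = (true, Loud("x"), Loud("y"));
    match triple {
        // Both sub-branches are reachable; they bind (a, b) in opposite orders.
        (true, a, b) | (false, b, a) if !a.0.is_empty() => {
            println!("a = {}, b = {}", a.0, b.0);
        } // `a` and `b` drop here, in whichever order the lowering scheduled.
        _ => {}
    }
}
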
@@ -2538,14 +2542,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 let cause = FakeReadCause::ForGuardBinding;
                 self.cfg.push_fake_read(post_guard_block, guard_end, cause, Place::from(local_id));
             }
-            assert_matches!(
-                schedule_drops,
-                ScheduleDrops::Yes,
-                "patterns with guards must schedule drops"
-            );
+            // Only schedule drops for the last sub-branch we lower.
             self.bind_matched_candidate_for_arm_body(
                 post_guard_block,
-                ScheduleDrops::Yes,
+                schedule_drops,
                 by_value_bindings,
                 emit_storage_live,
             );
@@ -2671,7 +2671,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
     fn bind_matched_candidate_for_guard<'b>(
         &mut self,
         block: BasicBlock,
-        schedule_drops: ScheduleDrops,
        bindings: impl IntoIterator<Item = &'b Binding<'tcx>>,
     ) where
         'tcx: 'b,
@@ -2690,12 +2689,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             // a reference R: &T pointing to the location matched by
             // the pattern, and every occurrence of P within a guard
             // denotes *R.
+            // Drops must be scheduled to emit `StorageDead` on the guard's failure/break branches.
             let ref_for_guard = self.storage_live_binding(
                 block,
                 binding.var_id,
                 binding.span,
                 RefWithinGuard,
-                schedule_drops,
+                ScheduleDrops::Yes,
             );
             match binding.binding_mode.0 {
                 ByRef::No => {
@@ -2705,13 +2705,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     self.cfg.push_assign(block, source_info, ref_for_guard, rvalue);
                 }
                 ByRef::Yes(mutbl) => {
-                    // The arm binding will be by reference, so eagerly create it now.
+                    // The arm binding will be by reference, so eagerly create it now. Drops must
+                    // be scheduled to emit `StorageDead` on the guard's failure/break branches.
                     let value_for_arm = self.storage_live_binding(
                         block,
                         binding.var_id,
                         binding.span,
                         OutsideGuard,
-                        schedule_drops,
+                        ScheduleDrops::Yes,
                     );
 
                     let rvalue =
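
The `StorageDead` comments in the last two hunks refer to the guard's failure edge, where the temporary guard reference goes out of scope and control falls through to later arms. A user-level sketch of that path (the storage markers themselves are MIR-internal and not visible from source):

// When the guard `n > 0` fails, control takes the guard's failure branch to
// the next arm; the reference the MIR builder created for `n` inside the
// guard must have its storage ended on that edge, which is why the diff
// passes ScheduleDrops::Yes for guard bindings unconditionally.
fn classify(v: Option<i32>) -> &'static str {
    match v {
        Some(n) if n > 0 => "positive",
        Some(_) => "non-positive",
        None => "none",
    }
}

fn main() {
    assert_eq!(classify(Some(3)), "positive");
    assert_eq!(classify(Some(-3)), "non-positive"); // guard failed, fell through
    assert_eq!(classify(None), "none");
}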
