Auto merge of #61872 - matthewjasper:refactor-mir-drop-gen, r=nikomatsakis

Clean up MIR drop generation

* Don't assign twice to the destination of a `while` loop containing a `break` expression
* Use `as_temp` to evaluate statement expression
* Avoid consecutive `StorageLive`s for the condition of a `while` loop
* Unify `return`, `break` and `continue` handling, and move it to `scopes.rs`
* Make some of the `scopes.rs` internals private
* Don't use `Place`s that are always `Local`s in MIR drop generation

Closes #42371
Closes #61579
Closes #61731
Closes #61834
Closes #61910
Closes #62115
This commit is contained in:
bors 2019-06-26 04:42:34 +00:00
commit d3e2cec292
27 changed files with 536 additions and 468 deletions

View File

@ -78,7 +78,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let source_info = this.source_info(span);
for stmt in stmts {
let Stmt { kind, opt_destruction_scope, span: stmt_span } = this.hir.mirror(stmt);
let Stmt { kind, opt_destruction_scope } = this.hir.mirror(stmt);
match kind {
StmtKind::Expr { scope, expr } => {
this.block_context.push(BlockFrame::Statement { ignores_expr_result: true });
@ -87,7 +87,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let si = (scope, source_info);
this.in_scope(si, LintLevel::Inherited, |this| {
let expr = this.hir.mirror(expr);
this.stmt_expr(block, expr, Some(stmt_span))
this.stmt_expr(block, expr, Some(scope))
})
}));
}

View File

@ -127,7 +127,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
this.schedule_drop_storage_and_value(
expr_span,
scope,
&Place::from(result),
result,
value.ty,
);
}
@ -559,7 +559,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
this.schedule_drop_storage_and_value(
upvar_span,
temp_lifetime,
&Place::from(temp),
temp,
upvar_ty,
);
}

View File

@ -88,7 +88,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
this.schedule_drop(
expr_span,
temp_lifetime,
temp_place,
temp,
expr_ty,
DropKind::Storage,
);
@ -101,7 +101,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
this.schedule_drop(
expr_span,
temp_lifetime,
temp_place,
temp,
expr_ty,
DropKind::Value,
);

View File

@ -179,20 +179,19 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// conduct the test, if necessary
let body_block;
if let Some(cond_expr) = opt_cond_expr {
let loop_block_end;
let cond = unpack!(
loop_block_end = this.as_local_operand(loop_block, cond_expr)
);
body_block = this.cfg.start_new_block();
let term =
TerminatorKind::if_(this.hir.tcx(), cond, body_block, exit_block);
this.cfg.terminate(loop_block_end, source_info, term);
let cond_expr = this.hir.mirror(cond_expr);
let (true_block, false_block)
= this.test_bool(loop_block, cond_expr, source_info);
body_block = true_block;
// if the test is false, there's no `break` to assign `destination`, so
// we have to do it; this overwrites any `break`-assigned value but it's
// always `()` anyway
this.cfg
.push_assign_unit(exit_block, source_info, destination);
// we have to do it
this.cfg.push_assign_unit(false_block, source_info, destination);
this.cfg.terminate(
false_block,
source_info,
TerminatorKind::Goto { target: exit_block },
);
} else {
body_block = this.cfg.start_new_block();
let diverge_cleanup = this.diverge_cleanup();

View File

@ -1,6 +1,7 @@
use crate::build::scope::BreakableScope;
use crate::build::scope::BreakableTarget;
use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder};
use crate::hair::*;
use rustc::middle::region;
use rustc::mir::*;
impl<'a, 'tcx> Builder<'a, 'tcx> {
@ -8,14 +9,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// If the original expression was an AST statement,
/// (e.g., `some().code(&here());`) then `opt_stmt_span` is the
/// span of that statement (including its semicolon, if any).
/// Diagnostics use this span (which may be larger than that of
/// `expr`) to identify when statement temporaries are dropped.
pub fn stmt_expr(&mut self,
mut block: BasicBlock,
expr: Expr<'tcx>,
opt_stmt_span: Option<StatementSpan>)
-> BlockAnd<()>
{
/// The scope is used if a statement temporary must be dropped.
pub fn stmt_expr(
&mut self,
mut block: BasicBlock,
expr: Expr<'tcx>,
statement_scope: Option<region::Scope>,
) -> BlockAnd<()> {
let this = self;
let expr_span = expr.span;
let source_info = this.source_info(expr.span);
@ -30,7 +30,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
} => {
let value = this.hir.mirror(value);
this.in_scope((region_scope, source_info), lint_level, |this| {
this.stmt_expr(block, value, opt_stmt_span)
this.stmt_expr(block, value, statement_scope)
})
}
ExprKind::Assign { lhs, rhs } => {
@ -98,70 +98,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
block.unit()
}
ExprKind::Continue { label } => {
let BreakableScope {
continue_block,
region_scope,
..
} = *this.find_breakable_scope(expr_span, label);
let continue_block = continue_block
.expect("Attempted to continue in non-continuable breakable block");
this.exit_scope(
expr_span,
(region_scope, source_info),
block,
continue_block,
);
this.cfg.start_new_block().unit()
this.break_scope(block, None, BreakableTarget::Continue(label), source_info)
}
ExprKind::Break { label, value } => {
let (break_block, region_scope, destination) = {
let BreakableScope {
break_block,
region_scope,
ref break_destination,
..
} = *this.find_breakable_scope(expr_span, label);
(break_block, region_scope, break_destination.clone())
};
if let Some(value) = value {
debug!("stmt_expr Break val block_context.push(SubExpr) : {:?}", expr2);
this.block_context.push(BlockFrame::SubExpr);
unpack!(block = this.into(&destination, block, value));
this.block_context.pop();
} else {
this.cfg.push_assign_unit(block, source_info, &destination)
}
this.exit_scope(expr_span, (region_scope, source_info), block, break_block);
this.cfg.start_new_block().unit()
this.break_scope(block, value, BreakableTarget::Break(label), source_info)
}
ExprKind::Return { value } => {
block = match value {
Some(value) => {
debug!("stmt_expr Return val block_context.push(SubExpr) : {:?}", expr2);
this.block_context.push(BlockFrame::SubExpr);
let result = unpack!(
this.into(
&Place::RETURN_PLACE,
block,
value
)
);
this.block_context.pop();
result
}
None => {
this.cfg.push_assign_unit(
block,
source_info,
&Place::RETURN_PLACE,
);
block
}
};
let region_scope = this.region_scope_of_return_scope();
let return_block = this.return_block();
this.exit_scope(expr_span, (region_scope, source_info), block, return_block);
this.cfg.start_new_block().unit()
this.break_scope(block, value, BreakableTarget::Return, source_info)
}
ExprKind::InlineAsm {
asm,
@ -199,7 +142,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
block.unit()
}
_ => {
let expr_ty = expr.ty;
assert!(
statement_scope.is_some(),
"Should not be calling `stmt_expr` on a general expression \
without a statement scope",
);
// Issue #54382: When creating temp for the value of
// expression like:
@ -208,48 +155,34 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
//
// it is usually better to focus on `the_value` rather
// than the entirety of block(s) surrounding it.
let mut temp_span = expr_span;
let mut temp_in_tail_of_block = false;
if let ExprKind::Block { body } = expr.kind {
if let Some(tail_expr) = &body.expr {
let mut expr = tail_expr;
while let rustc::hir::ExprKind::Block(subblock, _label) = &expr.node {
if let Some(subtail_expr) = &subblock.expr {
expr = subtail_expr
} else {
break;
let adjusted_span = (|| {
if let ExprKind::Block { body } = expr.kind {
if let Some(tail_expr) = &body.expr {
let mut expr = tail_expr;
while let rustc::hir::ExprKind::Block(subblock, _label) = &expr.node {
if let Some(subtail_expr) = &subblock.expr {
expr = subtail_expr
} else {
break;
}
}
}
temp_span = expr.span;
temp_in_tail_of_block = true;
}
}
let temp = {
let mut local_decl = LocalDecl::new_temp(expr.ty.clone(), temp_span);
if temp_in_tail_of_block {
if this.block_context.currently_ignores_tail_results() {
local_decl = local_decl.block_tail(BlockTailInfo {
this.block_context.push(BlockFrame::TailExpr {
tail_result_is_ignored: true
});
return Some(expr.span);
}
}
let temp = this.local_decls.push(local_decl);
let place = Place::from(temp);
debug!("created temp {:?} for expr {:?} in block_context: {:?}",
temp, expr, this.block_context);
place
};
unpack!(block = this.into(&temp, block, expr));
None
})();
// Attribute drops of the statement's temps to the
// semicolon at the statement's end.
let drop_point = this.hir.tcx().sess.source_map().end_point(match opt_stmt_span {
None => expr_span,
Some(StatementSpan(span)) => span,
});
let temp = unpack!(block =
this.as_temp(block, statement_scope, expr, Mutability::Not));
if let Some(span) = adjusted_span {
this.local_decls[temp].source_info.span = span;
this.block_context.pop();
}
unpack!(block = this.build_drop(block, drop_point, temp, expr_ty));
block.unit()
}
}

View File

@ -531,11 +531,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
kind: StatementKind::StorageLive(local_id),
},
);
let place = Place::from(local_id);
let var_ty = self.local_decls[local_id].ty;
let region_scope = self.hir.region_scope_tree.var_scope(var.local_id);
self.schedule_drop(span, region_scope, &place, var_ty, DropKind::Storage);
place
self.schedule_drop(span, region_scope, local_id, var_ty, DropKind::Storage);
Place::Base(PlaceBase::Local(local_id))
}
pub fn schedule_drop_for_binding(&mut self, var: HirId, span: Span, for_guard: ForGuard) {
@ -545,7 +544,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
self.schedule_drop(
span,
region_scope,
&Place::from(local_id),
local_id,
var_ty,
DropKind::Value,
);
@ -1490,7 +1489,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
};
let source_info = self.source_info(guard.span);
let guard_end = self.source_info(tcx.sess.source_map().end_point(guard.span));
let cond = unpack!(block = self.as_local_operand(block, guard));
let (post_guard_block, otherwise_post_guard_block)
= self.test_bool(block, guard, source_info);
let guard_frame = self.guard_context.pop().unwrap();
debug!(
"Exiting guard building context with locals: {:?}",
@ -1498,7 +1498,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
);
for &(_, temp) in fake_borrows {
self.cfg.push(block, Statement {
self.cfg.push(post_guard_block, Statement {
source_info: guard_end,
kind: StatementKind::FakeRead(
FakeReadCause::ForMatchGuard,
@ -1507,6 +1507,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
});
}
self.exit_scope(
source_info.span,
region_scope,
otherwise_post_guard_block,
candidate.otherwise_block.unwrap(),
);
// We want to ensure that the matched candidates are bound
// after we have confirmed this candidate *and* any
// associated guard; Binding them on `block` is too soon,
@ -1533,41 +1540,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// ```
//
// and that is clearly not correct.
let post_guard_block = self.cfg.start_new_block();
let otherwise_post_guard_block = self.cfg.start_new_block();
self.cfg.terminate(
block,
source_info,
TerminatorKind::if_(
self.hir.tcx(),
cond.clone(),
post_guard_block,
otherwise_post_guard_block,
),
);
self.exit_scope(
source_info.span,
region_scope,
otherwise_post_guard_block,
candidate.otherwise_block.unwrap(),
);
if let Operand::Copy(cond_place) | Operand::Move(cond_place) = cond {
if let Place::Base(PlaceBase::Local(cond_temp)) = cond_place {
// We will call `clear_top_scope` if there's another guard. So
// we have to drop this variable now or it will be "storage
// leaked".
self.pop_variable(
post_guard_block,
region_scope.0,
cond_temp
);
} else {
bug!("Expected as_local_operand to produce a temporary");
}
}
let by_value_bindings = candidate.bindings.iter().filter(|binding| {
if let BindingMode::ByValue = binding.binding_mode { true } else { false }
});
@ -1577,7 +1549,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let local_id = self.var_local_id(binding.var_id, RefWithinGuard);
let place = Place::from(local_id);
self.cfg.push(
block,
post_guard_block,
Statement {
source_info: guard_end,
kind: StatementKind::FakeRead(FakeReadCause::ForGuardBinding, place),
@ -1652,11 +1624,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// denotes *R.
let ref_for_guard =
self.storage_live_binding(block, binding.var_id, binding.span, RefWithinGuard);
// Question: Why schedule drops if bindings are all
// shared-&'s?
// Answer: Because schedule_drop_for_binding also emits
// StorageDead's for those locals.
self.schedule_drop_for_binding(binding.var_id, binding.span, RefWithinGuard);
match binding.binding_mode {
BindingMode::ByValue => {
let rvalue = Rvalue::Ref(re_erased, BorrowKind::Shared, binding.source.clone());
@ -1670,11 +1637,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
binding.span,
OutsideGuard,
);
self.schedule_drop_for_binding(
binding.var_id,
binding.span,
OutsideGuard,
);
let rvalue = Rvalue::Ref(re_erased, borrow_kind, binding.source.clone());
self.cfg

View File

@ -251,7 +251,7 @@ struct Builder<'a, 'tcx> {
/// The current set of scopes, updated as we traverse;
/// see the `scope` module for more details.
scopes: Vec<scope::Scope<'tcx>>,
scopes: scope::Scopes<'tcx>,
/// The block-context: each time we build the code within an hair::Block,
/// we push a frame here tracking whether we are building a statement or
@ -274,10 +274,6 @@ struct Builder<'a, 'tcx> {
/// The number of `push_unsafe_block` levels in scope.
push_unsafe_count: usize,
/// The current set of breakables; see the `scope` module for more
/// details.
breakable_scopes: Vec<scope::BreakableScope<'tcx>>,
/// The vector of all scopes that we have created thus far;
/// we track this for debuginfo later.
source_scopes: IndexVec<SourceScope, SourceScopeData>,
@ -714,7 +710,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
fn_span: span,
arg_count,
is_generator,
scopes: vec![],
scopes: Default::default(),
block_context: BlockContext::new(),
source_scopes: IndexVec::new(),
source_scope: OUTERMOST_SOURCE_SCOPE,
@ -722,7 +718,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
guard_context: vec![],
push_unsafe_count: 0,
unpushed_unsafe: safety,
breakable_scopes: vec![],
local_decls: IndexVec::from_elem_n(
LocalDecl::new_return_place(return_ty, return_span),
1,
@ -814,7 +809,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// Make sure we drop (parts of) the argument even when not matched on.
self.schedule_drop(
pattern.as_ref().map_or(ast_body.span, |pat| pat.span),
argument_scope, &place, ty, DropKind::Value,
argument_scope, local, ty, DropKind::Value,
);
if let Some(pattern) = pattern {
@ -865,7 +860,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
let body = self.hir.mirror(ast_body);
self.into(&Place::RETURN_PLACE, block, body)
// `return_block` is called when we evaluate a `return` expression, so
// we just use `START_BLOCK` here.
self.in_breakable_scope(None, START_BLOCK, Place::RETURN_PLACE, |this| {
this.into(&Place::RETURN_PLACE, block, body)
})
}
fn get_unit_temp(&mut self) -> Place<'tcx> {

View File

@ -82,8 +82,8 @@ should go to.
*/
use crate::build::{BlockAnd, BlockAndExtension, Builder, CFG};
use crate::hair::LintLevel;
use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG};
use crate::hair::{Expr, ExprRef, LintLevel};
use rustc::middle::region;
use rustc::ty::Ty;
use rustc::hir;
@ -94,7 +94,7 @@ use std::collections::hash_map::Entry;
use std::mem;
#[derive(Debug)]
pub struct Scope<'tcx> {
struct Scope {
/// The source scope this scope was created in.
source_scope: SourceScope,
@ -121,7 +121,7 @@ pub struct Scope<'tcx> {
/// out empty but grows as variables are declared during the
/// building process. This is a stack, so we always drop from the
/// end of the vector (top of the stack) first.
drops: Vec<DropData<'tcx>>,
drops: Vec<DropData>,
/// The cache for drop chain on “normal” exit into a particular BasicBlock.
cached_exits: FxHashMap<(BasicBlock, region::Scope), BasicBlock>,
@ -133,13 +133,20 @@ pub struct Scope<'tcx> {
cached_unwind: CachedBlock,
}
#[derive(Debug, Default)]
pub struct Scopes<'tcx> {
scopes: Vec<Scope>,
/// The current set of breakable scopes. See module comment for more details.
breakable_scopes: Vec<BreakableScope<'tcx>>,
}
#[derive(Debug)]
struct DropData<'tcx> {
struct DropData {
/// span where drop obligation was incurred (typically where place was declared)
span: Span,
/// place to drop
location: Place<'tcx>,
/// local to drop
local: Local,
/// Whether this is a value Drop or a StorageDead.
kind: DropKind,
@ -172,17 +179,25 @@ pub(crate) enum DropKind {
}
#[derive(Clone, Debug)]
pub struct BreakableScope<'tcx> {
struct BreakableScope<'tcx> {
/// Region scope of the loop
pub region_scope: region::Scope,
region_scope: region::Scope,
/// Where the body of the loop begins. `None` if this is a block rather than a loop.
pub continue_block: Option<BasicBlock>,
continue_block: Option<BasicBlock>,
/// Block to branch into when the loop or block terminates (either by being `break`-en out
/// from, or by having its condition to become false)
pub break_block: BasicBlock,
break_block: BasicBlock,
/// The destination of the loop/block expression itself (i.e., where to put the result of a
/// `break` expression)
pub break_destination: Place<'tcx>,
break_destination: Place<'tcx>,
}
/// The target of an expression that breaks out of a scope
#[derive(Clone, Copy, Debug)]
pub enum BreakableTarget {
Continue(region::Scope),
Break(region::Scope),
Return,
}
impl CachedBlock {
@ -208,16 +223,7 @@ impl CachedBlock {
}
}
impl DropKind {
fn may_panic(&self) -> bool {
match *self {
DropKind::Value => true,
DropKind::Storage => false
}
}
}
impl<'tcx> Scope<'tcx> {
impl Scope {
/// Invalidates all the cached blocks in the scope.
///
/// Should always be run for all inner scopes when a drop is pushed into some scope enclosing a
@ -257,13 +263,111 @@ impl<'tcx> Scope<'tcx> {
}
}
impl<'tcx> Scopes<'tcx> {
fn len(&self) -> usize {
self.scopes.len()
}
fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo), vis_scope: SourceScope) {
debug!("push_scope({:?})", region_scope);
self.scopes.push(Scope {
source_scope: vis_scope,
region_scope: region_scope.0,
region_scope_span: region_scope.1.span,
needs_cleanup: false,
drops: vec![],
cached_generator_drop: None,
cached_exits: Default::default(),
cached_unwind: CachedBlock::default(),
});
}
fn pop_scope(
&mut self,
region_scope: (region::Scope, SourceInfo),
) -> (Scope, Option<BasicBlock>) {
let scope = self.scopes.pop().unwrap();
assert_eq!(scope.region_scope, region_scope.0);
let unwind_to = self.scopes.last()
.and_then(|next_scope| next_scope.cached_unwind.get(false));
(scope, unwind_to)
}
fn may_panic(&self, scope_count: usize) -> bool {
let len = self.len();
self.scopes[(len - scope_count)..].iter().any(|s| s.needs_cleanup)
}
/// Finds the breakable scope for a given label. This is used for
/// resolving `return`, `break` and `continue`.
fn find_breakable_scope(
&self,
span: Span,
target: BreakableTarget,
) -> (BasicBlock, region::Scope, Option<Place<'tcx>>) {
let get_scope = |scope: region::Scope| {
// find the loop-scope by its `region::Scope`.
self.breakable_scopes.iter()
.rfind(|breakable_scope| breakable_scope.region_scope == scope)
.unwrap_or_else(|| span_bug!(span, "no enclosing breakable scope found"))
};
match target {
BreakableTarget::Return => {
let scope = &self.breakable_scopes[0];
if scope.break_destination != Place::RETURN_PLACE {
span_bug!(span, "`return` in item with no return scope");
}
(scope.break_block, scope.region_scope, Some(scope.break_destination.clone()))
}
BreakableTarget::Break(scope) => {
let scope = get_scope(scope);
(scope.break_block, scope.region_scope, Some(scope.break_destination.clone()))
}
BreakableTarget::Continue(scope) => {
let scope = get_scope(scope);
let continue_block = scope.continue_block
.unwrap_or_else(|| span_bug!(span, "missing `continue` block"));
(continue_block, scope.region_scope, None)
}
}
}
fn num_scopes_to(&self, region_scope: (region::Scope, SourceInfo), span: Span) -> usize {
let scope_count = 1 + self.scopes.iter().rev()
.position(|scope| scope.region_scope == region_scope.0)
.unwrap_or_else(|| {
span_bug!(span, "region_scope {:?} does not enclose", region_scope)
});
let len = self.len();
assert!(scope_count < len, "should not use `exit_scope` to pop ALL scopes");
scope_count
}
fn iter_mut(&mut self) -> impl DoubleEndedIterator<Item=&mut Scope> + '_ {
self.scopes.iter_mut().rev()
}
fn top_scopes(&mut self, count: usize) -> impl DoubleEndedIterator<Item=&mut Scope> + '_ {
let len = self.len();
self.scopes[len - count..].iter_mut()
}
/// Returns the topmost active scope, which is known to be alive until
/// the next scope expression.
fn topmost(&self) -> region::Scope {
self.scopes.last().expect("topmost_scope: no scopes present").region_scope
}
fn source_info(&self, index: usize, span: Span) -> SourceInfo {
self.scopes[self.len() - index].source_info(span)
}
}
impl<'a, 'tcx> Builder<'a, 'tcx> {
// Adding and removing scopes
// ==========================
/// Start a breakable scope, which tracks where `continue` and `break`
/// should branch to. See module comment for more details.
///
/// Returns the might_break attribute of the BreakableScope used.
// Start a breakable scope, which tracks where `continue`, `break` and
// `return` should branch to.
pub fn in_breakable_scope<F, R>(&mut self,
loop_block: Option<BasicBlock>,
break_block: BasicBlock,
@ -271,16 +375,16 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
f: F) -> R
where F: FnOnce(&mut Builder<'a, 'tcx>) -> R
{
let region_scope = self.topmost_scope();
let region_scope = self.scopes.topmost();
let scope = BreakableScope {
region_scope,
continue_block: loop_block,
break_block,
break_destination,
};
self.breakable_scopes.push(scope);
self.scopes.breakable_scopes.push(scope);
let res = f(self);
let breakable_scope = self.breakable_scopes.pop().unwrap();
let breakable_scope = self.scopes.breakable_scopes.pop().unwrap();
assert!(breakable_scope.region_scope == region_scope);
res
}
@ -350,18 +454,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// calls must be paired; using `in_scope` as a convenience
/// wrapper maybe preferable.
pub fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo)) {
debug!("push_scope({:?})", region_scope);
let vis_scope = self.source_scope;
self.scopes.push(Scope {
source_scope: vis_scope,
region_scope: region_scope.0,
region_scope_span: region_scope.1.span,
needs_cleanup: false,
drops: vec![],
cached_generator_drop: None,
cached_exits: Default::default(),
cached_unwind: CachedBlock::default(),
});
self.scopes.push_scope(region_scope, self.source_scope);
}
/// Pops a scope, which should have region scope `region_scope`,
@ -374,17 +467,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
debug!("pop_scope({:?}, {:?})", region_scope, block);
// If we are emitting a `drop` statement, we need to have the cached
// diverge cleanup pads ready in case that drop panics.
let may_panic =
self.scopes.last().unwrap().drops.iter().any(|s| s.kind.may_panic());
if may_panic {
if self.scopes.may_panic(1) {
self.diverge_cleanup();
}
let scope = self.scopes.pop().unwrap();
assert_eq!(scope.region_scope, region_scope.0);
let unwind_to = self.scopes.last().and_then(|next_scope| {
next_scope.cached_unwind.get(false)
}).unwrap_or_else(|| self.resume_block());
let (scope, unwind_to) = self.scopes.pop_scope(region_scope);
let unwind_to = unwind_to.unwrap_or_else(|| self.resume_block());
unpack!(block = build_scope_drops(
&mut self.cfg,
@ -399,6 +486,37 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
block.unit()
}
pub fn break_scope(
&mut self,
mut block: BasicBlock,
value: Option<ExprRef<'tcx>>,
scope: BreakableTarget,
source_info: SourceInfo,
) -> BlockAnd<()> {
let (mut target_block, region_scope, destination)
= self.scopes.find_breakable_scope(source_info.span, scope);
if let BreakableTarget::Return = scope {
// We call this now, rather than when we start lowering the
// function so that the return block doesn't precede the entire
// rest of the CFG. Some passes and LLVM prefer blocks to be in
// approximately CFG order.
target_block = self.return_block();
}
if let Some(destination) = destination {
if let Some(value) = value {
debug!("stmt_expr Break val block_context.push(SubExpr)");
self.block_context.push(BlockFrame::SubExpr);
unpack!(block = self.into(&destination, block, value));
self.block_context.pop();
} else {
self.cfg.push_assign_unit(block, source_info, &destination)
}
} else {
assert!(value.is_none(), "`return` and `break` should have a destination");
}
self.exit_scope(source_info.span, (region_scope, source_info), block, target_block);
self.cfg.start_new_block().unit()
}
/// Branch out of `block` to `target`, exiting all scopes up to
/// and including `region_scope`. This will insert whatever drops are
@ -410,22 +528,16 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
target: BasicBlock) {
debug!("exit_scope(region_scope={:?}, block={:?}, target={:?})",
region_scope, block, target);
let scope_count = 1 + self.scopes.iter().rev()
.position(|scope| scope.region_scope == region_scope.0)
.unwrap_or_else(|| {
span_bug!(span, "region_scope {:?} does not enclose", region_scope)
});
let len = self.scopes.len();
assert!(scope_count < len, "should not use `exit_scope` to pop ALL scopes");
let scope_count = self.scopes.num_scopes_to(region_scope, span);
// If we are emitting a `drop` statement, we need to have the cached
// diverge cleanup pads ready in case that drop panics.
let may_panic = self.scopes[(len - scope_count)..].iter().any(|s| s.needs_cleanup);
let may_panic = self.scopes.may_panic(scope_count);
if may_panic {
self.diverge_cleanup();
}
let mut scopes = self.scopes[(len - scope_count - 1)..].iter_mut().rev();
let mut scopes = self.scopes.top_scopes(scope_count + 1).rev();
let mut scope = scopes.next().unwrap();
for next_scope in scopes {
if scope.drops.is_empty() {
@ -466,9 +578,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
scope = next_scope;
}
let scope = &self.scopes[len - scope_count];
self.cfg.terminate(block, scope.source_info(span),
TerminatorKind::Goto { target });
let source_info = self.scopes.source_info(scope_count, span);
self.cfg.terminate(block, source_info, TerminatorKind::Goto { target });
}
/// Creates a path that performs all required cleanup for dropping a generator.
@ -479,9 +590,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// Fill in the cache for unwinds
self.diverge_cleanup_gen(true);
let src_info = self.scopes[0].source_info(self.fn_span);
let src_info = self.scopes.source_info(self.scopes.len(), self.fn_span);
let resume_block = self.resume_block();
let mut scopes = self.scopes.iter_mut().rev().peekable();
let mut scopes = self.scopes.iter_mut().peekable();
let mut block = self.cfg.start_new_block();
let result = block;
@ -547,22 +658,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
scope
}
// Finding scopes
// ==============
/// Finds the breakable scope for a given label. This is used for
/// resolving `break` and `continue`.
pub fn find_breakable_scope(&self,
span: Span,
label: region::Scope)
-> &BreakableScope<'tcx> {
// find the loop-scope with the correct id
self.breakable_scopes.iter()
.rev()
.filter(|breakable_scope| breakable_scope.region_scope == label)
.next()
.unwrap_or_else(|| span_bug!(span, "no enclosing breakable scope found"))
}
/// Given a span and the current source scope, make a SourceInfo.
pub fn source_info(&self, span: Span) -> SourceInfo {
SourceInfo {
@ -571,25 +666,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
}
/// Returns the `region::Scope` of the scope which should be exited by a
/// return.
pub fn region_scope_of_return_scope(&self) -> region::Scope {
// The outermost scope (`scopes[0]`) will be the `CallSiteScope`.
// We want `scopes[1]`, which is the `ParameterScope`.
assert!(self.scopes.len() >= 2);
assert!(match self.scopes[1].region_scope.data {
region::ScopeData::Arguments => true,
_ => false,
});
self.scopes[1].region_scope
}
/// Returns the topmost active scope, which is known to be alive until
/// the next scope expression.
pub fn topmost_scope(&self) -> region::Scope {
self.scopes.last().expect("topmost_scope: no scopes present").region_scope
}
// Finding scopes
// ==============
/// Returns the scope that we should use as the lifetime of an
/// operand. Basically, an operand must live until it is consumed.
/// This is similar to, but not quite the same as, the temporary
@ -620,33 +698,32 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
None,
hir::BodyOwnerKind::Closure |
hir::BodyOwnerKind::Fn =>
Some(self.topmost_scope()),
Some(self.scopes.topmost()),
}
}
// Schedule an abort block - this is used for some ABIs that cannot unwind
pub fn schedule_abort(&mut self) -> BasicBlock {
self.scopes[0].needs_cleanup = true;
let source_info = self.scopes.source_info(self.scopes.len(), self.fn_span);
let abortblk = self.cfg.start_new_cleanup_block();
let source_info = self.scopes[0].source_info(self.fn_span);
self.cfg.terminate(abortblk, source_info, TerminatorKind::Abort);
self.cached_resume_block = Some(abortblk);
abortblk
}
// Scheduling drops
// ================
pub fn schedule_drop_storage_and_value(
&mut self,
span: Span,
region_scope: region::Scope,
place: &Place<'tcx>,
local: Local,
place_ty: Ty<'tcx>,
) {
self.schedule_drop(span, region_scope, place, place_ty, DropKind::Storage);
self.schedule_drop(span, region_scope, place, place_ty, DropKind::Value);
self.schedule_drop(span, region_scope, local, place_ty, DropKind::Storage);
self.schedule_drop(span, region_scope, local, place_ty, DropKind::Value);
}
// Scheduling drops
// ================
/// Indicates that `place` should be dropped on exit from
/// `region_scope`.
///
@ -656,7 +733,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
&mut self,
span: Span,
region_scope: region::Scope,
place: &Place<'tcx>,
local: Local,
place_ty: Ty<'tcx>,
drop_kind: DropKind,
) {
@ -664,22 +741,17 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
match drop_kind {
DropKind::Value => if !needs_drop { return },
DropKind::Storage => {
match *place {
Place::Base(PlaceBase::Local(index)) => if index.index() <= self.arg_count {
span_bug!(
span, "`schedule_drop` called with index {} and arg_count {}",
index.index(),
self.arg_count,
)
},
_ => span_bug!(
span, "`schedule_drop` called with non-`Local` place {:?}", place
),
if local.index() <= self.arg_count {
span_bug!(
span, "`schedule_drop` called with local {:?} and arg_count {}",
local,
self.arg_count,
)
}
}
}
for scope in self.scopes.iter_mut().rev() {
for scope in self.scopes.iter_mut() {
let this_scope = scope.region_scope == region_scope;
// When building drops, we try to cache chains of drops in such a way so these drops
// could be reused by the drops which would branch into the cached (already built)
@ -740,18 +812,84 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
scope.drops.push(DropData {
span: scope_end,
location: place.clone(),
local,
kind: drop_kind,
cached_block: CachedBlock::default(),
});
return;
}
}
span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, place);
span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local);
}
// Other
// =====
/// Branch based on a boolean condition.
///
/// This is a special case because the temporary for the condition needs to
/// be dropped on both the true and the false arm.
///
/// Returns `(true_block, false_block)`: the successor blocks taken when
/// the condition evaluates to `true` or `false` respectively.
pub fn test_bool(
    &mut self,
    mut block: BasicBlock,
    condition: Expr<'tcx>,
    source_info: SourceInfo,
) -> (BasicBlock, BasicBlock) {
    // Evaluate the condition into an operand (a local temporary or a
    // constant), then terminate `block` with a two-way branch on it.
    let cond = unpack!(block = self.as_local_operand(block, condition));
    let true_block = self.cfg.start_new_block();
    let false_block = self.cfg.start_new_block();
    let term = TerminatorKind::if_(
        self.hir.tcx(),
        cond.clone(),
        true_block,
        false_block,
    );
    self.cfg.terminate(block, source_info, term);

    // The branch has two successors, so the StorageDead for the condition
    // temporary (scheduled on the top scope by `as_local_operand`) cannot
    // be emitted at a single point; pop it and emit it on both arms.
    match cond {
        // Don't try to drop a constant
        Operand::Constant(_) => (),
        // If constants and statics, we don't generate StorageLive for this
        // temporary, so don't try to generate StorageDead for it either.
        _ if self.local_scope().is_none() => (),
        Operand::Copy(Place::Base(PlaceBase::Local(cond_temp)))
        | Operand::Move(Place::Base(PlaceBase::Local(cond_temp))) => {
            // Manually drop the condition on both branches.
            let top_scope = self.scopes.scopes.last_mut().unwrap();
            let top_drop_data = top_scope.drops.pop().unwrap();
            match top_drop_data.kind {
                DropKind::Value { .. } => {
                    // The condition temporary is expected to need only a
                    // storage drop, never drop glue.
                    bug!("Drop scheduled on top of condition variable")
                }
                DropKind::Storage => {
                    let source_info = top_scope.source_info(top_drop_data.span);
                    let local = top_drop_data.local;
                    // The popped drop must belong to the condition
                    // temporary itself, not some other local.
                    assert_eq!(local, cond_temp, "Drop scheduled on top of condition");
                    self.cfg.push(
                        true_block,
                        Statement {
                            source_info,
                            kind: StatementKind::StorageDead(local)
                        },
                    );
                    self.cfg.push(
                        false_block,
                        Statement {
                            source_info,
                            kind: StatementKind::StorageDead(local)
                        },
                    );
                }
            }

            // The scope's drop list changed, so any cached drop chains
            // built from it are stale and must be invalidated.
            top_scope.invalidate_cache(true, self.is_generator, true);
        }
        _ => bug!("Expected as_local_operand to produce a temporary"),
    }

    (true_block, false_block)
}
/// Creates a path that performs all required cleanup for unwinding.
///
/// This path terminates in Resume. Returns the start of the path.
@ -790,14 +928,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// Find the last cached block
debug!("diverge_cleanup_gen(self.scopes = {:?})", self.scopes);
let (mut target, first_uncached) = if let Some(cached_index) = self.scopes.iter()
.rposition(|scope| scope.cached_unwind.get(generator_drop).is_some()) {
(self.scopes[cached_index].cached_unwind.get(generator_drop).unwrap(), cached_index + 1)
} else {
(self.resume_block(), 0)
};
let cached_cleanup = self.scopes.iter_mut().enumerate()
.find_map(|(idx, ref scope)| {
let cached_block = scope.cached_unwind.get(generator_drop)?;
Some((cached_block, idx))
});
let (mut target, first_uncached) = cached_cleanup
.unwrap_or_else(|| (self.resume_block(), self.scopes.len()));
for scope in self.scopes[first_uncached..].iter_mut() {
for scope in self.scopes.top_scopes(first_uncached) {
target = build_diverge_scope(&mut self.cfg, scope.region_scope_span,
scope, target, generator_drop, self.is_generator);
}
@ -805,27 +944,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
target
}
/// Utility function for *non*-scope code to build their own drops
///
/// Terminates `block` with a `Drop` of `location`, unwinding to the
/// shared diverge-cleanup path, and returns the freshly created
/// successor block. If `ty` has no drop glue, no terminator is emitted
/// and `block` is returned unchanged.
pub fn build_drop(&mut self,
                  block: BasicBlock,
                  span: Span,
                  location: Place<'tcx>,
                  ty: Ty<'tcx>) -> BlockAnd<()> {
    // Types that don't need dropping get no Drop terminator at all.
    if !self.hir.needs_drop(ty) {
        return block.unit();
    }
    let source_info = self.source_info(span);
    let next_target = self.cfg.start_new_block();
    // Route panics raised during this drop through the cleanup path.
    let diverge_target = self.diverge_cleanup();
    self.cfg.terminate(block, source_info,
                       TerminatorKind::Drop {
                           location,
                           target: next_target,
                           unwind: Some(diverge_target),
                       });
    next_target.unit()
}
/// Utility function for *non*-scope code to build their own drops
pub fn build_drop_and_replace(&mut self,
block: BasicBlock,
@ -877,73 +995,21 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
///
/// This is only needed for `match` arm scopes, because they have one
/// entrance per pattern, but only one exit.
pub fn clear_top_scope(&mut self, region_scope: region::Scope) {
let top_scope = self.scopes.last_mut().unwrap();
pub(crate) fn clear_top_scope(&mut self, region_scope: region::Scope) {
let top_scope = self.scopes.scopes.last_mut().unwrap();
assert_eq!(top_scope.region_scope, region_scope);
top_scope.drops.clear();
top_scope.invalidate_cache(false, self.is_generator, true);
}
/// Drops the single variable provided
///
/// * The scope must be the top scope.
/// * The variable must be in that scope.
/// * The variable must be at the top of that scope: it's the next thing
///   scheduled to drop.
/// * The drop must be of `DropKind::Storage`.
///
/// This is used for the boolean holding the result of the match guard. We
/// do this because:
///
/// * The boolean is different for each pattern
/// * There is only one exit for the arm scope
/// * The guard expression scope is too short, it ends just before the
///   boolean is tested.
pub fn pop_variable(
    &mut self,
    block: BasicBlock,
    region_scope: region::Scope,
    variable: Local,
) {
    let top_scope = self.scopes.last_mut().unwrap();

    // Enforce the first precondition above: the caller must name the
    // scope currently on top of the stack.
    assert_eq!(top_scope.region_scope, region_scope);

    let top_drop_data = top_scope.drops.pop().unwrap();
    match top_drop_data.kind {
        DropKind::Value { .. } => {
            // Precondition: the scheduled drop must be storage-only.
            bug!("Should not be calling pop_top_variable on non-copy type!")
        }
        DropKind::Storage => {
            // Drop the storage for both value and storage drops.
            // Only temps and vars need their storage dead.
            match top_drop_data.location {
                Place::Base(PlaceBase::Local(index)) => {
                    let source_info = top_scope.source_info(top_drop_data.span);
                    // The popped drop must be for exactly the variable
                    // the caller asked us to pop.
                    assert_eq!(index, variable);
                    self.cfg.push(block, Statement {
                        source_info,
                        kind: StatementKind::StorageDead(index)
                    });
                }
                _ => unreachable!(),
            }
        }
    }

    // The drop list changed, so cached drop chains are now stale.
    top_scope.invalidate_cache(true, self.is_generator, true);
}
}
/// Builds drops for pop_scope and exit_scope.
fn build_scope_drops<'tcx>(
cfg: &mut CFG<'tcx>,
is_generator: bool,
scope: &Scope<'tcx>,
scope: &Scope,
mut block: BasicBlock,
last_unwind_to: BasicBlock,
arg_count: usize,
@ -973,6 +1039,7 @@ fn build_scope_drops<'tcx>(
for drop_idx in (0..scope.drops.len()).rev() {
let drop_data = &scope.drops[drop_idx];
let source_info = scope.source_info(drop_data.span);
let local = drop_data.local;
match drop_data.kind {
DropKind::Value => {
let unwind_to = get_unwind_to(scope, is_generator, drop_idx, generator_drop)
@ -980,32 +1047,27 @@ fn build_scope_drops<'tcx>(
let next = cfg.start_new_block();
cfg.terminate(block, source_info, TerminatorKind::Drop {
location: drop_data.location.clone(),
location: local.into(),
target: next,
unwind: Some(unwind_to)
});
block = next;
}
DropKind::Storage => {
// Drop the storage for both value and storage drops.
// Only temps and vars need their storage dead.
match drop_data.location {
Place::Base(PlaceBase::Local(index)) if index.index() > arg_count => {
cfg.push(block, Statement {
source_info,
kind: StatementKind::StorageDead(index)
});
}
_ => unreachable!(),
}
assert!(local.index() > arg_count);
cfg.push(block, Statement {
source_info,
kind: StatementKind::StorageDead(local)
});
}
}
}
block.unit()
}
fn get_unwind_to<'tcx>(
scope: &Scope<'tcx>,
fn get_unwind_to(
scope: &Scope,
is_generator: bool,
unwind_from: usize,
generator_drop: bool,
@ -1031,7 +1093,7 @@ fn get_unwind_to<'tcx>(
fn build_diverge_scope<'tcx>(cfg: &mut CFG<'tcx>,
span: Span,
scope: &mut Scope<'tcx>,
scope: &mut Scope,
mut target: BasicBlock,
generator_drop: bool,
is_generator: bool)
@ -1075,26 +1137,20 @@ fn build_diverge_scope<'tcx>(cfg: &mut CFG<'tcx>,
// this is not what clang does.
match drop_data.kind {
DropKind::Storage if is_generator => {
// Only temps and vars need their storage dead.
match drop_data.location {
Place::Base(PlaceBase::Local(index)) => {
storage_deads.push(Statement {
source_info: source_info(drop_data.span),
kind: StatementKind::StorageDead(index)
});
if !target_built_by_us {
// We cannot add statements to an existing block, so we create a new
// block for our StorageDead statements.
let block = cfg.start_new_cleanup_block();
let source_info = SourceInfo { span: DUMMY_SP, scope: source_scope };
cfg.terminate(block, source_info,
TerminatorKind::Goto { target: target });
target = block;
target_built_by_us = true;
}
}
_ => unreachable!(),
};
storage_deads.push(Statement {
source_info: source_info(drop_data.span),
kind: StatementKind::StorageDead(drop_data.local)
});
if !target_built_by_us {
// We cannot add statements to an existing block, so we create a new
// block for our StorageDead statements.
let block = cfg.start_new_cleanup_block();
let source_info = SourceInfo { span: DUMMY_SP, scope: source_scope };
cfg.terminate(block, source_info,
TerminatorKind::Goto { target: target });
target = block;
target_built_by_us = true;
}
*drop_data.cached_block.ref_mut(generator_drop) = Some(target);
}
DropKind::Storage => {}
@ -1107,12 +1163,15 @@ fn build_diverge_scope<'tcx>(cfg: &mut CFG<'tcx>,
} else {
push_storage_deads(cfg, target, &mut storage_deads);
let block = cfg.start_new_cleanup_block();
cfg.terminate(block, source_info(drop_data.span),
TerminatorKind::Drop {
location: drop_data.location.clone(),
target,
unwind: None
});
cfg.terminate(
block,
source_info(drop_data.span),
TerminatorKind::Drop {
location: drop_data.local.into(),
target,
unwind: None
},
);
*cached_block = Some(block);
target_built_by_us = true;
block

View File

@ -49,7 +49,6 @@ fn mirror_stmts<'a, 'tcx>(
for (index, stmt) in stmts.iter().enumerate() {
let hir_id = stmt.hir_id;
let opt_dxn_ext = cx.region_scope_tree.opt_destruction_scope(hir_id.local_id);
let stmt_span = StatementSpan(cx.tcx.hir().span(hir_id));
match stmt.node {
hir::StmtKind::Expr(ref expr) |
hir::StmtKind::Semi(ref expr) => {
@ -62,7 +61,6 @@ fn mirror_stmts<'a, 'tcx>(
expr: expr.to_ref(),
},
opt_destruction_scope: opt_dxn_ext,
span: stmt_span,
})))
}
hir::StmtKind::Item(..) => {
@ -107,7 +105,6 @@ fn mirror_stmts<'a, 'tcx>(
lint_level: LintLevel::Explicit(local.hir_id),
},
opt_destruction_scope: opt_dxn_ext,
span: stmt_span,
})));
}
}

View File

@ -55,14 +55,10 @@ pub enum StmtRef<'tcx> {
Mirror(Box<Stmt<'tcx>>),
}
#[derive(Clone, Debug)]
pub struct StatementSpan(pub Span);
#[derive(Clone, Debug)]
pub struct Stmt<'tcx> {
pub kind: StmtKind<'tcx>,
pub opt_destruction_scope: Option<region::Scope>,
pub span: StatementSpan,
}
#[derive(Clone, Debug)]

View File

@ -24,7 +24,7 @@ impl Drop for S {
// let mut _0: ();
// let _1: std::boxed::Box<S>;
// let mut _2: std::boxed::Box<S>;
// let mut _3: ();
// let _3: ();
// let mut _4: std::boxed::Box<S>;
// scope 1 {
// }
@ -50,6 +50,7 @@ impl Drop for S {
//
// bb4: {
// StorageDead(_2);
// StorageLive(_3);
// StorageLive(_4);
// _4 = move _1;
// _3 = const std::mem::drop::<std::boxed::Box<S>>(move _4) -> [return: bb5, unwind: bb7];
@ -69,6 +70,7 @@ impl Drop for S {
//
// bb8: {
// StorageDead(_4);
// StorageDead(_3);
// _0 = ();
// drop(_1) -> bb9;
// }

View File

@ -61,12 +61,14 @@ fn main() {
// END rustc.foo.CopyPropagation.after.mir
// START rustc.bar.CopyPropagation.before.mir
// bb0: {
// StorageLive(_2);
// StorageLive(_3);
// _3 = _1;
// _2 = const dummy(move _3) -> bb1;
// }
// bb1: {
// StorageDead(_3);
// StorageDead(_2);
// _1 = const 5u8;
// ...
// return;

View File

@ -18,6 +18,7 @@ fn main() {
// }
// bb1: {
// StorageDead(_3);
// StorageDead(_2);
// goto -> bb5;
// }
// bb2: {
@ -36,6 +37,7 @@ fn main() {
// goto -> bb3;
// }
// bb7: {
// StorageLive(_2);
// StorageLive(_3);
// goto -> bb1;
// }

View File

@ -54,6 +54,7 @@ fn main() {
// }
// bb2: {
// ...
// StorageLive(_6);
// StorageLive(_7);
// _7 = move _2;
// _6 = const take::<Foo>(move _7) -> [return: bb9, unwind: bb8];
@ -81,16 +82,20 @@ fn main() {
// }
// bb8 (cleanup): {
// StorageDead(_7);
// StorageDead(_6);
// goto -> bb7;
// }
// bb9: {
// StorageDead(_7);
// StorageDead(_6);
// StorageLive(_8);
// StorageLive(_9);
// _9 = move _3;
// _8 = const take::<Bar>(move _9) -> [return: bb10, unwind: bb11];
// }
// bb10: {
// StorageDead(_9);
// StorageDead(_8);
// ...
// StorageDead(_3);
// StorageDead(_2);
@ -98,6 +103,7 @@ fn main() {
// }
// bb11 (cleanup): {
// StorageDead(_9);
// StorageDead(_8);
// goto -> bb7;
// }
// bb12: {

View File

@ -25,6 +25,7 @@ fn main() {
// falseUnwind -> [real: bb3, cleanup: bb1];
// }
// bb3: {
// StorageLive(_3);
// StorageLive(_4);
// _4 = _1;
// FakeRead(ForMatchedPlace, _4);
@ -34,6 +35,7 @@ fn main() {
// bb5: {
// _3 = ();
// StorageDead(_4);
// StorageDead(_3);
// _1 = const true;
// _2 = ();
// goto -> bb2;
@ -41,6 +43,7 @@ fn main() {
// bb6: {
// _0 = ();
// StorageDead(_4);
// StorageDead(_3);
// StorageDead(_1);
// return;
// }

View File

@ -42,7 +42,7 @@ impl S {
// START rustc.test.ElaborateDrops.after.mir
// let mut _0: ();
// let _1: S;
// let mut _3: ();
// let _3: ();
// let mut _4: S;
// let mut _5: S;
// let mut _6: bool;

View File

@ -21,7 +21,7 @@ fn main() {
// let _2: i32;
// let mut _3: bool;
// let mut _4: !;
// let mut _5: ();
// let _5: ();
// let mut _6: &i32;
// scope 1 {
// }
@ -73,12 +73,14 @@ fn main() {
// bb12: {
// FakeRead(ForLet, _2);
// StorageDead(_3);
// StorageLive(_5);
// StorageLive(_6);
// _6 = &_2;
// _5 = const std::mem::drop::<&i32>(move _6) -> [return: bb13, unwind: bb4];
// }
// bb13: {
// StorageDead(_6);
// StorageDead(_5);
// _1 = ();
// StorageDead(_2);
// goto -> bb1;

View File

@ -25,6 +25,7 @@ fn main() {
// bb3: { // Entry into the loop
// _1 = ();
// StorageDead(_2);
// StorageDead(_1);
// goto -> bb5;
// }
// ...

View File

@ -103,10 +103,6 @@ fn main() {
// bb10: { // `else` block - first time
// _9 = (*_6);
// StorageDead(_10);
// FakeRead(ForMatchGuard, _3);
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForGuardBinding, _6);
// FakeRead(ForGuardBinding, _8);
// switchInt(move _9) -> [false: bb16, otherwise: bb15];
// }
// bb11: { // `return 3` - first time
@ -128,6 +124,10 @@ fn main() {
// }
// bb15: {
// StorageDead(_9);
// FakeRead(ForMatchGuard, _3);
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForGuardBinding, _6);
// FakeRead(ForGuardBinding, _8);
// StorageLive(_5);
// _5 = (_2.1: bool);
// StorageLive(_7);
@ -159,10 +159,6 @@ fn main() {
// bb19: { // `else` block - second time
// _12 = (*_6);
// StorageDead(_13);
// FakeRead(ForMatchGuard, _3);
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForGuardBinding, _6);
// FakeRead(ForGuardBinding, _8);
// switchInt(move _12) -> [false: bb22, otherwise: bb21];
// }
// bb20: {
@ -175,6 +171,10 @@ fn main() {
// }
// bb21: { // bindings for arm 1
// StorageDead(_12);
// FakeRead(ForMatchGuard, _3);
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForGuardBinding, _6);
// FakeRead(ForGuardBinding, _8);
// StorageLive(_5);
// _5 = (_2.0: bool);
// StorageLive(_7);

View File

@ -71,12 +71,12 @@ fn main() {
// _7 = const guard() -> [return: bb7, unwind: bb1];
// }
// bb7: { // end of guard
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForGuardBinding, _6);
// switchInt(move _7) -> [false: bb9, otherwise: bb8];
// }
// bb8: { // arm1
// StorageDead(_7);
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForGuardBinding, _6);
// StorageLive(_5);
// _5 = ((_2 as Some).0: i32);
// StorageLive(_8);
@ -138,12 +138,12 @@ fn main() {
// _7 = const guard() -> [return: bb6, unwind: bb1];
// }
// bb6: { // end of guard
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForGuardBinding, _6);
// switchInt(move _7) -> [false: bb8, otherwise: bb7];
// }
// bb7: {
// StorageDead(_7);
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForGuardBinding, _6);
// StorageLive(_5);
// _5 = ((_2 as Some).0: i32);
// StorageLive(_8);
@ -209,12 +209,12 @@ fn main() {
// _8 = const guard() -> [return: bb6, unwind: bb1];
// }
// bb6: { //end of guard1
// FakeRead(ForMatchGuard, _5);
// FakeRead(ForGuardBinding, _7);
// switchInt(move _8) -> [false: bb8, otherwise: bb7];
// }
// bb7: {
// StorageDead(_8);
// FakeRead(ForMatchGuard, _5);
// FakeRead(ForGuardBinding, _7);
// StorageLive(_6);
// _6 = ((_2 as Some).0: i32);
// _1 = const 1i32;
@ -245,12 +245,12 @@ fn main() {
// }
// bb11: { // end of guard2
// StorageDead(_13);
// FakeRead(ForMatchGuard, _5);
// FakeRead(ForGuardBinding, _11);
// switchInt(move _12) -> [false: bb13, otherwise: bb12];
// }
// bb12: { // binding4 & arm4
// StorageDead(_12);
// FakeRead(ForMatchGuard, _5);
// FakeRead(ForGuardBinding, _11);
// StorageLive(_10);
// _10 = ((_2 as Some).0: i32);
// _1 = const 3i32;

View File

@ -54,11 +54,11 @@ fn main() {
// _8 = &shallow _1;
// StorageLive(_9);
// _9 = _2;
// FakeRead(ForMatchGuard, _8);
// switchInt(move _9) -> [false: bb11, otherwise: bb10];
// }
// bb10: {
// StorageDead(_9);
// FakeRead(ForMatchGuard, _8);
// _3 = const 0i32;
// goto -> bb14;
// }
@ -75,6 +75,7 @@ fn main() {
// goto -> bb14;
// }
// bb14: {
// StorageDead(_3);
// _0 = ();
// StorageDead(_2);
// StorageDead(_1);

View File

@ -22,9 +22,9 @@ fn main() {
// END RUST SOURCE
// START rustc.main.nll.0.mir
// | '_#2r | U0 | {bb2[0..=8], bb3[0], bb5[0..=1]}
// | '_#3r | U0 | {bb2[1..=8], bb3[0], bb5[0..=1]}
// | '_#4r | U0 | {bb2[4..=8], bb3[0], bb5[0..=1]}
// | '_#2r | U0 | {bb2[0..=8], bb3[0], bb5[0..=2]}
// | '_#3r | U0 | {bb2[1..=8], bb3[0], bb5[0..=2]}
// | '_#4r | U0 | {bb2[4..=8], bb3[0], bb5[0..=2]}
// END rustc.main.nll.0.mir
// START rustc.main.nll.0.mir
// let _2: &'_#3r usize;

View File

@ -38,14 +38,14 @@ fn main() {
// _7 = &shallow (*(*((_1 as Some).0: &'<empty> &'<empty> i32)));
// StorageLive(_8);
// _8 = _2;
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForMatchGuard, _5);
// FakeRead(ForMatchGuard, _6);
// FakeRead(ForMatchGuard, _7);
// switchInt(move _8) -> [false: bb6, otherwise: bb5];
// }
// bb5: {
// StorageDead(_8);
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForMatchGuard, _5);
// FakeRead(ForMatchGuard, _6);
// FakeRead(ForMatchGuard, _7);
// _0 = const 0i32;
// goto -> bb7;
// }
@ -84,14 +84,14 @@ fn main() {
// nop;
// StorageLive(_8);
// _8 = _2;
// nop;
// nop;
// nop;
// nop;
// switchInt(move _8) -> [false: bb6, otherwise: bb5];
// }
// bb5: {
// StorageDead(_8);
// nop;
// nop;
// nop;
// nop;
// _0 = const 0i32;
// goto -> bb7;
// }

View File

@ -12,6 +12,7 @@ fn main() {
// StorageLive(_1);
// _1 = const 0i32;
// FakeRead(ForLet, _1);
// StorageLive(_2);
// StorageLive(_3);
// StorageLive(_4);
// StorageLive(_5);
@ -23,6 +24,7 @@ fn main() {
// _2 = ();
// StorageDead(_4);
// StorageDead(_3);
// StorageDead(_2);
// StorageLive(_6);
// _6 = const 1i32;
// FakeRead(ForLet, _6);

View File

@ -0,0 +1,59 @@
// Test that we correctly generate StorageDead statements for while loop
// conditions on all branches
// Identity helper: returns its argument unchanged. It exists so the loop
// conditions in `while_loop` are opaque function calls rather than
// constants the compiler could fold away.
fn get_bool(c: bool) -> bool {
    let flag = c;
    flag
}
// Loop whose condition and body both call `get_bool`, with an early
// `break`: this gives the condition temporary three distinct exit paths
// (loop-again, break, condition-false), each of which must get its own
// StorageDead in the MIR asserted below.
fn while_loop(c: bool) {
    while get_bool(c) {
        if get_bool(c) {
            break;
        }
    }
}
// Driver so the test compiles as a program; the expected MIR for
// `while_loop` below is what this test actually checks.
fn main() {
    while_loop(false);
}
// END RUST SOURCE
// START rustc.while_loop.PreCodegen.after.mir
// bb0: {
// StorageLive(_2);
// StorageLive(_3);
// _3 = _1;
// _2 = const get_bool(move _3) -> bb2;
// }
// bb1: {
// return;
// }
// bb2: {
// StorageDead(_3);
// switchInt(move _2) -> [false: bb4, otherwise: bb3];
// }
// bb3: {
// StorageDead(_2);
// StorageLive(_4);
// StorageLive(_5);
// _5 = _1;
// _4 = const get_bool(move _5) -> bb5;
// }
// bb4: {
// StorageDead(_2);
// goto -> bb1;
// }
// bb5: {
// StorageDead(_5);
// switchInt(_4) -> [false: bb6, otherwise: bb7];
// }
// bb6: {
// StorageDead(_4);
// goto -> bb0;
// }
// bb7: {
// StorageDead(_4);
// goto -> bb1;
// }
// END rustc.while_loop.PreCodegen.after.mir

View File

@ -0,0 +1,32 @@
// Test that we don't consider temporaries for statement expressions as live
// across yields
// check-pass
// edition:2018
#![feature(async_await, generators, generator_trait)]
use std::ops::Generator;
// The temporary for the statement expression `async {};` must be dropped
// at the end of its statement, so it is not live across the `.await`
// that follows.
async fn drop_and_await() {
    async {};
    async {}.await;
}
// Same check for generators: the `String` temporary created by the
// statement expression must be dead before the `yield`, in both a
// movable closure and a `static` (immovable) one.
fn drop_and_yield() {
    let x = || {
        String::new();
        yield;
    };
    Box::pin(x).as_mut().resume();
    let y = static || {
        String::new();
        yield;
    };
    Box::pin(y).as_mut().resume();
}
// Call both functions so their bodies are type-checked. The future
// returned by `drop_and_await()` is never polled — this is a check-pass
// test, so compiling is all that matters.
fn main() {
    drop_and_await();
    drop_and_yield();
}

View File

@ -0,0 +1,11 @@
// We used to incorrectly assign to `x` twice when generating MIR for this
// function, preventing this from compiling.
// check-pass
fn main() {
    // A `while` loop has type `()`; binding its result used to make MIR
    // construction assign to `x` twice (see the header comment above).
    let x = while false {
        break;
    };
    // A labelled `break` in the loop *condition* exercises the same path.
    let y = 'l: while break 'l {};
}