move the drop expansion code to rustc_mir

Ariel Ben-Yehuda 2017-03-09 20:10:05 +02:00
parent e1f3c67cb4
commit 2b9fea1300
15 changed files with 728 additions and 621 deletions

View File

@@ -15,6 +15,7 @@ use rustc::mir::{BasicBlock, Mir};
use rustc_data_structures::bitslice::bits_to_string;
use rustc_data_structures::indexed_set::{IdxSet};
use rustc_data_structures::indexed_vec::Idx;
use rustc_mir::util as mir_util;
use dot;
use dot::IntoCow;
@@ -219,7 +220,7 @@ impl<'a, 'tcx, MWF, P> dot::Labeller<'a> for Graph<'a, 'tcx, MWF, P>
}
Ok(())
}
::rustc_mir::graphviz::write_node_label(
mir_util::write_graphviz_node_label(
*n, self.mbcx.mir(), &mut v, 4,
|w| {
let flow = self.mbcx.flow_state();

View File

@@ -14,10 +14,10 @@ use rustc_data_structures::bitslice::BitSlice; // adds set_bit/get_bit to &[usiz
use rustc_data_structures::bitslice::{BitwiseOperator};
use rustc_data_structures::indexed_set::{IdxSet};
use rustc_data_structures::indexed_vec::Idx;
use rustc_mir::util::elaborate_drops::DropFlagState;
use super::super::gather_moves::{HasMoveData, MoveData, MoveOutIndex, MovePathIndex};
use super::super::MoveDataParamEnv;
use super::super::DropFlagState;
use super::super::drop_flag_effects_for_function_entry;
use super::super::drop_flag_effects_for_location;
use super::super::on_lookup_result_bits;

View File

@@ -13,22 +13,20 @@ use super::dataflow::{MaybeInitializedLvals, MaybeUninitializedLvals};
use super::dataflow::{DataflowResults};
use super::{drop_flag_effects_for_location, on_all_children_bits};
use super::on_lookup_result_bits;
use super::{DropFlagState, MoveDataParamEnv};
use super::patch::MirPatch;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::subst::{Kind, Subst, Substs};
use rustc::ty::util::IntTypeExt;
use super::MoveDataParamEnv;
use rustc::ty::{self, TyCtxt};
use rustc::mir::*;
use rustc::mir::transform::{Pass, MirPass, MirSource};
use rustc::middle::const_val::ConstVal;
use rustc::middle::lang_items;
use rustc::util::nodemap::FxHashMap;
use rustc_data_structures::indexed_set::IdxSetBuf;
use rustc_data_structures::indexed_vec::Idx;
use rustc_mir::util::patch::MirPatch;
use rustc_mir::util::elaborate_drops::{DropFlagState, elaborate_drop};
use rustc_mir::util::elaborate_drops::{DropElaborator, DropStyle, DropFlagMode};
use syntax_pos::Span;
use std::fmt;
use std::iter;
use std::u32;
pub struct ElaborateDrops;
@@ -109,12 +107,116 @@ impl InitializationData {
}
}
impl fmt::Debug for InitializationData {
struct Elaborator<'a, 'b: 'a, 'tcx: 'b> {
init_data: &'a InitializationData,
ctxt: &'a mut ElaborateDropsCtxt<'b, 'tcx>,
}
impl<'a, 'b, 'tcx> fmt::Debug for Elaborator<'a, 'b, 'tcx> {
fn fmt(&self, _f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
Ok(())
}
}
impl<'a, 'b, 'tcx> DropElaborator<'a, 'tcx> for Elaborator<'a, 'b, 'tcx> {
type Path = MovePathIndex;
fn patch(&mut self) -> &mut MirPatch<'tcx> {
&mut self.ctxt.patch
}
fn mir(&self) -> &'a Mir<'tcx> {
self.ctxt.mir
}
fn tcx(&self) -> ty::TyCtxt<'a, 'tcx, 'tcx> {
self.ctxt.tcx
}
fn param_env(&self) -> &'a ty::ParameterEnvironment<'tcx> {
self.ctxt.param_env()
}
fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle {
let ((maybe_live, maybe_dead), multipart) = match mode {
DropFlagMode::Shallow => (self.init_data.state(path), false),
DropFlagMode::Deep => {
let mut some_live = false;
let mut some_dead = false;
let mut children_count = 0;
on_all_children_bits(
self.tcx(), self.mir(), self.ctxt.move_data(),
path, |child| {
if self.ctxt.path_needs_drop(child) {
let (live, dead) = self.init_data.state(child);
debug!("elaborate_drop: state({:?}) = {:?}",
child, (live, dead));
some_live |= live;
some_dead |= dead;
children_count += 1;
}
});
((some_live, some_dead), children_count != 1)
}
};
match (maybe_live, maybe_dead, multipart) {
(false, _, _) => DropStyle::Dead,
(true, false, _) => DropStyle::Static,
(true, true, false) => DropStyle::Conditional,
(true, true, true) => DropStyle::Open,
}
}
fn clear_drop_flag(&mut self, loc: Location, path: Self::Path, mode: DropFlagMode) {
match mode {
DropFlagMode::Shallow => {
self.ctxt.set_drop_flag(loc, path, DropFlagState::Absent);
}
DropFlagMode::Deep => {
on_all_children_bits(
self.tcx(), self.mir(), self.ctxt.move_data(), path,
|child| self.ctxt.set_drop_flag(loc, child, DropFlagState::Absent)
);
}
}
}
fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path> {
super::move_path_children_matching(self.ctxt.move_data(), path, |p| {
match p {
&Projection {
elem: ProjectionElem::Field(idx, _), ..
} => idx == field,
_ => false
}
})
}
fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path> {
super::move_path_children_matching(self.ctxt.move_data(), path, |p| {
match p {
&Projection { elem: ProjectionElem::Deref, .. } => true,
_ => false
}
})
}
fn downcast_subpath(&self, path: Self::Path, variant: usize) -> Option<Self::Path> {
super::move_path_children_matching(self.ctxt.move_data(), path, |p| {
match p {
&Projection {
elem: ProjectionElem::Downcast(_, idx), ..
} => idx == variant,
_ => false
}
})
}
fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>> {
self.ctxt.drop_flag(path).map(Operand::Consume)
}
}
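
The `drop_style` hook above is where the pass-specific dataflow feeds the shared algorithm: for `DropFlagMode::Deep` it ORs the maybe-initialized and maybe-uninitialized bits over every child path that still needs a drop, and more than one such child makes the drop "multipart". Below is a self-contained sketch of just that classification; the types are invented stand-ins for the dataflow results (the real `DropStyle` lives in `rustc_mir::util::elaborate_drops`, further down in this commit).

#[derive(Debug, PartialEq)]
enum DropStyle { Dead, Static, Conditional, Open }

/// `children`: (maybe_live, maybe_dead) for each child path that still needs a drop.
fn deep_drop_style(children: &[(bool, bool)]) -> DropStyle {
    let mut some_live = false;
    let mut some_dead = false;
    for &(live, dead) in children {
        some_live |= live;
        some_dead |= dead;
    }
    let multipart = children.len() != 1;
    match (some_live, some_dead, multipart) {
        (false, _, _) => DropStyle::Dead,              // never initialized: patch the drop out
        (true, false, _) => DropStyle::Static,         // definitely initialized: plain drop
        (true, true, false) => DropStyle::Conditional, // one child, maybe initialized: test the flag
        (true, true, true) => DropStyle::Open,         // partially moved: per-field drop ladder
    }
}

fn main() {
    // One field definitely live, another moved out on some path: open drop.
    assert_eq!(deep_drop_style(&[(true, false), (true, true)]), DropStyle::Open);
    // A single conditionally-initialized path: test its drop flag at runtime.
    assert_eq!(deep_drop_style(&[(true, true)]), DropStyle::Conditional);
}
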
struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
mir: &'a Mir<'tcx>,
@@ -125,19 +227,6 @@ struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
patch: MirPatch<'tcx>,
}
#[derive(Copy, Clone, Debug)]
struct DropCtxt<'a, 'tcx: 'a> {
source_info: SourceInfo,
is_cleanup: bool,
init_data: &'a InitializationData,
lvalue: &'a Lvalue<'tcx>,
path: MovePathIndex,
succ: BasicBlock,
unwind: Option<BasicBlock>
}
impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
fn move_data(&self) -> &'b MoveData<'tcx> { &self.env.move_data }
fn param_env(&self) -> &'b ty::ParameterEnvironment<'tcx> {
@@ -254,19 +343,22 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
let init_data = self.initialization_data_at(loc);
match self.move_data().rev_lookup.find(location) {
LookupResult::Exact(path) => {
self.elaborate_drop(&DropCtxt {
source_info: terminator.source_info,
is_cleanup: data.is_cleanup,
elaborate_drop(
&mut Elaborator {
init_data: &init_data,
lvalue: location,
path: path,
succ: target,
unwind: if data.is_cleanup {
ctxt: self
},
terminator.source_info,
data.is_cleanup,
location,
path,
target,
if data.is_cleanup {
None
} else {
Some(Option::unwrap_or(unwind, resume_block))
}
}, bb);
},
bb)
}
LookupResult::Parent(..) => {
span_bug!(terminator.source_info.span,
@@ -343,15 +435,18 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
debug!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator, path);
let init_data = self.initialization_data_at(loc);
self.elaborate_drop(&DropCtxt {
source_info: terminator.source_info,
is_cleanup: data.is_cleanup,
elaborate_drop(
&mut Elaborator {
init_data: &init_data,
lvalue: location,
path: path,
succ: target,
unwind: Some(unwind)
}, bb);
ctxt: self
},
terminator.source_info,
data.is_cleanup,
location,
path,
target,
Some(unwind),
bb);
on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
self.set_drop_flag(Location { block: target, statement_index: 0 },
child, DropFlagState::Present);
@@ -372,547 +467,6 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
}
}
/// This elaborates a single drop instruction, located at `bb`, and
/// patches over it.
///
/// The elaborated drop checks the drop flags to only drop what
/// is initialized.
///
/// In addition, the relevant drop flags also need to be cleared
/// to avoid double-drops. However, in the middle of a complex
/// drop, one must avoid clearing some of the flags before they
/// are read, as that would cause a memory leak.
///
/// In particular, when dropping an ADT, multiple fields may be
/// joined together under the `rest` subpath. They are all controlled
/// by the primary drop flag, but only the last rest-field dropped
/// should clear it (and it must also not clear anything else).
///
/// FIXME: I think we should just control the flags externally
/// and then we do not need this machinery.
fn elaborate_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, bb: BasicBlock) {
debug!("elaborate_drop({:?})", c);
let mut some_live = false;
let mut some_dead = false;
let mut children_count = 0;
on_all_children_bits(
self.tcx, self.mir, self.move_data(),
c.path, |child| {
if self.path_needs_drop(child) {
let (live, dead) = c.init_data.state(child);
debug!("elaborate_drop: state({:?}) = {:?}",
child, (live, dead));
some_live |= live;
some_dead |= dead;
children_count += 1;
}
});
debug!("elaborate_drop({:?}): live - {:?}", c,
(some_live, some_dead));
match (some_live, some_dead) {
(false, false) | (false, true) => {
// dead drop - patch it out
self.patch.patch_terminator(bb, TerminatorKind::Goto {
target: c.succ
});
}
(true, false) => {
// static drop - just set the flag
self.patch.patch_terminator(bb, TerminatorKind::Drop {
location: c.lvalue.clone(),
target: c.succ,
unwind: c.unwind
});
self.drop_flags_for_drop(c, bb);
}
(true, true) => {
// dynamic drop
let drop_bb = if children_count == 1 || self.must_complete_drop(c) {
self.conditional_drop(c)
} else {
self.open_drop(c)
};
self.patch.patch_terminator(bb, TerminatorKind::Goto {
target: drop_bb
});
}
}
}
/// Return the lvalue and move path for each field of `variant`,
/// (the move path is `None` if the field is a rest field).
fn move_paths_for_fields(&self,
base_lv: &Lvalue<'tcx>,
variant_path: MovePathIndex,
variant: &'tcx ty::VariantDef,
substs: &'tcx Substs<'tcx>)
-> Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>
{
variant.fields.iter().enumerate().map(|(i, f)| {
let subpath =
super::move_path_children_matching(self.move_data(), variant_path, |p| {
match p {
&Projection {
elem: ProjectionElem::Field(idx, _), ..
} => idx.index() == i,
_ => false
}
});
let field_ty =
self.tcx.normalize_associated_type_in_env(
&f.ty(self.tcx, substs),
self.param_env()
);
(base_lv.clone().field(Field::new(i), field_ty), subpath)
}).collect()
}
/// Create one-half of the drop ladder for a list of fields, and return
/// the list of steps in it in reverse order.
///
/// `unwind_ladder` is such a list of steps in reverse order,
/// which is called instead of the next step if the drop unwinds
/// (the first field is never reached). If it is `None`, all
/// unwind targets are left blank.
fn drop_halfladder<'a>(&mut self,
c: &DropCtxt<'a, 'tcx>,
unwind_ladder: Option<Vec<BasicBlock>>,
succ: BasicBlock,
fields: &[(Lvalue<'tcx>, Option<MovePathIndex>)],
is_cleanup: bool)
-> Vec<BasicBlock>
{
let mut unwind_succ = if is_cleanup {
None
} else {
c.unwind
};
let mut succ = self.new_block(
c, c.is_cleanup, TerminatorKind::Goto { target: succ }
);
// Always clear the "master" drop flag at the bottom of the
// ladder. This is needed because the "master" drop flag
// protects the ADT's discriminant, which is invalidated
// after the ADT is dropped.
self.set_drop_flag(
Location { block: succ, statement_index: 0 },
c.path,
DropFlagState::Absent
);
fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
succ = if let Some(path) = path {
debug!("drop_ladder: for std field {} ({:?})", i, lv);
self.elaborated_drop_block(&DropCtxt {
source_info: c.source_info,
is_cleanup: is_cleanup,
init_data: c.init_data,
lvalue: lv,
path: path,
succ: succ,
unwind: unwind_succ,
})
} else {
debug!("drop_ladder: for rest field {} ({:?})", i, lv);
self.complete_drop(&DropCtxt {
source_info: c.source_info,
is_cleanup: is_cleanup,
init_data: c.init_data,
lvalue: lv,
path: c.path,
succ: succ,
unwind: unwind_succ,
}, false)
};
unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);
succ
}).collect()
}
/// Create a full drop ladder, consisting of 2 connected half-drop-ladders
///
/// For example, with 3 fields, the drop ladder is
///
/// .d0:
/// ELAB(drop location.0 [target=.d1, unwind=.c1])
/// .d1:
/// ELAB(drop location.1 [target=.d2, unwind=.c2])
/// .d2:
/// ELAB(drop location.2 [target=`c.succ`, unwind=`c.unwind`])
/// .c1:
/// ELAB(drop location.1 [target=.c2])
/// .c2:
/// ELAB(drop location.2 [target=`c.unwind`])
fn drop_ladder<'a>(&mut self,
c: &DropCtxt<'a, 'tcx>,
fields: Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>)
-> BasicBlock
{
debug!("drop_ladder({:?}, {:?})", c, fields);
let mut fields = fields;
fields.retain(|&(ref lvalue, _)| {
let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
self.tcx.type_needs_drop_given_env(ty, self.param_env())
});
debug!("drop_ladder - fields needing drop: {:?}", fields);
let unwind_ladder = if c.is_cleanup {
None
} else {
Some(self.drop_halfladder(c, None, c.unwind.unwrap(), &fields, true))
};
self.drop_halfladder(c, unwind_ladder, c.succ, &fields, c.is_cleanup)
.last().cloned().unwrap_or(c.succ)
}
fn open_drop_for_tuple<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, tys: &[Ty<'tcx>])
-> BasicBlock
{
debug!("open_drop_for_tuple({:?}, {:?})", c, tys);
let fields = tys.iter().enumerate().map(|(i, &ty)| {
(c.lvalue.clone().field(Field::new(i), ty),
super::move_path_children_matching(
self.move_data(), c.path, |proj| match proj {
&Projection {
elem: ProjectionElem::Field(f, _), ..
} => f.index() == i,
_ => false
}
))
}).collect();
self.drop_ladder(c, fields)
}
fn open_drop_for_box<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, ty: Ty<'tcx>)
-> BasicBlock
{
debug!("open_drop_for_box({:?}, {:?})", c, ty);
let interior_path = super::move_path_children_matching(
self.move_data(), c.path, |proj| match proj {
&Projection { elem: ProjectionElem::Deref, .. } => true,
_ => false
}).unwrap();
let interior = c.lvalue.clone().deref();
let inner_c = DropCtxt {
lvalue: &interior,
unwind: c.unwind.map(|u| {
self.box_free_block(c, ty, u, true)
}),
succ: self.box_free_block(c, ty, c.succ, c.is_cleanup),
path: interior_path,
..*c
};
self.elaborated_drop_block(&inner_c)
}
fn open_drop_for_adt<'a>(&mut self, c: &DropCtxt<'a, 'tcx>,
adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
-> BasicBlock {
debug!("open_drop_for_adt({:?}, {:?}, {:?})", c, adt, substs);
match adt.variants.len() {
1 => {
let fields = self.move_paths_for_fields(
c.lvalue,
c.path,
&adt.variants[0],
substs
);
self.drop_ladder(c, fields)
}
_ => {
let mut values = Vec::with_capacity(adt.variants.len());
let mut blocks = Vec::with_capacity(adt.variants.len());
let mut otherwise = None;
for (variant_index, discr) in adt.discriminants(self.tcx).enumerate() {
let subpath = super::move_path_children_matching(
self.move_data(), c.path, |proj| match proj {
&Projection {
elem: ProjectionElem::Downcast(_, idx), ..
} => idx == variant_index,
_ => false
});
if let Some(variant_path) = subpath {
let base_lv = c.lvalue.clone().elem(
ProjectionElem::Downcast(adt, variant_index)
);
let fields = self.move_paths_for_fields(
&base_lv,
variant_path,
&adt.variants[variant_index],
substs);
values.push(discr);
blocks.push(self.drop_ladder(c, fields));
} else {
// variant not found - drop the entire enum
if let None = otherwise {
otherwise = Some(self.complete_drop(c, true));
}
}
}
if let Some(block) = otherwise {
blocks.push(block);
} else {
values.pop();
}
// If there are multiple variants, then if something
// is present within the enum, the discriminant (tracked
// by the rest path) must be initialized.
//
// Additionally, we do not want to switch on the
// discriminant after it is freed, because that
// way lies only trouble.
let discr_ty = adt.repr.discr_type().to_ty(self.tcx);
let discr = Lvalue::Local(self.patch.new_temp(discr_ty));
let switch_block = self.patch.new_block(BasicBlockData {
statements: vec![
Statement {
source_info: c.source_info,
kind: StatementKind::Assign(discr.clone(),
Rvalue::Discriminant(c.lvalue.clone()))
}
],
terminator: Some(Terminator {
source_info: c.source_info,
kind: TerminatorKind::SwitchInt {
discr: Operand::Consume(discr),
switch_ty: discr_ty,
values: From::from(values),
targets: blocks,
}
}),
is_cleanup: c.is_cleanup,
});
self.drop_flag_test_block(c, switch_block)
}
}
}
/// The slow-path - create an "open", elaborated drop for a type
/// which is moved-out-of only partially, and patch `bb` to a jump
/// to it. This must not be called on ADTs with a destructor,
/// as these can't be moved-out-of, except for `Box<T>`, which is
/// special-cased.
///
/// This creates a "drop ladder" that drops the needed fields of the
/// ADT, both in the success case and when one of the destructors fails.
fn open_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
match ty.sty {
ty::TyClosure(def_id, substs) => {
let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx).collect();
self.open_drop_for_tuple(c, &tys)
}
ty::TyTuple(tys, _) => {
self.open_drop_for_tuple(c, tys)
}
ty::TyAdt(def, _) if def.is_box() => {
self.open_drop_for_box(c, ty.boxed_ty())
}
ty::TyAdt(def, substs) => {
self.open_drop_for_adt(c, def, substs)
}
_ => bug!("open drop from non-ADT `{:?}`", ty)
}
}
/// Returns a basic block that drops an lvalue using the context
/// and path in `c`. If `update_drop_flag` is true, also
/// clear the drop flag for `c.path`.
///
/// if FLAG(c.path)
/// if(update_drop_flag) FLAG(c.path) = false
/// drop(c.lv)
fn complete_drop<'a>(
&mut self,
c: &DropCtxt<'a, 'tcx>,
update_drop_flag: bool)
-> BasicBlock
{
debug!("complete_drop({:?},{:?})", c, update_drop_flag);
let drop_block = self.drop_block(c);
if update_drop_flag {
self.set_drop_flag(
Location { block: drop_block, statement_index: 0 },
c.path,
DropFlagState::Absent
);
}
self.drop_flag_test_block(c, drop_block)
}
/// Create a simple conditional drop.
///
/// if FLAG(c.lv)
/// FLAGS(c.lv) = false
/// drop(c.lv)
fn conditional_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>)
-> BasicBlock
{
debug!("conditional_drop({:?})", c);
let drop_bb = self.drop_block(c);
self.drop_flags_for_drop(c, drop_bb);
self.drop_flag_test_block(c, drop_bb)
}
fn new_block<'a>(&mut self,
c: &DropCtxt<'a, 'tcx>,
is_cleanup: bool,
k: TerminatorKind<'tcx>)
-> BasicBlock
{
self.patch.new_block(BasicBlockData {
statements: vec![],
terminator: Some(Terminator {
source_info: c.source_info, kind: k
}),
is_cleanup: is_cleanup
})
}
fn elaborated_drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
debug!("elaborated_drop_block({:?})", c);
let blk = self.drop_block(c);
self.elaborate_drop(c, blk);
blk
}
fn drop_flag_test_block<'a>(&mut self,
c: &DropCtxt<'a, 'tcx>,
on_set: BasicBlock)
-> BasicBlock {
self.drop_flag_test_block_with_succ(c, c.is_cleanup, on_set, c.succ)
}
fn drop_flag_test_block_with_succ<'a>(&mut self,
c: &DropCtxt<'a, 'tcx>,
is_cleanup: bool,
on_set: BasicBlock,
on_unset: BasicBlock)
-> BasicBlock
{
let (maybe_live, maybe_dead) = c.init_data.state(c.path);
debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
c, is_cleanup, on_set, (maybe_live, maybe_dead));
match (maybe_live, maybe_dead) {
(false, _) => on_unset,
(true, false) => on_set,
(true, true) => {
let flag = self.drop_flag(c.path).unwrap();
let term = TerminatorKind::if_(self.tcx, Operand::Consume(flag), on_set, on_unset);
self.new_block(c, is_cleanup, term)
}
}
}
fn drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
self.new_block(c, c.is_cleanup, TerminatorKind::Drop {
location: c.lvalue.clone(),
target: c.succ,
unwind: c.unwind
})
}
fn box_free_block<'a>(
&mut self,
c: &DropCtxt<'a, 'tcx>,
ty: Ty<'tcx>,
target: BasicBlock,
is_cleanup: bool
) -> BasicBlock {
let block = self.unelaborated_free_block(c, ty, target, is_cleanup);
self.drop_flag_test_block_with_succ(c, is_cleanup, block, target)
}
fn unelaborated_free_block<'a>(
&mut self,
c: &DropCtxt<'a, 'tcx>,
ty: Ty<'tcx>,
target: BasicBlock,
is_cleanup: bool
) -> BasicBlock {
let mut statements = vec![];
if let Some(&flag) = self.drop_flags.get(&c.path) {
statements.push(Statement {
source_info: c.source_info,
kind: StatementKind::Assign(
Lvalue::Local(flag),
self.constant_bool(c.source_info.span, false)
)
});
}
let tcx = self.tcx;
let unit_temp = Lvalue::Local(self.patch.new_temp(tcx.mk_nil()));
let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
let fty = tcx.item_type(free_func).subst(tcx, substs);
self.patch.new_block(BasicBlockData {
statements: statements,
terminator: Some(Terminator {
source_info: c.source_info, kind: TerminatorKind::Call {
func: Operand::Constant(Constant {
span: c.source_info.span,
ty: fty,
literal: Literal::Item {
def_id: free_func,
substs: substs
}
}),
args: vec![Operand::Consume(c.lvalue.clone())],
destination: Some((unit_temp, target)),
cleanup: None
}
}),
is_cleanup: is_cleanup
})
}
fn must_complete_drop<'a>(&self, c: &DropCtxt<'a, 'tcx>) -> bool {
// if we have a destructor, we must *not* split the drop.
// dataflow can create unneeded children in some cases
// - be sure to ignore them.
let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
match ty.sty {
ty::TyAdt(def, _) => {
if def.has_dtor(self.tcx) && !def.is_box() {
self.tcx.sess.span_warn(
c.source_info.span,
&format!("dataflow bug??? moving out of type with dtor {:?}",
c));
true
} else {
false
}
}
_ => false
}
}
fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
Rvalue::Use(Operand::Constant(Constant {
span: span,
@@ -1023,15 +577,4 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
}
}
}
fn drop_flags_for_drop<'a>(&mut self,
c: &DropCtxt<'a, 'tcx>,
bb: BasicBlock)
{
let loc = self.patch.terminator_loc(self.mir, bb);
on_all_children_bits(
self.tcx, self.mir, self.move_data(), c.path,
|child| self.set_drop_flag(loc, child, DropFlagState::Absent)
);
}
}

View File

@@ -16,12 +16,12 @@ use syntax_pos::DUMMY_SP;
use rustc::mir::{self, BasicBlock, BasicBlockData, Mir, Statement, Terminator, Location};
use rustc::session::Session;
use rustc::ty::{self, TyCtxt};
use rustc_mir::util::elaborate_drops::DropFlagState;
mod abs_domain;
pub mod elaborate_drops;
mod dataflow;
mod gather_moves;
mod patch;
// mod graphviz;
use self::dataflow::{BitDenotation};
@@ -183,21 +183,6 @@ impl<'b, 'a: 'b, 'tcx: 'a> MirBorrowckCtxt<'b, 'a, 'tcx> {
}
}
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
enum DropFlagState {
Present, // i.e. initialized
Absent, // i.e. deinitialized or "moved"
}
impl DropFlagState {
fn value(self) -> bool {
match self {
DropFlagState::Present => true,
DropFlagState::Absent => false
}
}
}
fn move_path_children_matching<'tcx, F>(move_data: &MoveData<'tcx>,
path: MovePathIndex,
mut cond: F)

View File

@@ -26,8 +26,7 @@ use rustc::session::config::Input;
use rustc_borrowck as borrowck;
use rustc_borrowck::graphviz as borrowck_dot;
use rustc_mir::pretty::write_mir_pretty;
use rustc_mir::graphviz::write_mir_graphviz;
use rustc_mir::util::{write_mir_pretty, write_mir_graphviz};
use syntax::ast::{self, BlockCheckMode};
use syntax::fold::{self, Folder};

View File

@@ -49,13 +49,11 @@ pub mod diagnostics;
pub mod build;
pub mod callgraph;
pub mod def_use;
pub mod graphviz;
mod hair;
mod shim;
pub mod mir_map;
pub mod pretty;
pub mod transform;
pub mod util;
use rustc::ty::maps::Providers;

View File

@@ -23,8 +23,8 @@ use rustc::mir::Mir;
use rustc::mir::transform::MirSource;
use rustc::mir::visit::MutVisitor;
use shim;
use pretty;
use hair::cx::Cx;
use util as mir_util;
use rustc::traits::Reveal;
use rustc::ty::{self, Ty, TyCtxt};
@@ -175,7 +175,7 @@ fn build_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId)
mem::transmute::<Mir, Mir<'tcx>>(mir)
};
pretty::dump_mir(tcx, "mir_map", &0, src, &mir);
mir_util::dump_mir(tcx, "mir_map", &0, src, &mir);
tcx.alloc_mir(mir)
})
@@ -234,7 +234,7 @@ fn create_constructor_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
mem::transmute::<Mir, Mir<'tcx>>(mir)
};
pretty::dump_mir(tcx, "mir_map", &0, src, &mir);
mir_util::dump_mir(tcx, "mir_map", &0, src, &mir);
tcx.alloc_mir(mir)
})

View File

@@ -29,11 +29,11 @@
//! (non-mutating) use of `SRC`. These restrictions are conservative and may be relaxed in the
//! future.
use def_use::DefUseAnalysis;
use rustc::mir::{Constant, Local, LocalKind, Location, Lvalue, Mir, Operand, Rvalue, StatementKind};
use rustc::mir::transform::{MirPass, MirSource, Pass};
use rustc::mir::visit::MutVisitor;
use rustc::ty::TyCtxt;
use util::def_use::DefUseAnalysis;
use transform::qualify_consts;
pub struct CopyPropagation;

View File

@@ -15,7 +15,7 @@ use std::fmt;
use rustc::ty::TyCtxt;
use rustc::mir::*;
use rustc::mir::transform::{Pass, MirPass, MirPassHook, MirSource};
use pretty;
use util as mir_util;
pub struct Marker<'a>(pub &'a str);
@@ -56,7 +56,7 @@ impl<'tcx> MirPassHook<'tcx> for DumpMir {
pass: &Pass,
is_after: bool)
{
pretty::dump_mir(
mir_util::dump_mir(
tcx,
&*pass.name(),
&Disambiguator {

View File

@@ -0,0 +1,561 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::fmt;
use rustc::mir::*;
use rustc::middle::lang_items;
use rustc::ty::{self, Ty};
use rustc::ty::subst::{Kind, Subst, Substs};
use rustc::ty::util::IntTypeExt;
use rustc_data_structures::indexed_vec::Idx;
use util::patch::MirPatch;
use std::iter;
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum DropFlagState {
Present, // i.e. initialized
Absent, // i.e. deinitialized or "moved"
}
impl DropFlagState {
pub fn value(self) -> bool {
match self {
DropFlagState::Present => true,
DropFlagState::Absent => false
}
}
}
#[derive(Debug)]
pub enum DropStyle {
Dead,
Static,
Conditional,
Open,
}
#[derive(Debug)]
pub enum DropFlagMode {
Shallow,
Deep
}
pub trait DropElaborator<'a, 'tcx: 'a> : fmt::Debug {
type Path : Copy + fmt::Debug;
fn patch(&mut self) -> &mut MirPatch<'tcx>;
fn mir(&self) -> &'a Mir<'tcx>;
fn tcx(&self) -> ty::TyCtxt<'a, 'tcx, 'tcx>;
fn param_env(&self) -> &'a ty::ParameterEnvironment<'tcx>;
fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;
fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;
fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);
fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path>;
fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;
fn downcast_subpath(&self, path: Self::Path, variant: usize) -> Option<Self::Path>;
}
#[derive(Debug)]
struct DropCtxt<'l, 'b: 'l, 'tcx: 'b, D>
where D : DropElaborator<'b, 'tcx> + 'l
{
elaborator: &'l mut D,
source_info: SourceInfo,
is_cleanup: bool,
lvalue: &'l Lvalue<'tcx>,
path: D::Path,
succ: BasicBlock,
unwind: Option<BasicBlock>,
}
pub fn elaborate_drop<'b, 'tcx, D>(
elaborator: &mut D,
source_info: SourceInfo,
is_cleanup: bool,
lvalue: &Lvalue<'tcx>,
path: D::Path,
succ: BasicBlock,
unwind: Option<BasicBlock>,
bb: BasicBlock)
where D: DropElaborator<'b, 'tcx>
{
DropCtxt {
elaborator, source_info, is_cleanup, lvalue, path, succ, unwind
}.elaborate_drop(bb)
}
impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
where D: DropElaborator<'b, 'tcx>
{
fn lvalue_ty(&self, lvalue: &Lvalue<'tcx>) -> Ty<'tcx> {
lvalue.ty(self.elaborator.mir(), self.tcx()).to_ty(self.tcx())
}
fn tcx(&self) -> ty::TyCtxt<'b, 'tcx, 'tcx> {
self.elaborator.tcx()
}
/// This elaborates a single drop instruction, located at `bb`, and
/// patches over it.
///
/// The elaborated drop checks the drop flags to only drop what
/// is initialized.
///
/// In addition, the relevant drop flags also need to be cleared
/// to avoid double-drops. However, in the middle of a complex
/// drop, one must avoid clearing some of the flags before they
/// are read, as that would cause a memory leak.
///
/// In particular, when dropping an ADT, multiple fields may be
/// joined together under the `rest` subpath. They are all controlled
/// by the primary drop flag, but only the last rest-field dropped
/// should clear it (and it must also not clear anything else).
///
/// FIXME: I think we should just control the flags externally
/// and then we do not need this machinery.
pub fn elaborate_drop<'a>(&mut self, bb: BasicBlock) {
debug!("elaborate_drop({:?})", self);
let style = self.elaborator.drop_style(self.path, DropFlagMode::Deep);
debug!("elaborate_drop({:?}): live - {:?}", self, style);
match style {
DropStyle::Dead => {
self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
target: self.succ
});
}
DropStyle::Static => {
let loc = self.terminator_loc(bb);
self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep);
self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop {
location: self.lvalue.clone(),
target: self.succ,
unwind: self.unwind
});
}
DropStyle::Conditional => {
let drop_bb = self.complete_drop(Some(DropFlagMode::Deep));
self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
target: drop_bb
});
}
DropStyle::Open => {
let drop_bb = self.open_drop();
self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
target: drop_bb
});
}
}
}
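
The doc comment above is the heart of the matter: an elaborated drop tests a flag before dropping and clears it afterwards, so that merging control-flow paths cannot drop the same value twice, while clearing a flag before all of its readers would instead leak. A toy, self-contained illustration of that flag discipline (names and types are invented, nothing here is MIR):

use std::collections::HashMap;

/// Toy drop flags: `true` means "this path still owns a value".
#[derive(Default)]
struct Flags(HashMap<&'static str, bool>);

impl Flags {
    fn set(&mut self, path: &'static str, v: bool) { self.0.insert(path, v); }
    fn get(&self, path: &'static str) -> bool { *self.0.get(path).unwrap_or(&false) }

    /// The shape of an elaborated, conditional drop: test, drop, then clear
    /// the flag so a later merge point cannot drop the same path again.
    fn elaborated_drop(&mut self, path: &'static str, dropped: &mut Vec<&'static str>) {
        if self.get(path) {
            dropped.push(path);    // "run the destructor"
            self.set(path, false); // clear only after the drop has happened
        }
    }
}

fn main() {
    let mut flags = Flags::default();
    let mut dropped = Vec::new();
    flags.set("x.0", true);
    flags.set("x.1", false); // x.1 was moved out on this path

    // Drop ladder for `x`: each field consults its own flag.
    flags.elaborated_drop("x.0", &mut dropped);
    flags.elaborated_drop("x.1", &mut dropped);
    // A redundant drop of x.0 later on (e.g. after a merge) is now a no-op.
    flags.elaborated_drop("x.0", &mut dropped);

    assert_eq!(dropped, vec!["x.0"]);
}
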
/// Return the lvalue and move path for each field of `variant`,
/// (the move path is `None` if the field is a rest field).
fn move_paths_for_fields(&self,
base_lv: &Lvalue<'tcx>,
variant_path: D::Path,
variant: &'tcx ty::VariantDef,
substs: &'tcx Substs<'tcx>)
-> Vec<(Lvalue<'tcx>, Option<D::Path>)>
{
variant.fields.iter().enumerate().map(|(i, f)| {
let field = Field::new(i);
let subpath = self.elaborator.field_subpath(variant_path, field);
let field_ty =
self.tcx().normalize_associated_type_in_env(
&f.ty(self.tcx(), substs),
self.elaborator.param_env()
);
(base_lv.clone().field(field, field_ty), subpath)
}).collect()
}
fn drop_subpath(&mut self,
is_cleanup: bool,
lvalue: &Lvalue<'tcx>,
path: Option<D::Path>,
succ: BasicBlock,
unwind: Option<BasicBlock>)
-> BasicBlock
{
if let Some(path) = path {
debug!("drop_subpath: for std field {:?}", lvalue);
DropCtxt {
elaborator: self.elaborator,
source_info: self.source_info,
path, lvalue, succ, unwind, is_cleanup
}.elaborated_drop_block()
} else {
debug!("drop_subpath: for rest field {:?}", lvalue);
DropCtxt {
elaborator: self.elaborator,
source_info: self.source_info,
lvalue, succ, unwind, is_cleanup,
// Using `self.path` here to condition the drop on
// our own drop flag.
path: self.path
}.complete_drop(None)
}
}
/// Create one-half of the drop ladder for a list of fields, and return
/// the list of steps in it in reverse order.
///
/// `unwind_ladder` is such a list of steps in reverse order,
/// which is called instead of the next step if the drop unwinds
/// (the first field is never reached). If it is `None`, all
/// unwind targets are left blank.
fn drop_halfladder<'a>(&mut self,
unwind_ladder: Option<Vec<BasicBlock>>,
succ: BasicBlock,
fields: &[(Lvalue<'tcx>, Option<D::Path>)],
is_cleanup: bool)
-> Vec<BasicBlock>
{
let mut unwind_succ = if is_cleanup {
None
} else {
self.unwind
};
let goto = TerminatorKind::Goto { target: succ };
let mut succ = self.new_block(is_cleanup, goto);
// Always clear the "master" drop flag at the bottom of the
// ladder. This is needed because the "master" drop flag
// protects the ADT's discriminant, which is invalidated
// after the ADT is dropped.
let succ_loc = Location { block: succ, statement_index: 0 };
self.elaborator.clear_drop_flag(succ_loc, self.path, DropFlagMode::Shallow);
fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
succ = self.drop_subpath(is_cleanup, lv, path, succ, unwind_succ);
unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);
succ
}).collect()
}
/// Create a full drop ladder, consisting of 2 connected half-drop-ladders
///
/// For example, with 3 fields, the drop ladder is
///
/// .d0:
/// ELAB(drop location.0 [target=.d1, unwind=.c1])
/// .d1:
/// ELAB(drop location.1 [target=.d2, unwind=.c2])
/// .d2:
/// ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
/// .c1:
/// ELAB(drop location.1 [target=.c2])
/// .c2:
/// ELAB(drop location.2 [target=`self.unwind`])
fn drop_ladder<'a>(&mut self,
fields: Vec<(Lvalue<'tcx>, Option<D::Path>)>)
-> BasicBlock
{
debug!("drop_ladder({:?}, {:?})", self, fields);
let mut fields = fields;
fields.retain(|&(ref lvalue, _)| {
self.tcx().type_needs_drop_given_env(
self.lvalue_ty(lvalue), self.elaborator.param_env())
});
debug!("drop_ladder - fields needing drop: {:?}", fields);
let unwind_ladder = if self.is_cleanup {
None
} else {
let unwind = self.unwind.unwrap(); // FIXME(#6393)
Some(self.drop_halfladder(None, unwind, &fields, true))
};
let succ = self.succ; // FIXME(#6393)
let is_cleanup = self.is_cleanup;
self.drop_halfladder(unwind_ladder, succ, &fields, is_cleanup)
.last().cloned().unwrap_or(succ)
}
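
The indexing in `drop_halfladder` is easy to misread: blocks are created from the last field backwards, and each drop block in the normal half-ladder unwinds one step into the cleanup half-ladder, whose own drops carry no unwind edges. A small sketch that regenerates the shape of the diagram above for any number of fields; the labels are invented:

fn describe_ladder(n: usize) -> Vec<String> {
    let mut out = Vec::new();
    // Normal half-ladder: drop field i, continue to the next field's block,
    // and unwind into the cleanup half-ladder at .c(i+1).
    for i in 0..n {
        let target = if i + 1 < n { format!(".d{}", i + 1) } else { "succ".to_string() };
        let unwind = if i + 1 < n { format!(".c{}", i + 1) } else { "unwind".to_string() };
        out.push(format!(".d{}: drop field {} [target={}, unwind={}]", i, i, target, unwind));
    }
    // Cleanup half-ladder: drops the *remaining* fields, no unwind edges.
    for i in 1..n {
        let target = if i + 1 < n { format!(".c{}", i + 1) } else { "unwind".to_string() };
        out.push(format!(".c{}: drop field {} [target={}]", i, i, target));
    }
    out
}

fn main() {
    // For three fields this reproduces the doc comment's diagram:
    // .d0 -> .d1 -> .d2 -> succ, with unwinds into .c1 -> .c2 -> unwind.
    for line in describe_ladder(3) {
        println!("{}", line);
    }
}
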
fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
-> BasicBlock
{
debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
let fields = tys.iter().enumerate().map(|(i, &ty)| {
(self.lvalue.clone().field(Field::new(i), ty),
self.elaborator.field_subpath(self.path, Field::new(i)))
}).collect();
self.drop_ladder(fields)
}
fn open_drop_for_box<'a>(&mut self, ty: Ty<'tcx>) -> BasicBlock
{
debug!("open_drop_for_box({:?}, {:?})", self, ty);
let interior = self.lvalue.clone().deref();
let interior_path = self.elaborator.deref_subpath(self.path);
let succ = self.succ; // FIXME(#6393)
let is_cleanup = self.is_cleanup;
let succ = self.box_free_block(ty, succ, is_cleanup);
let unwind_succ = self.unwind.map(|u| {
self.box_free_block(ty, u, true)
});
self.drop_subpath(is_cleanup, &interior, interior_path, succ, unwind_succ)
}
fn open_drop_for_adt<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
-> BasicBlock {
debug!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt, substs);
match adt.variants.len() {
1 => {
let fields = self.move_paths_for_fields(
self.lvalue,
self.path,
&adt.variants[0],
substs
);
self.drop_ladder(fields)
}
_ => {
let mut values = Vec::with_capacity(adt.variants.len());
let mut blocks = Vec::with_capacity(adt.variants.len());
let mut otherwise = None;
for (variant_index, discr) in adt.discriminants(self.tcx()).enumerate() {
let subpath = self.elaborator.downcast_subpath(
self.path, variant_index);
if let Some(variant_path) = subpath {
let base_lv = self.lvalue.clone().elem(
ProjectionElem::Downcast(adt, variant_index)
);
let fields = self.move_paths_for_fields(
&base_lv,
variant_path,
&adt.variants[variant_index],
substs);
values.push(discr);
blocks.push(self.drop_ladder(fields));
} else {
// variant not found - drop the entire enum
if let None = otherwise {
otherwise =
Some(self.complete_drop(Some(DropFlagMode::Shallow)));
}
}
}
if let Some(block) = otherwise {
blocks.push(block);
} else {
values.pop();
}
// If there are multiple variants, then if something
// is present within the enum, the discriminant (tracked
// by the rest path) must be initialized.
//
// Additionally, we do not want to switch on the
// discriminant after it is freed, because that
// way lies only trouble.
let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
let discr = Lvalue::Local(self.new_temp(discr_ty));
let discr_rv = Rvalue::Discriminant(self.lvalue.clone());
let switch_block = self.elaborator.patch().new_block(BasicBlockData {
statements: vec![
Statement {
source_info: self.source_info,
kind: StatementKind::Assign(discr.clone(), discr_rv),
}
],
terminator: Some(Terminator {
source_info: self.source_info,
kind: TerminatorKind::SwitchInt {
discr: Operand::Consume(discr),
switch_ty: discr_ty,
values: From::from(values),
targets: blocks,
}
}),
is_cleanup: self.is_cleanup,
});
self.drop_flag_test_block(switch_block)
}
}
}
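
For the multi-variant arm above, the subtle part is how the `SwitchInt` targets line up with the discriminant values: each variant with a known move subpath gets its own ladder, variants the move analysis never saw share one "drop the whole enum" block, and since `SwitchInt` takes exactly one more target than it has values, either that shared block is appended or the last value is popped. A simplified stand-in (variant indices are used as discriminant values for brevity; the real code reads them from `adt.discriminants`):

/// `variant_ladders[i]` is the drop-ladder block for variant `i`, if the
/// move data had a downcast subpath for it.
fn switch_targets(
    variant_ladders: Vec<Option<&'static str>>,
    complete_drop_block: &'static str,
) -> (Vec<u128>, Vec<&'static str>) {
    let mut values = Vec::new();
    let mut blocks = Vec::new();
    let mut otherwise = None;
    for (discr, ladder) in variant_ladders.into_iter().enumerate() {
        match ladder {
            Some(block) => {
                values.push(discr as u128);
                blocks.push(block);
            }
            // Variant not found in the move data: fall back to dropping the
            // entire enum behind its own flag, one block shared by all such variants.
            None => { otherwise.get_or_insert(complete_drop_block); }
        }
    }
    match otherwise {
        Some(block) => blocks.push(block), // explicit `otherwise` target
        None => { values.pop(); }          // last listed variant becomes `otherwise`
    }
    (values, blocks)
}

fn main() {
    // Three variants; only variants 0 and 2 have move subpaths.
    let (values, blocks) =
        switch_targets(vec![Some("ladder0"), None, Some("ladder2")], "complete_drop");
    assert_eq!(values, vec![0u128, 2]);
    assert_eq!(blocks, vec!["ladder0", "ladder2", "complete_drop"]);
}
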
/// The slow-path - create an "open", elaborated drop for a type
/// which is moved-out-of only partially, and patch `bb` to a jump
/// to it. This must not be called on ADTs with a destructor,
/// as these can't be moved-out-of, except for `Box<T>`, which is
/// special-cased.
///
/// This creates a "drop ladder" that drops the needed fields of the
/// ADT, both in the success case and when one of the destructors fails.
fn open_drop<'a>(&mut self) -> BasicBlock {
let ty = self.lvalue_ty(self.lvalue);
match ty.sty {
ty::TyClosure(def_id, substs) => {
let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
self.open_drop_for_tuple(&tys)
}
ty::TyTuple(tys, _) => {
self.open_drop_for_tuple(tys)
}
ty::TyAdt(def, _) if def.is_box() => {
self.open_drop_for_box(ty.boxed_ty())
}
ty::TyAdt(def, substs) => {
self.open_drop_for_adt(def, substs)
}
_ => bug!("open drop from non-ADT `{:?}`", ty)
}
}
/// Returns a basic block that drops an lvalue using the context
/// and path in `self`. If `drop_mode` is `Some(..)`, also clear
/// the drop flag for `self.path` according to that mode.
///
/// if FLAG(self.path)
/// if let Some(mode) = mode: FLAG(self.path)[mode] = false
/// drop(self.lv)
fn complete_drop<'a>(&mut self, drop_mode: Option<DropFlagMode>) -> BasicBlock
{
debug!("complete_drop({:?},{:?})", self, drop_mode);
let drop_block = self.drop_block();
if let Some(mode) = drop_mode {
let block_start = Location { block: drop_block, statement_index: 0 };
self.elaborator.clear_drop_flag(block_start, self.path, mode);
}
self.drop_flag_test_block(drop_block)
}
fn elaborated_drop_block<'a>(&mut self) -> BasicBlock {
debug!("elaborated_drop_block({:?})", self);
let blk = self.drop_block();
self.elaborate_drop(blk);
blk
}
fn box_free_block<'a>(
&mut self,
ty: Ty<'tcx>,
target: BasicBlock,
is_cleanup: bool
) -> BasicBlock {
let block = self.unelaborated_free_block(ty, target, is_cleanup);
self.drop_flag_test_block_with_succ(is_cleanup, block, target)
}
fn unelaborated_free_block<'a>(
&mut self,
ty: Ty<'tcx>,
target: BasicBlock,
is_cleanup: bool
) -> BasicBlock {
let tcx = self.tcx();
let unit_temp = Lvalue::Local(self.new_temp(tcx.mk_nil()));
let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
let fty = tcx.item_type(free_func).subst(tcx, substs);
let free_block = self.elaborator.patch().new_block(BasicBlockData {
statements: vec![],
terminator: Some(Terminator {
source_info: self.source_info, kind: TerminatorKind::Call {
func: Operand::Constant(Constant {
span: self.source_info.span,
ty: fty,
literal: Literal::Item {
def_id: free_func,
substs: substs
}
}),
args: vec![Operand::Consume(self.lvalue.clone())],
destination: Some((unit_temp, target)),
cleanup: None
}
}),
is_cleanup: is_cleanup
});
let block_start = Location { block: free_block, statement_index: 0 };
self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
free_block
}
fn drop_block<'a>(&mut self) -> BasicBlock {
let block = TerminatorKind::Drop {
location: self.lvalue.clone(),
target: self.succ,
unwind: self.unwind
};
let is_cleanup = self.is_cleanup; // FIXME(#6393)
self.new_block(is_cleanup, block)
}
fn drop_flag_test_block<'a>(&mut self, on_set: BasicBlock) -> BasicBlock {
let is_cleanup = self.is_cleanup;
let succ = self.succ; // FIXME(#6393)
self.drop_flag_test_block_with_succ(is_cleanup, on_set, succ)
}
fn drop_flag_test_block_with_succ<'a>(&mut self,
is_cleanup: bool,
on_set: BasicBlock,
on_unset: BasicBlock)
-> BasicBlock
{
let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
self, is_cleanup, on_set, style);
match style {
DropStyle::Dead => on_unset,
DropStyle::Static => on_set,
DropStyle::Conditional | DropStyle::Open => {
let flag = self.elaborator.get_drop_flag(self.path).unwrap();
let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset);
self.new_block(is_cleanup, term)
}
}
}
fn new_block<'a>(&mut self,
is_cleanup: bool,
k: TerminatorKind<'tcx>)
-> BasicBlock
{
self.elaborator.patch().new_block(BasicBlockData {
statements: vec![],
terminator: Some(Terminator {
source_info: self.source_info, kind: k
}),
is_cleanup: is_cleanup
})
}
fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
self.elaborator.patch().new_temp(ty)
}
fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
let mir = self.elaborator.mir();
self.elaborator.patch().terminator_loc(mir, bb)
}
}

View File

@@ -0,0 +1,20 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub mod elaborate_drops;
pub mod def_use;
pub mod patch;
mod graphviz;
mod pretty;
pub use self::pretty::{dump_mir, write_mir_pretty};
pub use self::graphviz::{write_mir_graphviz};
pub use self::graphviz::write_node_label as write_graphviz_node_label;