Synthesize calls to box_free language item

This gets rid of the Drop(Free, _) MIR construct by synthesizing a call to the `box_free`
language item, which takes care of dropping instead.
This commit is contained in:
Simonas Kazlauskas 2016-01-28 23:59:00 +02:00
parent 7b9d6d3bc8
commit 432460a6fc
20 changed files with 127 additions and 69 deletions

View File

@ -39,11 +39,17 @@ unsafe fn allocate(size: usize, _align: usize) -> *mut u8 {
p
}
// Minimal `exchange_free` lang item for this no-std test fixture: releases a
// heap allocation previously obtained from `allocate` above.
#[lang = "exchange_free"]
unsafe fn deallocate(ptr: *mut u8, _size: usize, _align: usize) {
    // Size and alignment are ignored here; libc's allocator tracks them itself.
    libc::free(ptr as *mut libc::c_void)
}
// `box_free` lang item: invoked by compiler-synthesized cleanup code to free
// the memory of a box whose contents were never (fully) initialised.
#[lang = "box_free"]
unsafe fn box_free<T>(ptr: *mut T) {
    deallocate(ptr as *mut u8, ::core::mem::size_of::<T>(), ::core::mem::align_of::<T>());
}
#[start]
fn main(argc: isize, argv: *const *const u8) -> isize {
let x = box 1;

View File

@ -16,6 +16,8 @@
issue = "27700")]
use core::{isize, usize};
#[cfg(not(test))]
use core::intrinsics::{size_of, min_align_of};
#[allow(improper_ctypes)]
extern "C" {
@ -147,6 +149,17 @@ unsafe fn exchange_free(ptr: *mut u8, old_size: usize, align: usize) {
deallocate(ptr, old_size, align);
}
// `box_free` lang item for liballoc: frees the allocation behind a `Box<T>`.
// The compiler synthesizes calls to this during unwinding when a box is only
// partially constructed.
#[cfg(not(test))]
#[lang = "box_free"]
#[inline]
unsafe fn box_free<T>(ptr: *mut T) {
    let size = size_of::<T>();
    // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
    if size != 0 {
        deallocate(ptr as *mut u8, size, min_align_of::<T>());
    }
}
#[cfg(test)]
mod tests {
extern crate test;

View File

@ -435,7 +435,7 @@ impl<'a, 'tcx> DeadVisitor<'a, 'tcx> {
let is_named = node.name().is_some();
let field_type = self.tcx.node_id_to_type(node.id);
let is_marker_field = match field_type.ty_to_def_id() {
Some(def_id) => self.tcx.lang_items.items().any(|(_, item)| *item == Some(def_id)),
Some(def_id) => self.tcx.lang_items.items().iter().any(|item| *item == Some(def_id)),
_ => false
};
is_named

View File

@ -36,9 +36,6 @@ use syntax::parse::token::InternedString;
use rustc_front::intravisit::Visitor;
use rustc_front::hir;
use std::iter::Enumerate;
use std::slice;
// The actual lang items defined come at the end of this file in one handy table.
// So you probably just want to nip down to the end.
macro_rules! lets_do_this {
@ -69,8 +66,8 @@ impl LanguageItems {
}
}
pub fn items<'a>(&'a self) -> Enumerate<slice::Iter<'a, Option<DefId>>> {
self.items.iter().enumerate()
pub fn items(&self) -> &[Option<DefId>] {
&*self.items
}
pub fn item_name(index: usize) -> &'static str {
@ -334,6 +331,7 @@ lets_do_this! {
ExchangeMallocFnLangItem, "exchange_malloc", exchange_malloc_fn;
ExchangeFreeFnLangItem, "exchange_free", exchange_free_fn;
BoxFreeFnLangItem, "box_free", box_free_fn;
StrDupUniqFnLangItem, "strdup_uniq", strdup_uniq_fn;
StartFnLangItem, "start", start_fn;

View File

@ -362,7 +362,7 @@ pub fn find_reachable(tcx: &ty::ctxt,
for (id, _) in &access_levels.map {
reachable_context.worklist.push(*id);
}
for (_, item) in tcx.lang_items.items() {
for item in tcx.lang_items.items().iter() {
if let Some(did) = *item {
if let Some(node_id) = tcx.map.as_local_node_id(did) {
reachable_context.worklist.push(node_id);

View File

@ -490,14 +490,7 @@ pub struct Statement<'tcx> {
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub enum StatementKind<'tcx> {
Assign(Lvalue<'tcx>, Rvalue<'tcx>),
Drop(DropKind, Lvalue<'tcx>),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)]
pub enum DropKind {
/// free a partially constructed box, should go away eventually
Free,
Deep
Drop(Lvalue<'tcx>),
}
impl<'tcx> Debug for Statement<'tcx> {
@ -505,8 +498,7 @@ impl<'tcx> Debug for Statement<'tcx> {
use self::StatementKind::*;
match self.kind {
Assign(ref lv, ref rv) => write!(fmt, "{:?} = {:?}", lv, rv),
Drop(DropKind::Free, ref lv) => write!(fmt, "free {:?}", lv),
Drop(DropKind::Deep, ref lv) => write!(fmt, "drop {:?}", lv),
Drop(ref lv) => write!(fmt, "drop {:?}", lv),
}
}
}

View File

@ -124,7 +124,7 @@ macro_rules! make_mir_visitor {
ref $($mutability)* rvalue) => {
self.visit_assign(block, lvalue, rvalue);
}
StatementKind::Drop(_, ref $($mutability)* lvalue) => {
StatementKind::Drop(ref $($mutability)* lvalue) => {
self.visit_lvalue(lvalue, LvalueContext::Drop);
}
}

View File

@ -1649,7 +1649,7 @@ fn encode_crate_deps(rbml_w: &mut Encoder, cstore: &cstore::CStore) {
fn encode_lang_items(ecx: &EncodeContext, rbml_w: &mut Encoder) {
rbml_w.start_tag(tag_lang_items);
for (i, &opt_def_id) in ecx.tcx.lang_items.items() {
for (i, &opt_def_id) in ecx.tcx.lang_items.items().iter().enumerate() {
if let Some(def_id) = opt_def_id {
if def_id.is_local() {
rbml_w.start_tag(tag_lang_items_item);

View File

@ -32,16 +32,21 @@ impl<'tcx> CFG<'tcx> {
BasicBlock::new(node_index)
}
/// Creates a fresh basic block pre-marked as a cleanup block, i.e. one that
/// is only reachable on the unwind path.
pub fn start_new_cleanup_block(&mut self) -> BasicBlock {
    let bb = self.start_new_block();
    self.block_data_mut(bb).is_cleanup = true;
    bb
}
/// Appends `statement` to the end of `block`'s statement list.
pub fn push(&mut self, block: BasicBlock, statement: Statement<'tcx>) {
    debug!("push({:?}, {:?})", block, statement);
    self.block_data_mut(block).statements.push(statement);
}
pub fn push_drop(&mut self, block: BasicBlock, span: Span,
kind: DropKind, lvalue: &Lvalue<'tcx>) {
pub fn push_drop(&mut self, block: BasicBlock, span: Span, lvalue: &Lvalue<'tcx>) {
self.push(block, Statement {
span: span,
kind: StatementKind::Drop(kind, lvalue.clone())
kind: StatementKind::Drop(lvalue.clone())
});
}

View File

@ -59,25 +59,18 @@ impl<'a,'tcx> Builder<'a,'tcx> {
let arg = unpack!(block = this.as_operand(block, arg));
block.and(Rvalue::UnaryOp(op, arg))
}
ExprKind::Box { value } => {
ExprKind::Box { value, value_extents } => {
let value = this.hir.mirror(value);
let result = this.temp(expr.ty);
// to start, malloc some memory of suitable type (thus far, uninitialized):
let rvalue = Rvalue::Box(value.ty);
this.cfg.push_assign(block, expr_span, &result, rvalue);
// schedule a shallow free of that memory, lest we unwind:
let extent = this.extent_of_innermost_scope();
this.schedule_drop(expr_span, extent, DropKind::Free, &result, value.ty);
// initialize the box contents:
let contents = result.clone().deref();
unpack!(block = this.into(&contents, block, value));
// now that the result is fully initialized, cancel the drop
// by "using" the result (which is linear):
block.and(Rvalue::Use(Operand::Consume(result)))
this.cfg.push_assign(block, expr_span, &result, Rvalue::Box(value.ty));
this.in_scope(value_extents, block, |this| {
// schedule a shallow free of that memory, lest we unwind:
this.schedule_box_free(expr_span, value_extents, &result, value.ty);
// initialize the box contents:
unpack!(block = this.into(&result.clone().deref(), block, value));
block.and(Rvalue::Use(Operand::Consume(result)))
})
}
ExprKind::Cast { source } => {
let source = unpack!(block = this.as_operand(block, source));

View File

@ -41,7 +41,7 @@ impl<'a,'tcx> Builder<'a,'tcx> {
this.hir.span_bug(expr.span, "no temp_lifetime for expr");
}
};
this.schedule_drop(expr.span, temp_lifetime, DropKind::Deep, &temp, expr_ty);
this.schedule_drop(expr.span, temp_lifetime, &temp, expr_ty);
// Careful here not to cause an infinite cycle. If we always
// called `into`, then for lvalues like `x.f`, it would

View File

@ -188,7 +188,7 @@ impl<'a,'tcx> Builder<'a,'tcx> {
// operators like x[j] = x[i].
let rhs = unpack!(block = this.as_operand(block, rhs));
let lhs = unpack!(block = this.as_lvalue(block, lhs));
this.cfg.push_drop(block, expr_span, DropKind::Deep, &lhs);
this.cfg.push_drop(block, expr_span, &lhs);
this.cfg.push_assign(block, expr_span, &lhs, Rvalue::Use(rhs));
block.unit()
}

View File

@ -601,7 +601,7 @@ impl<'a,'tcx> Builder<'a,'tcx> {
ty: var_ty.clone(),
});
let index = index as u32;
self.schedule_drop(span, var_extent, DropKind::Deep, &Lvalue::Var(index), var_ty);
self.schedule_drop(span, var_extent, &Lvalue::Var(index), var_ty);
self.var_indices.insert(var_id, index);
debug!("declare_binding: index={:?}", index);

View File

@ -89,7 +89,7 @@ should go to.
use build::{BlockAnd, BlockAndExtension, Builder};
use rustc::middle::region::CodeExtent;
use rustc::middle::lang_items;
use rustc::middle::subst::Substs;
use rustc::middle::subst::{Substs, VecPerParamSpace};
use rustc::middle::ty::{self, Ty};
use rustc::mir::repr::*;
use syntax::codemap::{Span, DUMMY_SP};
@ -97,7 +97,8 @@ use syntax::parse::token::intern_and_get_ident;
pub struct Scope<'tcx> {
extent: CodeExtent,
drops: Vec<(DropKind, Span, Lvalue<'tcx>)>,
drops: Vec<(Span, Lvalue<'tcx>)>,
frees: Vec<(Span, Lvalue<'tcx>, Ty<'tcx>)>,
cached_block: Option<BasicBlock>,
}
@ -164,6 +165,7 @@ impl<'a,'tcx> Builder<'a,'tcx> {
self.scopes.push(Scope {
extent: extent.clone(),
drops: vec![],
frees: vec![],
cached_block: None,
});
}
@ -180,8 +182,8 @@ impl<'a,'tcx> Builder<'a,'tcx> {
// add in any drops needed on the fallthrough path (any other
// exiting paths, such as those that arise from `break`, will
// have drops already)
for (kind, span, lvalue) in scope.drops {
self.cfg.push_drop(block, span, kind, &lvalue);
for (span, lvalue) in scope.drops {
self.cfg.push_drop(block, span, &lvalue);
}
}
@ -225,8 +227,8 @@ impl<'a,'tcx> Builder<'a,'tcx> {
});
for scope in scopes.iter_mut().rev().take(scope_count) {
for &(kind, drop_span, ref lvalue) in &scope.drops {
cfg.push_drop(block, drop_span, kind, lvalue);
for &(drop_span, ref lvalue) in &scope.drops {
cfg.push_drop(block, drop_span, lvalue);
}
}
cfg.terminate(block, Terminator::Goto { target: target });
@ -242,23 +244,55 @@ impl<'a,'tcx> Builder<'a,'tcx> {
return None;
}
let tcx = self.hir.tcx();
let unit_tmp = self.get_unit_temp();
let mut terminator = Terminator::Resume;
// Given an array of scopes, we generate these from the outermost scope to the innermost
// one. Thus for array [S0, S1, S2] with corresponding cleanup blocks [B0, B1, B2], we will
// generate B0 <- B1 <- B2 in left-to-right order. The outermost scope (B0) will always
// terminate with a Resume terminator.
for scope in self.scopes.iter_mut().filter(|s| !s.drops.is_empty()) {
for scope in self.scopes.iter_mut().filter(|s| !s.drops.is_empty() || !s.frees.is_empty()) {
if let Some(b) = scope.cached_block {
terminator = Terminator::Goto { target: b };
continue;
} else {
let new_block = self.cfg.start_new_block();
self.cfg.block_data_mut(new_block).is_cleanup = true;
let mut new_block = self.cfg.start_new_cleanup_block();
self.cfg.terminate(new_block, terminator);
terminator = Terminator::Goto { target: new_block };
for &(kind, span, ref lvalue) in scope.drops.iter().rev() {
self.cfg.push_drop(new_block, span, kind, lvalue);
for &(span, ref lvalue) in scope.drops.iter().rev() {
self.cfg.push_drop(new_block, span, lvalue);
}
for &(_, ref lvalue, ref item_ty) in scope.frees.iter().rev() {
let item = lang_items::SpannedLangItems::box_free_fn(&tcx.lang_items)
.expect("box_free language item required");
let substs = tcx.mk_substs(Substs::new(
VecPerParamSpace::new(vec![], vec![], vec![item_ty]),
VecPerParamSpace::new(vec![], vec![], vec![])
));
let func = Constant {
span: item.1,
ty: tcx.lookup_item_type(item.0).ty.subst(substs),
literal: Literal::Item {
def_id: item.0,
kind: ItemKind::Function,
substs: substs
}
};
let old_block = new_block;
new_block = self.cfg.start_new_cleanup_block();
self.cfg.terminate(new_block, Terminator::Call {
func: Operand::Constant(func),
args: vec![Operand::Consume(lvalue.clone())],
kind: CallKind::Converging {
target: old_block,
destination: unit_tmp.clone()
}
});
terminator = Terminator::Goto { target: new_block };
}
scope.cached_block = Some(new_block);
}
}
@ -272,7 +306,6 @@ impl<'a,'tcx> Builder<'a,'tcx> {
pub fn schedule_drop(&mut self,
span: Span,
extent: CodeExtent,
kind: DropKind,
lvalue: &Lvalue<'tcx>,
lvalue_ty: Ty<'tcx>) {
if self.hir.needs_drop(lvalue_ty) {
@ -282,7 +315,7 @@ impl<'a,'tcx> Builder<'a,'tcx> {
// incorrect (i.e. they still are pointing at old cached_block).
scope.cached_block = None;
if scope.extent == extent {
scope.drops.push((kind, span, lvalue.clone()));
scope.drops.push((span, lvalue.clone()));
return;
}
}
@ -291,6 +324,27 @@ impl<'a,'tcx> Builder<'a,'tcx> {
}
}
/// Schedule dropping of a not yet fully initialised box. This cleanup will (and should) only
/// be translated into an unwind branch. The extent should be for the `EXPR` inside `box EXPR`.
pub fn schedule_box_free(&mut self,
                         span: Span,
                         extent: CodeExtent,
                         lvalue: &Lvalue<'tcx>,
                         item_ty: Ty<'tcx>) {
    // Walk scopes from innermost outwards until we find the one matching `extent`.
    for scope in self.scopes.iter_mut().rev() {
        // We must invalidate all the cached_blocks leading up to the scope we're looking
        // for, because otherwise some/most of the blocks in the chain might become
        // incorrect (i.e. they still are pointing at old cached_block).
        scope.cached_block = None;
        if scope.extent == extent {
            // Record the pending free; it is emitted later when the cleanup
            // chain for this scope is built.
            scope.frees.push((span, lvalue.clone(), item_ty));
            return;
        }
    }
    // No enclosing scope matched the requested extent: this is a builder bug.
    self.hir.span_bug(span,
        &format!("extent {:?} not in scope to drop {:?}", extent, lvalue));
}
/// Returns the extent of the innermost (most recently pushed) scope.
/// Panics if no scope has been pushed yet.
pub fn extent_of_innermost_scope(&self) -> CodeExtent {
    self.scopes.last().map(|scope| scope.extent).unwrap()
}
@ -299,6 +353,7 @@ impl<'a,'tcx> Builder<'a,'tcx> {
self.scopes.first().map(|scope| scope.extent).unwrap()
}
pub fn panic_bounds_check(&mut self,
block: BasicBlock,
index: Operand<'tcx>,
@ -405,4 +460,5 @@ impl<'a,'tcx> Builder<'a,'tcx> {
literal: self.hir.usize_literal(span_lines.line)
})
}
}

View File

@ -72,7 +72,7 @@ impl<'a,'tcx> Builder<'a,'tcx> {
let expr = this.hir.mirror(expr);
let temp = this.temp(expr.ty.clone());
unpack!(block = this.into(&temp, block, expr));
this.cfg.push_drop(block, span, DropKind::Deep, &temp);
this.cfg.push_drop(block, span, &temp);
block.unit()
}));
}

View File

@ -375,7 +375,10 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
hir::ExprType(ref source, _) =>
return source.make_mirror(cx),
hir::ExprBox(ref value) =>
ExprKind::Box { value: value.to_ref() },
ExprKind::Box {
value: value.to_ref(),
value_extents: cx.tcx.region_maps.node_extent(value.id)
},
hir::ExprVec(ref fields) =>
ExprKind::Vec { fields: fields.to_ref() },
hir::ExprTup(ref fields) =>

View File

@ -123,6 +123,7 @@ pub enum ExprKind<'tcx> {
},
Box {
value: ExprRef<'tcx>,
value_extents: CodeExtent,
},
Call {
ty: ty::Ty<'tcx>,

View File

@ -69,7 +69,7 @@ impl<'a, 'tcx> EraseRegions<'a, 'tcx> {
self.erase_regions_lvalue(lvalue);
self.erase_regions_rvalue(rvalue);
}
StatementKind::Drop(_, ref mut lvalue) => {
StatementKind::Drop(ref mut lvalue) => {
self.erase_regions_lvalue(lvalue);
}
}

View File

@ -82,13 +82,13 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
constant: &mir::Constant<'tcx>)
-> OperandRef<'tcx>
{
let ty = bcx.monomorphize(&constant.ty);
match constant.literal {
mir::Literal::Item { def_id, kind, substs } => {
let substs = bcx.tcx().mk_substs(bcx.monomorphize(&substs));
self.trans_item_ref(bcx, ty, kind, substs, def_id)
self.trans_item_ref(bcx, constant.ty, kind, substs, def_id)
}
mir::Literal::Value { ref value } => {
let ty = bcx.monomorphize(&constant.ty);
self.trans_constval(bcx, value, ty)
}
}

View File

@ -8,7 +8,6 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use rustc::middle::ty::LvaluePreference;
use rustc::mir::repr as mir;
use trans::common::Block;
use trans::debuginfo::DebugLoc;
@ -52,19 +51,11 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
}
}
mir::StatementKind::Drop(mir::DropKind::Deep, ref lvalue) => {
mir::StatementKind::Drop(ref lvalue) => {
let tr_lvalue = self.trans_lvalue(bcx, lvalue);
let ty = tr_lvalue.ty.to_ty(bcx.tcx());
glue::drop_ty(bcx, tr_lvalue.llval, ty, DebugLoc::None)
}
mir::StatementKind::Drop(mir::DropKind::Free, ref lvalue) => {
let tr_lvalue = self.trans_lvalue(bcx, lvalue);
let ty = tr_lvalue.ty.to_ty(bcx.tcx());
let content_ty = ty.builtin_deref(true, LvaluePreference::NoPreference);
let content_ty = content_ty.unwrap().ty;
glue::trans_exchange_free_ty(bcx, tr_lvalue.llval, content_ty, DebugLoc::None)
}
}
}
}