Remove common::Block.

This commit is contained in:
Mark-Simulacrum 2016-12-11 22:19:39 -07:00 committed by Mark Simulacrum
parent 3dbd141b8c
commit 8f3d824cc7
15 changed files with 201 additions and 330 deletions

View File

@ -575,8 +575,8 @@ pub fn with_cond<'blk, 'tcx, F>(
}
let fcx = bcx.fcx();
let next_cx = fcx.new_block("next").build();
let cond_cx = fcx.new_block("cond").build();
let next_cx = fcx.build_new_block("next");
let cond_cx = fcx.build_new_block("cond");
bcx.cond_br(val, cond_cx.llbb(), next_cx.llbb());
let after_cx = f(cond_cx);
after_cx.br(next_cx.llbb());
@ -619,17 +619,6 @@ impl Lifetime {
}
}
// Generates code for resumption of unwind at the end of a landing pad.
pub fn trans_unwind_resume(bcx: &BlockAndBuilder, lpval: ValueRef) {
if !bcx.sess().target.target.options.custom_unwind_resume {
bcx.resume(lpval);
} else {
let exc_ptr = bcx.extract_value(lpval, 0);
bcx.call(bcx.fcx().eh_unwind_resume().reify(bcx.ccx()), &[exc_ptr],
bcx.lpad().and_then(|b| b.bundle()));
}
}
pub fn call_memcpy<'bcx, 'tcx>(b: &Builder<'bcx, 'tcx>,
dst: ValueRef,
src: ValueRef,
@ -727,8 +716,8 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> {
pub fn new(ccx: &'blk CrateContext<'blk, 'tcx>,
llfndecl: ValueRef,
fn_ty: FnType,
definition: Option<(Instance<'tcx>, &ty::FnSig<'tcx>, Abi)>,
block_arena: &'blk TypedArena<common::BlockS<'blk, 'tcx>>)
definition: Option<(Instance<'tcx>, &ty::FnSig<'tcx>, Abi)>)
//block_arena: &'blk TypedArena<common::BlockS<'blk, 'tcx>>)
-> FunctionContext<'blk, 'tcx> {
let (param_substs, def_id) = match definition {
Some((instance, ..)) => {
@ -772,7 +761,7 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> {
fn_ty: fn_ty,
param_substs: param_substs,
span: None,
block_arena: block_arena,
//block_arena: block_arena,
lpad_arena: TypedArena::new(),
ccx: ccx,
debug_context: debug_context,
@ -783,7 +772,7 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> {
/// Performs setup on a newly created function, creating the entry
/// scope block and allocating space for the return pointer.
pub fn init(&'blk self, skip_retptr: bool) -> BlockAndBuilder<'blk, 'tcx> {
let entry_bcx = self.new_block("entry-block").build();
let entry_bcx = self.build_new_block("entry-block");
// Use a dummy instruction as the insertion point for all allocas.
// This is later removed in FunctionContext::cleanup.
@ -924,13 +913,7 @@ pub fn trans_instance<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, instance: Instance
let fn_ty = FnType::new(ccx, abi, &sig, &[]);
let (arena, fcx): (TypedArena<_>, FunctionContext);
arena = TypedArena::new();
fcx = FunctionContext::new(ccx,
lldecl,
fn_ty,
Some((instance, &sig, abi)),
&arena);
let fcx = FunctionContext::new(ccx, lldecl, fn_ty, Some((instance, &sig, abi)));
if fcx.mir.is_none() {
bug!("attempted translation of `{}` w/o MIR", instance);
@ -953,9 +936,7 @@ pub fn trans_ctor_shim<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
let sig = ccx.tcx().erase_late_bound_regions_and_normalize(&ctor_ty.fn_sig());
let fn_ty = FnType::new(ccx, Abi::Rust, &sig, &[]);
let (arena, fcx): (TypedArena<_>, FunctionContext);
arena = TypedArena::new();
fcx = FunctionContext::new(ccx, llfndecl, fn_ty, None, &arena);
let fcx = FunctionContext::new(ccx, llfndecl, fn_ty, None);
let bcx = fcx.init(false);
if !fcx.fn_ty.ret.is_ignore() {

View File

@ -16,7 +16,6 @@
pub use self::CalleeData::*;
use arena::TypedArena;
use llvm::{self, ValueRef, get_params};
use rustc::hir::def_id::DefId;
use rustc::ty::subst::Substs;
@ -26,7 +25,7 @@ use attributes;
use base;
use base::*;
use common::{
self, Block, BlockAndBuilder, CrateContext, FunctionContext, SharedCrateContext
self, BlockAndBuilder, CrateContext, FunctionContext, SharedCrateContext
};
use consts;
use declare;
@ -71,25 +70,8 @@ impl<'tcx> Callee<'tcx> {
}
}
/// Trait or impl method call.
pub fn method_call<'blk>(bcx: Block<'blk, 'tcx>,
method_call: ty::MethodCall)
-> Callee<'tcx> {
let method = bcx.tcx().tables().method_map[&method_call];
Callee::method(bcx, method)
}
/// Trait or impl method.
pub fn method<'blk>(bcx: Block<'blk, 'tcx>,
method: ty::MethodCallee<'tcx>) -> Callee<'tcx> {
let substs = bcx.fcx.monomorphize(&method.substs);
Callee::def(bcx.ccx(), method.def_id, substs)
}
/// Function or method definition.
pub fn def<'a>(ccx: &CrateContext<'a, 'tcx>,
def_id: DefId,
substs: &'tcx Substs<'tcx>)
pub fn def<'a>(ccx: &CrateContext<'a, 'tcx>, def_id: DefId, substs: &'tcx Substs<'tcx>)
-> Callee<'tcx> {
let tcx = ccx.tcx();
@ -367,9 +349,7 @@ fn trans_fn_once_adapter_shim<'a, 'tcx>(
let lloncefn = declare::define_internal_fn(ccx, &function_name, llonce_fn_ty);
attributes::set_frame_pointer_elimination(ccx, lloncefn);
let (block_arena, fcx): (TypedArena<_>, FunctionContext);
block_arena = TypedArena::new();
fcx = FunctionContext::new(ccx, lloncefn, fn_ty, None, &block_arena);
let fcx = FunctionContext::new(ccx, lloncefn, fn_ty, None);
let bcx = fcx.init(false);
// the first argument (`self`) will be the (by value) closure env.
@ -518,9 +498,7 @@ fn trans_fn_pointer_shim<'a, 'tcx>(
let llfn = declare::define_internal_fn(ccx, &function_name, tuple_fn_ty);
attributes::set_frame_pointer_elimination(ccx, llfn);
//
let (block_arena, fcx): (TypedArena<_>, FunctionContext);
block_arena = TypedArena::new();
fcx = FunctionContext::new(ccx, llfn, fn_ty, None, &block_arena);
let fcx = FunctionContext::new(ccx, llfn, fn_ty, None);
let bcx = fcx.init(false);
let llargs = get_params(fcx.llfn);
@ -723,17 +701,11 @@ fn trans_call_inner<'a, 'blk, 'tcx>(bcx: BlockAndBuilder<'blk, 'tcx>,
for &llarg in &llargs {
debug!("arg: {:?}", Value(llarg));
}
let normal_bcx = bcx.fcx().new_block("normal-return");
let normal_bcx = bcx.fcx().build_new_block("normal-return");
let landing_pad = bcx.fcx().get_landing_pad();
let llresult = bcx.invoke(
llfn,
&llargs[..],
normal_bcx.llbb,
landing_pad,
lpad,
);
(llresult, normal_bcx.build())
let llresult = bcx.invoke(llfn, &llargs[..], normal_bcx.llbb(), landing_pad, lpad);
(llresult, normal_bcx)
} else {
debug!("calling {:?} at {:?}", Value(llfn), bcx.llbb());
for &llarg in &llargs {

View File

@ -416,14 +416,22 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> {
UnwindExit(val) => {
// Generate a block that will resume unwinding to the
// calling function
let bcx = self.new_block("resume").build();
let bcx = self.build_new_block("resume");
match val {
UnwindKind::LandingPad => {
let addr = self.landingpad_alloca.get()
.unwrap();
let lp = bcx.load(addr);
Lifetime::End.call(&bcx, addr);
base::trans_unwind_resume(&bcx, lp);
if !bcx.sess().target.target.options.custom_unwind_resume {
bcx.resume(lp);
} else {
let exc_ptr = bcx.extract_value(lp, 0);
bcx.call(
bcx.fcx().eh_unwind_resume().reify(bcx.ccx()),
&[exc_ptr],
bcx.lpad().and_then(|b| b.bundle()));
}
}
UnwindKind::CleanupPad(_) => {
let pad = bcx.cleanup_pad(None, &[]);
@ -481,7 +489,7 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> {
let name = scope.block_name("clean");
debug!("generating cleanups for {}", name);
let bcx_in = self.new_block(&name[..]).build();
let bcx_in = self.build_new_block(&name[..]);
let exit_label = label.start(&bcx_in);
let next_llbb = bcx_in.llbb();
let mut bcx_out = bcx_in;
@ -525,7 +533,7 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> {
Some(llbb) => return llbb,
None => {
let name = last_scope.block_name("unwind");
pad_bcx = self.new_block(&name[..]).build();
pad_bcx = self.build_new_block(&name[..]);
last_scope.cached_landing_pad = Some(pad_bcx.llbb());
}
}
@ -682,16 +690,17 @@ pub struct DropValue<'tcx> {
impl<'tcx> DropValue<'tcx> {
fn trans<'blk>(&self, bcx: BlockAndBuilder<'blk, 'tcx>) -> BlockAndBuilder<'blk, 'tcx> {
let skip_dtor = self.skip_dtor;
let _icx = if skip_dtor {
base::push_ctxt("<DropValue as Cleanup>::trans skip_dtor=true")
} else {
base::push_ctxt("<DropValue as Cleanup>::trans skip_dtor=false")
};
if self.is_immediate {
glue::drop_ty_immediate(bcx, self.val, self.ty, self.skip_dtor)
let vp = base::alloc_ty(&bcx, self.ty, "");
Lifetime::Start.call(&bcx, vp);
base::store_ty(&bcx, self.val, vp, self.ty);
let lpad = bcx.lpad();
let bcx = glue::call_drop_glue(bcx, vp, self.ty, self.skip_dtor, lpad);
Lifetime::End.call(&bcx, vp);
bcx
} else {
glue::drop_ty_core(bcx, self.val, self.ty, self.skip_dtor)
let lpad = bcx.lpad();
glue::call_drop_glue(bcx, self.val, self.ty, self.skip_dtor, lpad)
}
}
}

View File

@ -305,7 +305,7 @@ pub struct FunctionContext<'a, 'tcx: 'a> {
pub span: Option<Span>,
// The arena that blocks are allocated from.
pub block_arena: &'a TypedArena<BlockS<'a, 'tcx>>,
//pub block_arena: &'a TypedArena<BlockS<'a, 'tcx>>,
// The arena that landing pads are allocated from.
pub lpad_arena: TypedArena<LandingPad>,
@ -333,18 +333,21 @@ impl<'a, 'tcx> FunctionContext<'a, 'tcx> {
}
}
pub fn new_block(&'a self,
name: &str)
-> Block<'a, 'tcx> {
pub fn new_block(&'a self, name: &str) -> BasicBlockRef {
unsafe {
let name = CString::new(name).unwrap();
let llbb = llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(),
self.llfn,
name.as_ptr());
BlockS::new(llbb, self)
llvm::LLVMAppendBasicBlockInContext(
self.ccx.llcx(),
self.llfn,
name.as_ptr()
)
}
}
pub fn build_new_block(&'a self, name: &str) -> BlockAndBuilder<'a, 'tcx> {
BlockAndBuilder::new(self.new_block(name), self)
}
pub fn monomorphize<T>(&self, value: &T) -> T
where T: TransNormalize<'tcx>
{
@ -441,94 +444,6 @@ impl<'a, 'tcx> FunctionContext<'a, 'tcx> {
}
}
// Basic block context. We create a block context for each basic block
// (single-entry, single-exit sequence of instructions) we generate from Rust
// code. Each basic block we generate is attached to a function, typically
// with many basic blocks per function. All the basic blocks attached to a
// function are organized as a directed graph.
#[must_use]
pub struct BlockS<'blk, 'tcx: 'blk> {
// The BasicBlockRef returned from a call to
// llvm::LLVMAppendBasicBlock(llfn, name), which adds a basic
// block to the function pointed to by llfn. We insert
// instructions into that block by way of this block context.
// The block pointing to this one in the function's digraph.
pub llbb: BasicBlockRef,
// If this block is part of a landing pad, then this is `Some`, indicating what
// kind of landing pad it's in; otherwise this is `None`.
pub lpad: Cell<Option<&'blk LandingPad>>,
// The function context for the function to which this block is
// attached.
pub fcx: &'blk FunctionContext<'blk, 'tcx>,
}
pub type Block<'blk, 'tcx> = &'blk BlockS<'blk, 'tcx>;
impl<'blk, 'tcx> BlockS<'blk, 'tcx> {
pub fn new(llbb: BasicBlockRef,
fcx: &'blk FunctionContext<'blk, 'tcx>)
-> Block<'blk, 'tcx> {
fcx.block_arena.alloc(BlockS {
llbb: llbb,
lpad: Cell::new(None),
fcx: fcx
})
}
pub fn ccx(&self) -> &'blk CrateContext<'blk, 'tcx> {
self.fcx.ccx
}
pub fn fcx(&self) -> &'blk FunctionContext<'blk, 'tcx> {
self.fcx
}
pub fn tcx(&self) -> TyCtxt<'blk, 'tcx, 'tcx> {
self.fcx.ccx.tcx()
}
pub fn sess(&self) -> &'blk Session { self.fcx.ccx.sess() }
pub fn lpad(&self) -> Option<&'blk LandingPad> {
self.lpad.get()
}
pub fn set_lpad_ref(&self, lpad: Option<&'blk LandingPad>) {
// FIXME: use an IVar?
self.lpad.set(lpad);
}
pub fn set_lpad(&self, lpad: Option<LandingPad>) {
self.set_lpad_ref(lpad.map(|p| &*self.fcx().lpad_arena.alloc(p)))
}
pub fn mir(&self) -> Ref<'tcx, Mir<'tcx>> {
self.fcx.mir()
}
pub fn name(&self, name: ast::Name) -> String {
name.to_string()
}
pub fn node_id_to_string(&self, id: ast::NodeId) -> String {
self.tcx().map.node_to_string(id).to_string()
}
pub fn to_str(&self) -> String {
format!("[block {:p}]", self)
}
pub fn monomorphize<T>(&self, value: &T) -> T
where T: TransNormalize<'tcx>
{
monomorphize::apply_param_substs(self.fcx.ccx.shared(),
self.fcx.param_substs,
value)
}
pub fn build(&'blk self) -> BlockAndBuilder<'blk, 'tcx> {
BlockAndBuilder::new(self, OwnedBuilder::new_with_ccx(self.ccx()))
}
}
pub struct OwnedBuilder<'blk, 'tcx: 'blk> {
builder: Builder<'blk, 'tcx>
@ -559,77 +474,78 @@ impl<'blk, 'tcx> Drop for OwnedBuilder<'blk, 'tcx> {
#[must_use]
pub struct BlockAndBuilder<'blk, 'tcx: 'blk> {
bcx: Block<'blk, 'tcx>,
// The BasicBlockRef returned from a call to
// llvm::LLVMAppendBasicBlock(llfn, name), which adds a basic
// block to the function pointed to by llfn. We insert
// instructions into that block by way of this block context.
// The block pointing to this one in the function's digraph.
llbb: BasicBlockRef,
// If this block is part of a landing pad, then this is `Some`, indicating what
// kind of landing pad it's in; otherwise this is `None`.
lpad: Cell<Option<&'blk LandingPad>>,
// The function context for the function to which this block is
// attached.
fcx: &'blk FunctionContext<'blk, 'tcx>,
owned_builder: OwnedBuilder<'blk, 'tcx>,
}
impl<'blk, 'tcx> BlockAndBuilder<'blk, 'tcx> {
pub fn new(bcx: Block<'blk, 'tcx>, owned_builder: OwnedBuilder<'blk, 'tcx>) -> Self {
pub fn new(llbb: BasicBlockRef, fcx: &'blk FunctionContext<'blk, 'tcx>) -> Self {
let owned_builder = OwnedBuilder::new_with_ccx(fcx.ccx);
// Set the builder's position to this block's end.
owned_builder.builder.position_at_end(bcx.llbb);
owned_builder.builder.position_at_end(llbb);
BlockAndBuilder {
bcx: bcx,
llbb: llbb,
lpad: Cell::new(None),
fcx: fcx,
owned_builder: owned_builder,
}
}
pub fn with_block<F, R>(&self, f: F) -> R
where F: FnOnce(Block<'blk, 'tcx>) -> R
{
let result = f(self.bcx);
self.position_at_end(self.bcx.llbb);
result
}
pub fn at_start<F, R>(&self, f: F) -> R
where F: FnOnce(&BlockAndBuilder<'blk, 'tcx>) -> R
{
self.position_at_start(self.bcx.llbb);
self.position_at_start(self.llbb);
let r = f(self);
self.position_at_end(self.bcx.llbb);
self.position_at_end(self.llbb);
r
}
// Methods delegated to bcx
pub fn ccx(&self) -> &'blk CrateContext<'blk, 'tcx> {
self.bcx.ccx()
self.fcx.ccx
}
pub fn fcx(&self) -> &'blk FunctionContext<'blk, 'tcx> {
self.bcx.fcx()
self.fcx
}
pub fn tcx(&self) -> TyCtxt<'blk, 'tcx, 'tcx> {
self.bcx.tcx()
self.fcx.ccx.tcx()
}
pub fn sess(&self) -> &'blk Session {
self.bcx.sess()
self.fcx.ccx.sess()
}
pub fn llbb(&self) -> BasicBlockRef {
self.bcx.llbb
self.llbb
}
pub fn mir(&self) -> Ref<'tcx, Mir<'tcx>> {
self.bcx.mir()
}
pub fn monomorphize<T>(&self, value: &T) -> T
where T: TransNormalize<'tcx>
{
self.bcx.monomorphize(value)
self.fcx.mir()
}
pub fn set_lpad(&self, lpad: Option<LandingPad>) {
self.bcx.set_lpad(lpad)
self.set_lpad_ref(lpad.map(|p| &*self.fcx().lpad_arena.alloc(p)))
}
pub fn set_lpad_ref(&self, lpad: Option<&'blk LandingPad>) {
// FIXME: use an IVar?
self.bcx.set_lpad_ref(lpad);
self.lpad.set(lpad);
}
pub fn lpad(&self) -> Option<&'blk LandingPad> {
self.bcx.lpad()
self.lpad.get()
}
}

View File

@ -33,7 +33,6 @@ use type_::Type;
use value::Value;
use Disr;
use arena::TypedArena;
use syntax_pos::DUMMY_SP;
pub fn trans_exchange_free_dyn<'blk, 'tcx>(bcx: BlockAndBuilder<'blk, 'tcx>,
@ -121,19 +120,23 @@ pub fn get_drop_glue_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
}
pub fn drop_ty<'blk, 'tcx>(bcx: BlockAndBuilder<'blk, 'tcx>,
v: ValueRef,
t: Ty<'tcx>) -> BlockAndBuilder<'blk, 'tcx> {
drop_ty_core(bcx, v, t, false)
fn drop_ty<'blk, 'tcx>(
bcx: BlockAndBuilder<'blk, 'tcx>,
v: ValueRef,
t: Ty<'tcx>,
) -> BlockAndBuilder<'blk, 'tcx> {
call_drop_glue(bcx, v, t, false, None)
}
pub fn drop_ty_core<'blk, 'tcx>(bcx: BlockAndBuilder<'blk, 'tcx>,
v: ValueRef,
t: Ty<'tcx>,
skip_dtor: bool)
-> BlockAndBuilder<'blk, 'tcx> {
pub fn call_drop_glue<'blk, 'tcx>(
bcx: BlockAndBuilder<'blk, 'tcx>,
v: ValueRef,
t: Ty<'tcx>,
skip_dtor: bool,
lpad: Option<&'blk LandingPad>,
) -> BlockAndBuilder<'blk, 'tcx> {
// NB: v is an *alias* of type t here, not a direct value.
debug!("drop_ty_core(t={:?}, skip_dtor={})", t, skip_dtor);
debug!("call_drop_glue(t={:?}, skip_dtor={})", t, skip_dtor);
let _icx = push_ctxt("drop_ty");
if bcx.fcx().type_needs_drop(t) {
let ccx = bcx.ccx();
@ -151,25 +154,11 @@ pub fn drop_ty_core<'blk, 'tcx>(bcx: BlockAndBuilder<'blk, 'tcx>,
};
// No drop-hint ==> call standard drop glue
bcx.call(glue, &[ptr], bcx.lpad().and_then(|b| b.bundle()));
bcx.call(glue, &[ptr], lpad.and_then(|b| b.bundle()));
}
bcx
}
pub fn drop_ty_immediate<'blk, 'tcx>(bcx: BlockAndBuilder<'blk, 'tcx>,
v: ValueRef,
t: Ty<'tcx>,
skip_dtor: bool)
-> BlockAndBuilder<'blk, 'tcx> {
let _icx = push_ctxt("drop_ty_immediate");
let vp = alloc_ty(&bcx, t, "");
Lifetime::Start.call(&bcx, vp);
store_ty(&bcx, v, vp, t);
let bcx = drop_ty_core(bcx, vp, t, skip_dtor);
Lifetime::End.call(&bcx, vp);
bcx
}
pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ValueRef {
get_drop_glue_core(ccx, DropGlueKind::Ty(t))
}
@ -221,9 +210,7 @@ pub fn implement_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
assert_eq!(g.ty(), get_drop_glue_type(tcx, g.ty()));
let (llfn, fn_ty) = ccx.drop_glues().borrow().get(&g).unwrap().clone();
let (arena, fcx): (TypedArena<_>, FunctionContext);
arena = TypedArena::new();
fcx = FunctionContext::new(ccx, llfn, fn_ty, None, &arena);
let fcx = FunctionContext::new(ccx, llfn, fn_ty, None);
let bcx = fcx.init(false);
@ -588,15 +575,15 @@ fn drop_structural_ty<'blk, 'tcx>(cx: BlockAndBuilder<'blk, 'tcx>,
// from the outer function, and any other use case will only
// call this for an already-valid enum in which case the `ret
// void` will never be hit.
let ret_void_cx = fcx.new_block("enum-iter-ret-void").build();
let ret_void_cx = fcx.build_new_block("enum-iter-ret-void");
ret_void_cx.ret_void();
let llswitch = cx.switch(lldiscrim_a, ret_void_cx.llbb(), n_variants);
let next_cx = fcx.new_block("enum-iter-next").build();
let next_cx = fcx.build_new_block("enum-iter-next");
for variant in &adt.variants {
let variant_cx_name = format!("enum-iter-variant-{}",
&variant.disr_val.to_string());
let variant_cx = fcx.new_block(&variant_cx_name).build();
let variant_cx = fcx.build_new_block(&variant_cx_name);
let case_val = adt::trans_case(&cx, t, Disr::from(variant.disr_val));
variant_cx.add_case(llswitch, case_val, variant_cx.llbb());
let variant_cx = iter_variant(variant_cx, t, value, variant, substs);

View File

@ -10,7 +10,6 @@
#![allow(non_upper_case_globals)]
use arena::TypedArena;
use intrinsics::{self, Intrinsic};
use libc;
use llvm;
@ -812,10 +811,10 @@ fn trans_msvc_try<'blk, 'tcx>(bcx: &BlockAndBuilder<'blk, 'tcx>,
bcx.set_personality_fn(bcx.fcx().eh_personality());
let normal = bcx.fcx().new_block("normal").build();
let catchswitch = bcx.fcx().new_block("catchswitch").build();
let catchpad = bcx.fcx().new_block("catchpad").build();
let caught = bcx.fcx().new_block("caught").build();
let normal = bcx.fcx().build_new_block("normal");
let catchswitch = bcx.fcx().build_new_block("catchswitch");
let catchpad = bcx.fcx().build_new_block("catchpad");
let caught = bcx.fcx().build_new_block("caught");
let func = llvm::get_param(bcx.fcx().llfn, 0);
let data = llvm::get_param(bcx.fcx().llfn, 1);
@ -930,8 +929,8 @@ fn trans_gnu_try<'blk, 'tcx>(bcx: &BlockAndBuilder<'blk, 'tcx>,
// expected to be `*mut *mut u8` for this to actually work, but that's
// managed by the standard library.
let then = bcx.fcx().new_block("then").build();
let catch = bcx.fcx().new_block("catch").build();
let then = bcx.fcx().build_new_block("then");
let catch = bcx.fcx().build_new_block("catch");
let func = llvm::get_param(bcx.fcx().llfn, 0);
let data = llvm::get_param(bcx.fcx().llfn, 1);
@ -978,9 +977,7 @@ fn gen_fn<'a, 'tcx>(fcx: &FunctionContext<'a, 'tcx>,
sig: ty::Binder(sig)
}));
let llfn = declare::define_internal_fn(ccx, name, rust_fn_ty);
let (fcx, block_arena);
block_arena = TypedArena::new();
fcx = FunctionContext::new(ccx, llfn, fn_ty, None, &block_arena);
let fcx = FunctionContext::new(ccx, llfn, fn_ty, None);
trans(fcx.init(true));
fcx.cleanup();
llfn

View File

@ -9,7 +9,6 @@
// except according to those terms.
use attributes;
use arena::TypedArena;
use llvm::{ValueRef, get_params};
use rustc::traits;
use abi::FnType;
@ -84,9 +83,7 @@ pub fn trans_object_shim<'a, 'tcx>(ccx: &'a CrateContext<'a, 'tcx>,
let llfn = declare::define_internal_fn(ccx, &function_name, callee.ty);
attributes::set_frame_pointer_elimination(ccx, llfn);
let (block_arena, fcx): (TypedArena<_>, FunctionContext);
block_arena = TypedArena::new();
fcx = FunctionContext::new(ccx, llfn, fn_ty, None, &block_arena);
let fcx = FunctionContext::new(ccx, llfn, fn_ty, None);
let mut bcx = fcx.init(false);
let dest = fcx.llretslotptr.get();

View File

@ -16,19 +16,18 @@ use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use rustc::mir::{self, Location, TerminatorKind};
use rustc::mir::visit::{Visitor, LvalueContext};
use rustc::mir::traversal;
use common::{self, Block, BlockAndBuilder};
use common::{self, BlockAndBuilder};
use glue;
use super::rvalue;
pub fn lvalue_locals<'bcx, 'tcx>(bcx: Block<'bcx,'tcx>,
pub fn lvalue_locals<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx,'tcx>,
mir: &mir::Mir<'tcx>) -> BitVector {
let bcx = bcx.build();
let mut analyzer = LocalAnalyzer::new(mir, &bcx);
analyzer.visit_mir(mir);
for (index, ty) in mir.local_decls.iter().map(|l| l.ty).enumerate() {
let ty = bcx.monomorphize(&ty);
let ty = bcx.fcx().monomorphize(&ty);
debug!("local {} has type {:?}", index, ty);
if ty.is_scalar() ||
ty.is_unique() ||
@ -142,7 +141,7 @@ impl<'mir, 'bcx, 'tcx> Visitor<'tcx> for LocalAnalyzer<'mir, 'bcx, 'tcx> {
if let mir::Lvalue::Local(_) = proj.base {
let ty = proj.base.ty(self.mir, self.bcx.tcx());
let ty = self.bcx.monomorphize(&ty.to_ty(self.bcx.tcx()));
let ty = self.bcx.fcx().monomorphize(&ty.to_ty(self.bcx.tcx()));
if common::type_is_imm_pair(self.bcx.ccx(), ty) {
if let mir::ProjectionElem::Field(..) = proj.elem {
if let LvalueContext::Consume = context {
@ -172,7 +171,7 @@ impl<'mir, 'bcx, 'tcx> Visitor<'tcx> for LocalAnalyzer<'mir, 'bcx, 'tcx> {
LvalueContext::Drop => {
let ty = lvalue.ty(self.mir, self.bcx.tcx());
let ty = self.bcx.monomorphize(&ty.to_ty(self.bcx.tcx()));
let ty = self.bcx.fcx().monomorphize(&ty.to_ty(self.bcx.tcx()));
// Only need the lvalue if we're actually dropping it.
if glue::type_needs_drop(self.bcx.tcx(), ty) {
@ -200,10 +199,7 @@ pub enum CleanupKind {
Internal { funclet: mir::BasicBlock }
}
pub fn cleanup_kinds<'bcx,'tcx>(_bcx: Block<'bcx,'tcx>,
mir: &mir::Mir<'tcx>)
-> IndexVec<mir::BasicBlock, CleanupKind>
{
pub fn cleanup_kinds<'bcx,'tcx>(mir: &mir::Mir<'tcx>) -> IndexVec<mir::BasicBlock, CleanupKind> {
fn discover_masters<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
mir: &mir::Mir<'tcx>) {
for (bb, data) in mir.basic_blocks().iter_enumerated() {

View File

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use llvm::{self, ValueRef};
use llvm::{self, ValueRef, BasicBlockRef};
use rustc_const_eval::{ErrKind, ConstEvalErr, note_const_eval_err};
use rustc::middle::lang_items;
use rustc::ty::{self, layout};
@ -17,7 +17,7 @@ use abi::{Abi, FnType, ArgType};
use adt;
use base::{self, Lifetime};
use callee::{Callee, CalleeData, Fn, Intrinsic, NamedTupleConstructor, Virtual};
use common::{self, Block, BlockAndBuilder, LandingPad};
use common::{self, BlockAndBuilder, LandingPad};
use common::{C_bool, C_str_slice, C_struct, C_u32, C_undef};
use consts;
use debuginfo::DebugLoc;
@ -28,6 +28,7 @@ use type_of;
use glue;
use type_::Type;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc_data_structures::fx::FxHashMap;
use syntax::symbol::Symbol;
@ -42,18 +43,24 @@ use std::cell::Ref as CellRef;
use std::ptr;
impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
pub fn trans_block(&mut self, bb: mir::BasicBlock) {
let mut bcx = self.bcx(bb);
pub fn trans_block(&mut self, bb: mir::BasicBlock,
lpads: &IndexVec<mir::BasicBlock, Option<LandingPad>>) {
let mut bcx = self.build_block(bb);
let data = &CellRef::clone(&self.mir)[bb];
debug!("trans_block({:?}={:?})", bb, data);
let lpad = match self.cleanup_kinds[bb] {
CleanupKind::Internal { funclet } => lpads[funclet].as_ref(),
_ => lpads[bb].as_ref(),
};
// Create the cleanup bundle, if needed.
let cleanup_pad = bcx.lpad().and_then(|lp| lp.cleanuppad());
let cleanup_bundle = bcx.lpad().and_then(|l| l.bundle());
let cleanup_pad = lpad.and_then(|lp| lp.cleanuppad());
let cleanup_bundle = lpad.and_then(|l| l.bundle());
let funclet_br = |this: &Self, bcx: BlockAndBuilder, bb: mir::BasicBlock| {
let lltarget = this.blocks[bb].llbb;
let lltarget = this.blocks[bb];
if let Some(cp) = cleanup_pad {
match this.cleanup_kinds[bb] {
CleanupKind::Funclet => {
@ -70,7 +77,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
};
let llblock = |this: &mut Self, target: mir::BasicBlock| {
let lltarget = this.blocks[target].llbb;
let lltarget = this.blocks[target];
if let Some(cp) = cleanup_pad {
match this.cleanup_kinds[target] {
@ -79,7 +86,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
debug!("llblock: creating cleanup trampoline for {:?}", target);
let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
let trampoline = this.fcx.new_block(name).build();
let trampoline = this.fcx.build_new_block(name);
trampoline.set_personality_fn(this.fcx.eh_personality());
trampoline.cleanup_ret(cp, Some(lltarget));
trampoline.llbb()
@ -93,7 +100,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
(this.cleanup_kinds[bb], this.cleanup_kinds[target])
{
// jump *into* cleanup - need a landing pad if GNU
this.landing_pad_to(target).llbb
this.landing_pad_to(target)
} else {
lltarget
}
@ -122,7 +129,16 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
let ps = self.get_personality_slot(&bcx);
let lp = bcx.load(ps);
Lifetime::End.call(&bcx, ps);
base::trans_unwind_resume(&bcx, lp);
if !bcx.sess().target.target.options.custom_unwind_resume {
bcx.resume(lp);
} else {
let exc_ptr = bcx.extract_value(lp, 0);
bcx.call(
bcx.fcx().eh_unwind_resume().reify(bcx.ccx()),
&[exc_ptr],
cleanup_bundle,
);
}
}
}
@ -158,7 +174,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
// We're generating an exhaustive switch, so the else branch
// can't be hit. Branching to an unreachable instruction
// lets LLVM know this
_ => (None, self.unreachable_block().llbb)
_ => (None, self.unreachable_block())
};
let switch = bcx.switch(discr, default_blk, targets.len());
assert_eq!(adt_def.variants.len(), targets.len());
@ -228,7 +244,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
mir::TerminatorKind::Drop { ref location, target, unwind } => {
let ty = location.ty(&self.mir, bcx.tcx()).to_ty(bcx.tcx());
let ty = bcx.monomorphize(&ty);
let ty = bcx.fcx().monomorphize(&ty);
// Double check for necessity to drop
if !glue::type_needs_drop(bcx.tcx(), ty) {
@ -263,7 +279,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
if let Some(unwind) = unwind {
bcx.invoke(drop_fn,
&[llvalue],
self.blocks[target].llbb,
self.blocks[target],
llblock(self, unwind),
cleanup_bundle);
} else {
@ -304,15 +320,15 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
// Create the failure block and the conditional branch to it.
let lltarget = llblock(self, target);
let panic_block = self.fcx.new_block("panic");
let panic_block = self.fcx.build_new_block("panic");
if expected {
bcx.cond_br(cond, lltarget, panic_block.llbb);
bcx.cond_br(cond, lltarget, panic_block.llbb());
} else {
bcx.cond_br(cond, panic_block.llbb, lltarget);
bcx.cond_br(cond, panic_block.llbb(), lltarget);
}
// After this point, bcx is the block for the call to panic.
bcx = panic_block.build();
bcx = panic_block;
debug_loc.apply_to_bcx(&bcx);
// Get the location information.
@ -385,7 +401,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
if let Some(unwind) = cleanup {
bcx.invoke(llfn,
&args,
self.unreachable_block().llbb,
self.unreachable_block(),
llblock(self, unwind),
cleanup_bundle);
} else {
@ -485,7 +501,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
if let Some(unwind) = *cleanup {
bcx.invoke(drop_fn,
&[llvalue],
self.blocks[target].llbb,
self.blocks[target],
llblock(self, unwind),
cleanup_bundle);
} else {
@ -508,7 +524,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
let extra_args = &args[sig.inputs().len()..];
let extra_args = extra_args.iter().map(|op_arg| {
let op_ty = op_arg.ty(&self.mir, bcx.tcx());
bcx.monomorphize(&op_ty)
bcx.fcx().monomorphize(&op_ty)
}).collect::<Vec<_>>();
let fn_ty = callee.direct_fn_type(bcx.ccx(), &extra_args);
@ -621,13 +637,13 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
};
let invokeret = bcx.invoke(fn_ptr,
&llargs,
ret_bcx.llbb,
ret_bcx,
llblock(self, cleanup),
cleanup_bundle);
fn_ty.apply_attrs_callsite(invokeret);
if destination.is_some() {
let ret_bcx = ret_bcx.build();
if let Some((_, target)) = *destination {
let ret_bcx = self.build_block(target);
ret_bcx.at_start(|ret_bcx| {
debug_loc.apply_to_bcx(ret_bcx);
let op = OperandRef {
@ -824,8 +840,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
/// Return the landingpad wrapper around the given basic block
///
/// No-op in MSVC SEH scheme.
fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> Block<'bcx, 'tcx>
{
fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> BasicBlockRef {
if let Some(block) = self.landing_pads[target_bb] {
return block;
}
@ -834,12 +849,11 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
return self.blocks[target_bb];
}
let target = self.bcx(target_bb);
let target = self.build_block(target_bb);
let block = self.fcx.new_block("cleanup");
self.landing_pads[target_bb] = Some(block);
let bcx = self.fcx.build_new_block("cleanup");
self.landing_pads[target_bb] = Some(bcx.llbb());
let bcx = block.build();
let ccx = bcx.ccx();
let llpersonality = self.fcx.eh_personality();
let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
@ -848,46 +862,47 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
let slot = self.get_personality_slot(&bcx);
bcx.store(llretval, slot);
bcx.br(target.llbb());
block
bcx.llbb()
}
pub fn init_cpad(&mut self, bb: mir::BasicBlock) {
let bcx = self.bcx(bb);
pub fn init_cpad(&mut self, bb: mir::BasicBlock,
lpads: &mut IndexVec<mir::BasicBlock, Option<LandingPad>>) {
let bcx = self.build_block(bb);
let data = &self.mir[bb];
debug!("init_cpad({:?})", data);
match self.cleanup_kinds[bb] {
CleanupKind::NotCleanup => {
bcx.set_lpad(None)
lpads[bb] = None;
}
_ if !base::wants_msvc_seh(bcx.sess()) => {
bcx.set_lpad(Some(LandingPad::gnu()))
lpads[bb] = Some(LandingPad::gnu());
}
CleanupKind::Internal { funclet } => {
CleanupKind::Internal { funclet: _ } => {
// FIXME: is this needed?
bcx.set_personality_fn(self.fcx.eh_personality());
bcx.set_lpad_ref(self.bcx(funclet).lpad());
lpads[bb] = None;
}
CleanupKind::Funclet => {
bcx.set_personality_fn(self.fcx.eh_personality());
DebugLoc::None.apply_to_bcx(&bcx);
let cleanup_pad = bcx.cleanup_pad(None, &[]);
bcx.set_lpad(Some(LandingPad::msvc(cleanup_pad)));
lpads[bb] = Some(LandingPad::msvc(cleanup_pad));
}
};
}
fn unreachable_block(&mut self) -> Block<'bcx, 'tcx> {
fn unreachable_block(&mut self) -> BasicBlockRef {
self.unreachable_block.unwrap_or_else(|| {
let bl = self.fcx.new_block("unreachable");
bl.build().unreachable();
self.unreachable_block = Some(bl);
bl
let bl = self.fcx.build_new_block("unreachable");
bl.unreachable();
self.unreachable_block = Some(bl.llbb());
bl.llbb()
})
}
fn bcx(&self, bb: mir::BasicBlock) -> BlockAndBuilder<'bcx, 'tcx> {
self.blocks[bb].build()
fn build_block(&self, bb: mir::BasicBlock) -> BlockAndBuilder<'bcx, 'tcx> {
BlockAndBuilder::new(self.blocks[bb], self.fcx)
}
fn make_return_dest(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>,

View File

@ -952,7 +952,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
-> Const<'tcx>
{
debug!("trans_constant({:?})", constant);
let ty = bcx.monomorphize(&constant.ty);
let ty = bcx.fcx().monomorphize(&constant.ty);
let result = match constant.literal.clone() {
mir::Literal::Item { def_id, substs } => {
// Shortcut for zero-sized types, including function item
@ -962,7 +962,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
return Const::new(C_null(llty), ty);
}
let substs = bcx.monomorphize(&substs);
let substs = bcx.fcx().monomorphize(&substs);
let instance = Instance::new(def_id, substs);
MirConstContext::trans_def(bcx.ccx(), instance, IndexVec::new())
}

View File

@ -103,7 +103,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
let ptr = self.trans_consume(bcx, base);
let projected_ty = LvalueTy::from_ty(ptr.ty)
.projection_ty(tcx, &mir::ProjectionElem::Deref);
let projected_ty = bcx.monomorphize(&projected_ty);
let projected_ty = bcx.fcx().monomorphize(&projected_ty);
let (llptr, llextra) = match ptr.val {
OperandValue::Immediate(llptr) => (llptr, ptr::null_mut()),
OperandValue::Pair(llptr, llextra) => (llptr, llextra),
@ -118,7 +118,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
mir::Lvalue::Projection(ref projection) => {
let tr_base = self.trans_lvalue(bcx, &projection.base);
let projected_ty = tr_base.ty.projection_ty(tcx, &projection.elem);
let projected_ty = bcx.monomorphize(&projected_ty);
let projected_ty = bcx.fcx().monomorphize(&projected_ty);
let project_index = |llindex| {
let element = if let ty::TySlice(_) = tr_base.ty.to_ty(tcx).sty {

View File

@ -9,13 +9,13 @@
// except according to those terms.
use libc::c_uint;
use llvm::{self, ValueRef};
use rustc::ty::{self, layout};
use llvm::{self, ValueRef, BasicBlockRef};
use rustc::ty;
use rustc::mir;
use rustc::mir::tcx::LvalueTy;
use session::config::FullDebugInfo;
use base;
use common::{self, Block, BlockAndBuilder, CrateContext, FunctionContext, C_null};
use common::{self, BlockAndBuilder, CrateContext, FunctionContext, C_null, LandingPad};
use debuginfo::{self, declare_local, DebugLoc, VariableAccess, VariableKind, FunctionDebugContext};
use type_of;
@ -54,17 +54,17 @@ pub struct MirContext<'bcx, 'tcx:'bcx> {
llpersonalityslot: Option<ValueRef>,
/// A `Block` for each MIR `BasicBlock`
blocks: IndexVec<mir::BasicBlock, Block<'bcx, 'tcx>>,
blocks: IndexVec<mir::BasicBlock, BasicBlockRef>,
/// The funclet status of each basic block
cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,
/// This stores the landing-pad block for a given BB, computed lazily on GNU
/// and eagerly on MSVC.
landing_pads: IndexVec<mir::BasicBlock, Option<Block<'bcx, 'tcx>>>,
landing_pads: IndexVec<mir::BasicBlock, Option<BasicBlockRef>>,
/// Cached unreachable block
unreachable_block: Option<Block<'bcx, 'tcx>>,
unreachable_block: Option<BasicBlockRef>,
/// The location where each MIR arg/var/tmp/ret is stored. This is
/// usually an `LvalueRef` representing an alloca, but not always:
@ -186,13 +186,11 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) {
// Analyze the temps to determine which must be lvalues
// FIXME
let (lvalue_locals, cleanup_kinds) = bcx.with_block(|bcx| {
(analyze::lvalue_locals(bcx, &mir),
analyze::cleanup_kinds(bcx, &mir))
});
let lvalue_locals = analyze::lvalue_locals(&bcx, &mir);
let cleanup_kinds = analyze::cleanup_kinds(&mir);
// Allocate a `Block` for every basic block
let block_bcxs: IndexVec<mir::BasicBlock, Block<'blk,'tcx>> =
let block_bcxs: IndexVec<mir::BasicBlock, BasicBlockRef> =
mir.basic_blocks().indices().map(|bb| {
if bb == mir::START_BLOCK {
fcx.new_block("start")
@ -222,7 +220,7 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) {
let mut allocate_local = |local| {
let decl = &mir.local_decls[local];
let ty = bcx.monomorphize(&decl.ty);
let ty = bcx.fcx().monomorphize(&decl.ty);
if let Some(name) = decl.name {
// User variable
@ -276,7 +274,7 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) {
// Branch to the START block
let start_bcx = mircx.blocks[mir::START_BLOCK];
bcx.br(start_bcx.llbb);
bcx.br(start_bcx);
// Up until here, IR instructions for this function have explicitly not been annotated with
// source code location, so we don't step into call setup code. From here on, source location
@ -287,23 +285,26 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) {
let mut rpo = traversal::reverse_postorder(&mir);
let mut lpads: IndexVec<mir::BasicBlock, Option<LandingPad>> =
IndexVec::from_elem(None, mir.basic_blocks());
// Prepare each block for translation.
for (bb, _) in rpo.by_ref() {
mircx.init_cpad(bb);
mircx.init_cpad(bb, &mut lpads);
}
rpo.reset();
// Translate the body of each block using reverse postorder
for (bb, _) in rpo {
visited.insert(bb.index());
mircx.trans_block(bb);
mircx.trans_block(bb, &lpads);
}
// Remove blocks that haven't been visited, or have no
// predecessors.
for bb in mir.basic_blocks().indices() {
let block = mircx.blocks[bb];
let block = BasicBlock(block.llbb);
let block = BasicBlock(block);
// Unreachable block
if !visited.contains(bb.index()) {
debug!("trans_mir: block {:?} was not visited", bb);
@ -338,7 +339,7 @@ fn arg_local_refs<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>,
mir.args_iter().enumerate().map(|(arg_index, local)| {
let arg_decl = &mir.local_decls[local];
let arg_ty = bcx.monomorphize(&arg_decl.ty);
let arg_ty = bcx.fcx().monomorphize(&arg_decl.ty);
if Some(local) == mir.spread_arg {
// This argument (e.g. the last argument in the "rust-call" ABI)

View File

@ -197,7 +197,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
let llval = [a, b][f.index()];
let op = OperandRef {
val: OperandValue::Immediate(llval),
ty: bcx.monomorphize(&ty)
ty: bcx.fcx().monomorphize(&ty)
};
// Handle nested pairs.

View File

@ -52,7 +52,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
}
mir::Rvalue::Cast(mir::CastKind::Unsize, ref source, cast_ty) => {
let cast_ty = bcx.monomorphize(&cast_ty);
let cast_ty = bcx.fcx().monomorphize(&cast_ty);
if common::type_is_fat_ptr(bcx.tcx(), cast_ty) {
// into-coerce of a thin pointer to a fat pointer - just
@ -187,7 +187,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
mir::Rvalue::Cast(ref kind, ref source, cast_ty) => {
let operand = self.trans_operand(&bcx, source);
debug!("cast operand is {:?}", operand);
let cast_ty = bcx.monomorphize(&cast_ty);
let cast_ty = bcx.fcx().monomorphize(&cast_ty);
let val = match *kind {
mir::CastKind::ReifyFnPointer => {
@ -444,7 +444,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
}
mir::Rvalue::Box(content_ty) => {
let content_ty: Ty<'tcx> = bcx.monomorphize(&content_ty);
let content_ty: Ty<'tcx> = bcx.fcx().monomorphize(&content_ty);
let llty = type_of::type_of(bcx.ccx(), content_ty);
let llsize = machine::llsize_of(bcx.ccx(), llty);
let align = type_of::align_of(bcx.ccx(), content_ty);

View File

@ -35,9 +35,9 @@ pub fn slice_for_each<'blk, 'tcx, F>(bcx: BlockAndBuilder<'blk, 'tcx>,
bcx.inbounds_gep(a, &[b])
};
let body_bcx = fcx.new_block("slice_loop_body").build();
let next_bcx = fcx.new_block("slice_loop_next").build();
let header_bcx = fcx.new_block("slice_loop_header").build();
let body_bcx = fcx.build_new_block("slice_loop_body");
let next_bcx = fcx.build_new_block("slice_loop_next");
let header_bcx = fcx.build_new_block("slice_loop_header");
let start = if zst {
C_uint(bcx.ccx(), 0usize)