trans::build: Change @mut Block to &Block or &mut Block

Use &mut Block and &Block references where possible in the builder
functions in trans::build.

@mut Block remains in a few functions where I could not (at least not yet)
track down the runtime borrowck failures (see the sketch below).
blake2-ppc 2013-09-30 19:37:22 +02:00
parent b88517ec93
commit f4d8d8c122
6 changed files with 129 additions and 116 deletions
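
The runtime borrowck failures mentioned in the message come from @mut's
dynamic borrow checking: passing an @mut Block where &mut Block is expected
borrows the box for the whole call expression, so a nested builder call on
the same block takes a second, overlapping borrow and fails at runtime.
Below is a minimal sketch of that overlap and of the hoisting workaround
that several hunks in this commit apply. It is written in modern Rust, with
RefCell standing in for the long-removed @mut (both enforce borrows
dynamically); all names are hypothetical.

// A minimal sketch of the failure mode this commit works around.
// @mut is long gone; RefCell stands in for it here, since both enforce
// borrows dynamically at runtime. All names are hypothetical.
use std::cell::RefCell;

struct Block { terminated: bool }

fn load(_cx: &mut Block) -> u32 { 1 }

fn resume(cx: &mut Block, _exn: u32) { cx.terminated = true; }

fn main() {
    let bcx = RefCell::new(Block { terminated: false });

    // Nesting the calls, as the old Resume(bcx, Load(bcx, ...)) did,
    // takes the outer borrow first, then panics ("already borrowed")
    // when the inner call tries to borrow the same cell again:
    //
    //     resume(&mut bcx.borrow_mut(), load(&mut bcx.borrow_mut()));
    //
    // Hoisting the inner call into a let, as the hunks below do,
    // keeps the two borrows sequential instead of overlapping:
    let ll_load = load(&mut bcx.borrow_mut());
    resume(&mut bcx.borrow_mut(), ll_load);
    assert!(bcx.borrow().terminated);
}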


@@ -1407,7 +1407,10 @@ pub fn cleanup_and_leave(bcx: @mut Block,
}
match leave {
Some(target) => Br(bcx, target),
None => { Resume(bcx, Load(bcx, bcx.fcx.personality.unwrap())); }
None => {
let ll_load = Load(bcx, bcx.fcx.personality.unwrap());
Resume(bcx, ll_load);
}
}
}


@@ -23,17 +23,17 @@ use middle::trans::type_::Type;
use std::cast;
use std::libc::{c_uint, c_ulonglong, c_char};
pub fn terminate(cx: @mut Block, _: &str) {
pub fn terminate(cx: &mut Block, _: &str) {
cx.terminated = true;
}
pub fn check_not_terminated(cx: @mut Block) {
pub fn check_not_terminated(cx: &Block) {
if cx.terminated {
fail!("already terminated!");
}
}
pub fn B(cx: @mut Block) -> Builder {
pub fn B(cx: &Block) -> Builder {
let b = cx.fcx.ccx.builder();
b.position_at_end(cx.llbb);
b
@@ -47,7 +47,7 @@ pub fn B(cx: @mut Block) -> Builder {
// for (fail/break/return statements, call to diverging functions, etc), and
// further instructions to the block should simply be ignored.
pub fn RetVoid(cx: @mut Block) {
pub fn RetVoid(cx: &mut Block) {
if cx.unreachable { return; }
check_not_terminated(cx);
terminate(cx, "RetVoid");
@@ -83,7 +83,7 @@ pub fn CondBr(cx: @mut Block, If: ValueRef, Then: BasicBlockRef,
B(cx).cond_br(If, Then, Else);
}
pub fn Switch(cx: @mut Block, V: ValueRef, Else: BasicBlockRef, NumCases: uint)
pub fn Switch(cx: &mut Block, V: ValueRef, Else: BasicBlockRef, NumCases: uint)
-> ValueRef {
if cx.unreachable { return _Undef(V); }
check_not_terminated(cx);
@@ -98,7 +98,7 @@ pub fn AddCase(S: ValueRef, OnVal: ValueRef, Dest: BasicBlockRef) {
}
}
pub fn IndirectBr(cx: @mut Block, Addr: ValueRef, NumDests: uint) {
pub fn IndirectBr(cx: &mut Block, Addr: ValueRef, NumDests: uint) {
if cx.unreachable { return; }
check_not_terminated(cx);
terminate(cx, "IndirectBr");
@@ -123,7 +123,7 @@ pub fn Invoke(cx: @mut Block,
B(cx).invoke(Fn, Args, Then, Catch, attributes)
}
pub fn Unreachable(cx: @mut Block) {
pub fn Unreachable(cx: &mut Block) {
if cx.unreachable { return; }
cx.unreachable = true;
if !cx.terminated {
@@ -138,177 +138,177 @@ pub fn _Undef(val: ValueRef) -> ValueRef {
}
/* Arithmetic */
pub fn Add(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn Add(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).add(LHS, RHS)
}
pub fn NSWAdd(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn NSWAdd(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).nswadd(LHS, RHS)
}
pub fn NUWAdd(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn NUWAdd(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).nuwadd(LHS, RHS)
}
pub fn FAdd(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn FAdd(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).fadd(LHS, RHS)
}
pub fn Sub(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn Sub(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).sub(LHS, RHS)
}
pub fn NSWSub(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn NSWSub(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).nswsub(LHS, RHS)
}
pub fn NUWSub(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn NUWSub(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).nuwsub(LHS, RHS)
}
pub fn FSub(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn FSub(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).fsub(LHS, RHS)
}
pub fn Mul(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn Mul(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).mul(LHS, RHS)
}
pub fn NSWMul(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn NSWMul(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).nswmul(LHS, RHS)
}
pub fn NUWMul(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn NUWMul(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).nuwmul(LHS, RHS)
}
pub fn FMul(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn FMul(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).fmul(LHS, RHS)
}
pub fn UDiv(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn UDiv(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).udiv(LHS, RHS)
}
pub fn SDiv(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn SDiv(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).sdiv(LHS, RHS)
}
pub fn ExactSDiv(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn ExactSDiv(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).exactsdiv(LHS, RHS)
}
pub fn FDiv(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn FDiv(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).fdiv(LHS, RHS)
}
pub fn URem(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn URem(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).urem(LHS, RHS)
}
pub fn SRem(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn SRem(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).srem(LHS, RHS)
}
pub fn FRem(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn FRem(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).frem(LHS, RHS)
}
pub fn Shl(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn Shl(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).shl(LHS, RHS)
}
pub fn LShr(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn LShr(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).lshr(LHS, RHS)
}
pub fn AShr(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn AShr(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).ashr(LHS, RHS)
}
pub fn And(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn And(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).and(LHS, RHS)
}
pub fn Or(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn Or(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).or(LHS, RHS)
}
pub fn Xor(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn Xor(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).xor(LHS, RHS)
}
pub fn BinOp(cx: @mut Block, Op: Opcode, LHS: ValueRef, RHS: ValueRef)
pub fn BinOp(cx: &Block, Op: Opcode, LHS: ValueRef, RHS: ValueRef)
-> ValueRef {
if cx.unreachable { return _Undef(LHS); }
B(cx).binop(Op, LHS, RHS)
}
pub fn Neg(cx: @mut Block, V: ValueRef) -> ValueRef {
pub fn Neg(cx: &Block, V: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(V); }
B(cx).neg(V)
}
pub fn NSWNeg(cx: @mut Block, V: ValueRef) -> ValueRef {
pub fn NSWNeg(cx: &Block, V: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(V); }
B(cx).nswneg(V)
}
pub fn NUWNeg(cx: @mut Block, V: ValueRef) -> ValueRef {
pub fn NUWNeg(cx: &Block, V: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(V); }
B(cx).nuwneg(V)
}
pub fn FNeg(cx: @mut Block, V: ValueRef) -> ValueRef {
pub fn FNeg(cx: &Block, V: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(V); }
B(cx).fneg(V)
}
pub fn Not(cx: @mut Block, V: ValueRef) -> ValueRef {
pub fn Not(cx: &Block, V: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(V); }
B(cx).not(V)
}
/* Memory */
pub fn Malloc(cx: @mut Block, Ty: Type) -> ValueRef {
pub fn Malloc(cx: &Block, Ty: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::i8p().to_ref()); }
B(cx).malloc(Ty)
}
}
pub fn ArrayMalloc(cx: @mut Block, Ty: Type, Val: ValueRef) -> ValueRef {
pub fn ArrayMalloc(cx: &Block, Ty: Type, Val: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::i8p().to_ref()); }
B(cx).array_malloc(Ty, Val)
}
}
pub fn Alloca(cx: @mut Block, Ty: Type, name: &str) -> ValueRef {
pub fn Alloca(cx: &Block, Ty: Type, name: &str) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Ty.ptr_to().to_ref()); }
let b = cx.fcx.ccx.builder();
@@ -317,7 +317,7 @@ pub fn Alloca(cx: @mut Block, Ty: Type, name: &str) -> ValueRef {
}
}
pub fn ArrayAlloca(cx: @mut Block, Ty: Type, Val: ValueRef) -> ValueRef {
pub fn ArrayAlloca(cx: &Block, Ty: Type, Val: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Ty.ptr_to().to_ref()); }
let b = cx.fcx.ccx.builder();
@@ -326,12 +326,12 @@ pub fn ArrayAlloca(cx: @mut Block, Ty: Type, Val: ValueRef) -> ValueRef {
}
}
pub fn Free(cx: @mut Block, PointerVal: ValueRef) {
pub fn Free(cx: &Block, PointerVal: ValueRef) {
if cx.unreachable { return; }
B(cx).free(PointerVal)
}
pub fn Load(cx: @mut Block, PointerVal: ValueRef) -> ValueRef {
pub fn Load(cx: &Block, PointerVal: ValueRef) -> ValueRef {
unsafe {
let ccx = cx.fcx.ccx;
if cx.unreachable {
@@ -347,7 +347,7 @@ pub fn Load(cx: @mut Block, PointerVal: ValueRef) -> ValueRef {
}
}
pub fn AtomicLoad(cx: @mut Block, PointerVal: ValueRef, order: AtomicOrdering) -> ValueRef {
pub fn AtomicLoad(cx: &Block, PointerVal: ValueRef, order: AtomicOrdering) -> ValueRef {
unsafe {
let ccx = cx.fcx.ccx;
if cx.unreachable {
@@ -358,7 +358,7 @@ pub fn AtomicLoad(cx: @mut Block, PointerVal: ValueRef, order: AtomicOrdering) -
}
pub fn LoadRangeAssert(cx: @mut Block, PointerVal: ValueRef, lo: c_ulonglong,
pub fn LoadRangeAssert(cx: &Block, PointerVal: ValueRef, lo: c_ulonglong,
hi: c_ulonglong, signed: lib::llvm::Bool) -> ValueRef {
if cx.unreachable {
let ccx = cx.fcx.ccx;
@@ -376,17 +376,17 @@ pub fn LoadRangeAssert(cx: @mut Block, PointerVal: ValueRef, lo: c_ulonglong,
}
}
pub fn Store(cx: @mut Block, Val: ValueRef, Ptr: ValueRef) {
pub fn Store(cx: &Block, Val: ValueRef, Ptr: ValueRef) {
if cx.unreachable { return; }
B(cx).store(Val, Ptr)
}
pub fn AtomicStore(cx: @mut Block, Val: ValueRef, Ptr: ValueRef, order: AtomicOrdering) {
pub fn AtomicStore(cx: &Block, Val: ValueRef, Ptr: ValueRef, order: AtomicOrdering) {
if cx.unreachable { return; }
B(cx).atomic_store(Val, Ptr, order)
}
pub fn GEP(cx: @mut Block, Pointer: ValueRef, Indices: &[ValueRef]) -> ValueRef {
pub fn GEP(cx: &Block, Pointer: ValueRef, Indices: &[ValueRef]) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().ptr_to().to_ref()); }
B(cx).gep(Pointer, Indices)
@@ -396,35 +396,35 @@ pub fn GEP(cx: @mut Block, Pointer: ValueRef, Indices: &[ValueRef]) -> ValueRef
// Simple wrapper around GEP that takes an array of ints and wraps them
// in C_i32()
#[inline]
pub fn GEPi(cx: @mut Block, base: ValueRef, ixs: &[uint]) -> ValueRef {
pub fn GEPi(cx: &Block, base: ValueRef, ixs: &[uint]) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().ptr_to().to_ref()); }
B(cx).gepi(base, ixs)
}
}
pub fn InBoundsGEP(cx: @mut Block, Pointer: ValueRef, Indices: &[ValueRef]) -> ValueRef {
pub fn InBoundsGEP(cx: &Block, Pointer: ValueRef, Indices: &[ValueRef]) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().ptr_to().to_ref()); }
B(cx).inbounds_gep(Pointer, Indices)
}
}
pub fn StructGEP(cx: @mut Block, Pointer: ValueRef, Idx: uint) -> ValueRef {
pub fn StructGEP(cx: &Block, Pointer: ValueRef, Idx: uint) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().ptr_to().to_ref()); }
B(cx).struct_gep(Pointer, Idx)
}
}
pub fn GlobalString(cx: @mut Block, _Str: *c_char) -> ValueRef {
pub fn GlobalString(cx: &Block, _Str: *c_char) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::i8p().to_ref()); }
B(cx).global_string(_Str)
}
}
pub fn GlobalStringPtr(cx: @mut Block, _Str: *c_char) -> ValueRef {
pub fn GlobalStringPtr(cx: &Block, _Str: *c_char) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::i8p().to_ref()); }
B(cx).global_string_ptr(_Str)
@@ -432,112 +432,112 @@ pub fn GlobalStringPtr(cx: @mut Block, _Str: *c_char) -> ValueRef {
}
/* Casts */
pub fn Trunc(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn Trunc(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).trunc(Val, DestTy)
}
}
pub fn ZExt(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn ZExt(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).zext(Val, DestTy)
}
}
pub fn SExt(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn SExt(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).sext(Val, DestTy)
}
}
pub fn FPToUI(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn FPToUI(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).fptoui(Val, DestTy)
}
}
pub fn FPToSI(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn FPToSI(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).fptosi(Val, DestTy)
}
}
pub fn UIToFP(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn UIToFP(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).uitofp(Val, DestTy)
}
}
pub fn SIToFP(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn SIToFP(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).sitofp(Val, DestTy)
}
}
pub fn FPTrunc(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn FPTrunc(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).fptrunc(Val, DestTy)
}
}
pub fn FPExt(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn FPExt(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).fpext(Val, DestTy)
}
}
pub fn PtrToInt(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn PtrToInt(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).ptrtoint(Val, DestTy)
}
}
pub fn IntToPtr(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn IntToPtr(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).inttoptr(Val, DestTy)
}
}
pub fn BitCast(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn BitCast(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).bitcast(Val, DestTy)
}
}
pub fn ZExtOrBitCast(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn ZExtOrBitCast(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).zext_or_bitcast(Val, DestTy)
}
}
pub fn SExtOrBitCast(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn SExtOrBitCast(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).sext_or_bitcast(Val, DestTy)
}
}
pub fn TruncOrBitCast(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn TruncOrBitCast(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).trunc_or_bitcast(Val, DestTy)
}
}
pub fn Cast(cx: @mut Block, Op: Opcode, Val: ValueRef, DestTy: Type, _: *u8)
pub fn Cast(cx: &Block, Op: Opcode, Val: ValueRef, DestTy: Type, _: *u8)
-> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
@@ -545,21 +545,21 @@ pub fn Cast(cx: @mut Block, Op: Opcode, Val: ValueRef, DestTy: Type, _: *u8)
}
}
pub fn PointerCast(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn PointerCast(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).pointercast(Val, DestTy)
}
}
pub fn IntCast(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn IntCast(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).intcast(Val, DestTy)
}
}
pub fn FPCast(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
pub fn FPCast(cx: &Block, Val: ValueRef, DestTy: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); }
B(cx).fpcast(Val, DestTy)
@@ -568,7 +568,7 @@ pub fn FPCast(cx: @mut Block, Val: ValueRef, DestTy: Type) -> ValueRef {
/* Comparisons */
pub fn ICmp(cx: @mut Block, Op: IntPredicate, LHS: ValueRef, RHS: ValueRef)
pub fn ICmp(cx: &Block, Op: IntPredicate, LHS: ValueRef, RHS: ValueRef)
-> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::i1().to_ref()); }
@@ -576,7 +576,7 @@ pub fn ICmp(cx: @mut Block, Op: IntPredicate, LHS: ValueRef, RHS: ValueRef)
}
}
pub fn FCmp(cx: @mut Block, Op: RealPredicate, LHS: ValueRef, RHS: ValueRef)
pub fn FCmp(cx: &Block, Op: RealPredicate, LHS: ValueRef, RHS: ValueRef)
-> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::i1().to_ref()); }
@@ -585,14 +585,14 @@ pub fn FCmp(cx: @mut Block, Op: RealPredicate, LHS: ValueRef, RHS: ValueRef)
}
/* Miscellaneous instructions */
pub fn EmptyPhi(cx: @mut Block, Ty: Type) -> ValueRef {
pub fn EmptyPhi(cx: &Block, Ty: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Ty.to_ref()); }
B(cx).empty_phi(Ty)
}
}
pub fn Phi(cx: @mut Block, Ty: Type, vals: &[ValueRef], bbs: &[BasicBlockRef]) -> ValueRef {
pub fn Phi(cx: &Block, Ty: Type, vals: &[ValueRef], bbs: &[BasicBlockRef]) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Ty.to_ref()); }
B(cx).phi(Ty, vals, bbs)
@@ -608,7 +608,7 @@ pub fn AddIncomingToPhi(phi: ValueRef, val: ValueRef, bb: BasicBlockRef) {
}
}
pub fn _UndefReturn(cx: @mut Block, Fn: ValueRef) -> ValueRef {
pub fn _UndefReturn(cx: &Block, Fn: ValueRef) -> ValueRef {
unsafe {
let ccx = cx.fcx.ccx;
let ty = val_ty(Fn);
@@ -622,58 +622,58 @@ pub fn _UndefReturn(cx: @mut Block, Fn: ValueRef) -> ValueRef {
}
}
pub fn add_span_comment(cx: @mut Block, sp: Span, text: &str) {
pub fn add_span_comment(cx: &Block, sp: Span, text: &str) {
B(cx).add_span_comment(sp, text)
}
pub fn add_comment(cx: @mut Block, text: &str) {
pub fn add_comment(cx: &Block, text: &str) {
B(cx).add_comment(text)
}
pub fn InlineAsmCall(cx: @mut Block, asm: *c_char, cons: *c_char,
pub fn InlineAsmCall(cx: &Block, asm: *c_char, cons: *c_char,
inputs: &[ValueRef], output: Type,
volatile: bool, alignstack: bool,
dia: AsmDialect) -> ValueRef {
B(cx).inline_asm_call(asm, cons, inputs, output, volatile, alignstack, dia)
}
pub fn Call(cx: @mut Block, Fn: ValueRef, Args: &[ValueRef],
pub fn Call(cx: &Block, Fn: ValueRef, Args: &[ValueRef],
attributes: &[(uint, lib::llvm::Attribute)]) -> ValueRef {
if cx.unreachable { return _UndefReturn(cx, Fn); }
B(cx).call(Fn, Args, attributes)
}
pub fn CallWithConv(cx: @mut Block, Fn: ValueRef, Args: &[ValueRef], Conv: CallConv,
pub fn CallWithConv(cx: &Block, Fn: ValueRef, Args: &[ValueRef], Conv: CallConv,
attributes: &[(uint, lib::llvm::Attribute)]) -> ValueRef {
if cx.unreachable { return _UndefReturn(cx, Fn); }
B(cx).call_with_conv(Fn, Args, Conv, attributes)
}
pub fn AtomicFence(cx: @mut Block, order: AtomicOrdering) {
pub fn AtomicFence(cx: &Block, order: AtomicOrdering) {
if cx.unreachable { return; }
B(cx).atomic_fence(order)
}
pub fn Select(cx: @mut Block, If: ValueRef, Then: ValueRef, Else: ValueRef) -> ValueRef {
pub fn Select(cx: &Block, If: ValueRef, Then: ValueRef, Else: ValueRef) -> ValueRef {
if cx.unreachable { return _Undef(Then); }
B(cx).select(If, Then, Else)
}
pub fn VAArg(cx: @mut Block, list: ValueRef, Ty: Type) -> ValueRef {
pub fn VAArg(cx: &Block, list: ValueRef, Ty: Type) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Ty.to_ref()); }
B(cx).va_arg(list, Ty)
}
}
pub fn ExtractElement(cx: @mut Block, VecVal: ValueRef, Index: ValueRef) -> ValueRef {
pub fn ExtractElement(cx: &Block, VecVal: ValueRef, Index: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().to_ref()); }
B(cx).extract_element(VecVal, Index)
}
}
pub fn InsertElement(cx: @mut Block, VecVal: ValueRef, EltVal: ValueRef,
pub fn InsertElement(cx: &Block, VecVal: ValueRef, EltVal: ValueRef,
Index: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().to_ref()); }
@@ -681,7 +681,7 @@ pub fn InsertElement(cx: @mut Block, VecVal: ValueRef, EltVal: ValueRef,
}
}
pub fn ShuffleVector(cx: @mut Block, V1: ValueRef, V2: ValueRef,
pub fn ShuffleVector(cx: &Block, V1: ValueRef, V2: ValueRef,
Mask: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().to_ref()); }
@@ -689,42 +689,42 @@ pub fn ShuffleVector(cx: @mut Block, V1: ValueRef, V2: ValueRef,
}
}
pub fn VectorSplat(cx: @mut Block, NumElts: uint, EltVal: ValueRef) -> ValueRef {
pub fn VectorSplat(cx: &Block, NumElts: uint, EltVal: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().to_ref()); }
B(cx).vector_splat(NumElts, EltVal)
}
}
pub fn ExtractValue(cx: @mut Block, AggVal: ValueRef, Index: uint) -> ValueRef {
pub fn ExtractValue(cx: &Block, AggVal: ValueRef, Index: uint) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().to_ref()); }
B(cx).extract_value(AggVal, Index)
}
}
pub fn InsertValue(cx: @mut Block, AggVal: ValueRef, EltVal: ValueRef, Index: uint) -> ValueRef {
pub fn InsertValue(cx: &Block, AggVal: ValueRef, EltVal: ValueRef, Index: uint) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().to_ref()); }
B(cx).insert_value(AggVal, EltVal, Index)
}
}
pub fn IsNull(cx: @mut Block, Val: ValueRef) -> ValueRef {
pub fn IsNull(cx: &Block, Val: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::i1().to_ref()); }
B(cx).is_null(Val)
}
}
pub fn IsNotNull(cx: @mut Block, Val: ValueRef) -> ValueRef {
pub fn IsNotNull(cx: &Block, Val: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable { return llvm::LLVMGetUndef(Type::i1().to_ref()); }
B(cx).is_not_null(Val)
}
}
pub fn PtrDiff(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
pub fn PtrDiff(cx: &Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
unsafe {
let ccx = cx.fcx.ccx;
if cx.unreachable { return llvm::LLVMGetUndef(ccx.int_type.to_ref()); }
@@ -732,19 +732,19 @@ pub fn PtrDiff(cx: @mut Block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
}
}
pub fn Trap(cx: @mut Block) {
pub fn Trap(cx: &Block) {
if cx.unreachable { return; }
B(cx).trap();
}
pub fn LandingPad(cx: @mut Block, Ty: Type, PersFn: ValueRef,
pub fn LandingPad(cx: &Block, Ty: Type, PersFn: ValueRef,
NumClauses: uint) -> ValueRef {
check_not_terminated(cx);
assert!(!cx.unreachable);
B(cx).landing_pad(Ty, PersFn, NumClauses)
}
pub fn SetCleanup(cx: @mut Block, LandingPad: ValueRef) {
pub fn SetCleanup(cx: &Block, LandingPad: ValueRef) {
B(cx).set_cleanup(LandingPad)
}
@@ -755,12 +755,12 @@ pub fn Resume(cx: @mut Block, Exn: ValueRef) -> ValueRef {
}
// Atomic Operations
pub fn AtomicCmpXchg(cx: @mut Block, dst: ValueRef,
pub fn AtomicCmpXchg(cx: &Block, dst: ValueRef,
cmp: ValueRef, src: ValueRef,
order: AtomicOrdering) -> ValueRef {
B(cx).atomic_cmpxchg(dst, cmp, src, order)
}
pub fn AtomicRMW(cx: @mut Block, op: AtomicBinOp,
pub fn AtomicRMW(cx: &Block, op: AtomicBinOp,
dst: ValueRef, src: ValueRef,
order: AtomicOrdering) -> ValueRef {
B(cx).atomic_rmw(op, dst, src, order)
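
The signature split in the hunks above follows one rule: functions that
write block state (terminate, RetVoid, Switch, IndirectBr, Unreachable and
the other terminators) now take &mut Block, while pure instruction emitters
(Add, Load, GEP, the casts and comparisons) only read the unreachable flag
and fetch the builder through B(cx), so a shared &Block suffices. B itself
needs only &Block because the Builder is owned by the crate context, not by
the block it is positioned at. A condensed sketch of the split, using
hypothetical stand-in types:

// A condensed sketch of the &Block / &mut Block split, using hypothetical
// stand-in types. Terminators mutate block state; emitters only read it.
struct Block {
    terminated: bool,
    unreachable: bool,
}

// Terminators flip the terminated flag, so they need exclusive access.
fn terminate(cx: &mut Block, _instr: &str) {
    cx.terminated = true;
}

fn ret_void(cx: &mut Block) {
    if cx.unreachable { return; }
    terminate(cx, "RetVoid");
}

// Arithmetic wrappers only read the unreachable flag; a shared borrow is enough.
fn add(cx: &Block, lhs: i64, rhs: i64) -> i64 {
    if cx.unreachable { return 0; } // stands in for _Undef(LHS)
    lhs.wrapping_add(rhs)           // stands in for B(cx).add(LHS, RHS)
}

fn main() {
    let mut block = Block { terminated: false, unreachable: false };
    let sum = add(&block, 2, 3);
    ret_void(&mut block);
    assert_eq!(sum, 5);
    assert!(block.terminated);
}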


@@ -550,7 +550,7 @@ pub fn revoke_clean(cx: @mut Block, val: ValueRef) {
}
}
pub fn block_cleanups(bcx: @mut Block) -> ~[cleanup] {
pub fn block_cleanups(bcx: &mut Block) -> ~[cleanup] {
match bcx.scope {
None => ~[],
Some(inf) => inf.cleanups.clone(),
@@ -1061,7 +1061,7 @@ pub fn path_str(sess: session::Session, p: &[path_elt]) -> ~str {
r
}
pub fn monomorphize_type(bcx: @mut Block, t: ty::t) -> ty::t {
pub fn monomorphize_type(bcx: &mut Block, t: ty::t) -> ty::t {
match bcx.fcx.param_substs {
Some(substs) => {
ty::subst_tps(bcx.tcx(), substs.tys, substs.self_ty, t)
@@ -1074,23 +1074,23 @@ pub fn monomorphize_type(bcx: @mut Block, t: ty::t) -> ty::t {
}
}
pub fn node_id_type(bcx: @mut Block, id: ast::NodeId) -> ty::t {
pub fn node_id_type(bcx: &mut Block, id: ast::NodeId) -> ty::t {
let tcx = bcx.tcx();
let t = ty::node_id_to_type(tcx, id);
monomorphize_type(bcx, t)
}
pub fn expr_ty(bcx: @mut Block, ex: &ast::Expr) -> ty::t {
pub fn expr_ty(bcx: &mut Block, ex: &ast::Expr) -> ty::t {
node_id_type(bcx, ex.id)
}
pub fn expr_ty_adjusted(bcx: @mut Block, ex: &ast::Expr) -> ty::t {
pub fn expr_ty_adjusted(bcx: &mut Block, ex: &ast::Expr) -> ty::t {
let tcx = bcx.tcx();
let t = ty::expr_ty_adjusted(tcx, ex);
monomorphize_type(bcx, t)
}
pub fn node_id_type_params(bcx: @mut Block, id: ast::NodeId) -> ~[ty::t] {
pub fn node_id_type_params(bcx: &mut Block, id: ast::NodeId) -> ~[ty::t] {
let tcx = bcx.tcx();
let params = ty::node_id_to_type_params(tcx, id);


@@ -552,12 +552,14 @@ pub fn decr_refcnt_maybe_free(bcx: @mut Block, box_ptr: ValueRef,
let decr_bcx = sub_block(bcx, "decr");
let free_bcx = sub_block(decr_bcx, "free");
let next_bcx = sub_block(bcx, "next");
CondBr(bcx, IsNotNull(bcx, box_ptr), decr_bcx.llbb, next_bcx.llbb);
let llnotnull = IsNotNull(bcx, box_ptr);
CondBr(bcx, llnotnull, decr_bcx.llbb, next_bcx.llbb);
let rc_ptr = GEPi(decr_bcx, box_ptr, [0u, abi::box_field_refcnt]);
let rc = Sub(decr_bcx, Load(decr_bcx, rc_ptr), C_int(ccx, 1));
Store(decr_bcx, rc, rc_ptr);
CondBr(decr_bcx, IsNull(decr_bcx, rc), free_bcx.llbb, next_bcx.llbb);
let llisnull = IsNull(decr_bcx, rc);
CondBr(decr_bcx, llisnull, free_bcx.llbb, next_bcx.llbb);
let free_bcx = match box_ptr_ptr {
Some(p) => free_ty(free_bcx, p, t),


@@ -49,7 +49,8 @@ pub fn trans_intrinsic(ccx: @mut CrateContext,
args[i] = get_param(bcx.fcx.llfn, first_real_arg + i);
}
let llfn = bcx.ccx().intrinsics.get_copy(&name);
Ret(bcx, Call(bcx, llfn, args.slice(0, num_args), []));
let llcall = Call(bcx, llfn, args.slice(0, num_args), []);
Ret(bcx, llcall);
}
fn with_overflow_instrinsic(bcx: @mut Block, name: &'static str) {
@@ -116,7 +117,8 @@ pub fn trans_intrinsic(ccx: @mut CrateContext,
let x = get_param(bcx.fcx.llfn, bcx.fcx.arg_pos(0u));
let y = C_i1(false);
let llfn = bcx.ccx().intrinsics.get_copy(&name);
Ret(bcx, Call(bcx, llfn, [x, y], []));
let llcall = Call(bcx, llfn, [x, y], []);
Ret(bcx, llcall);
}
let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx, item.id));
@@ -324,14 +326,19 @@ pub fn trans_intrinsic(ccx: @mut CrateContext,
(Pointer, other) | (other, Pointer) if other != Pointer => {
let tmp = Alloca(bcx, llouttype, "");
Store(bcx, llsrcval, PointerCast(bcx, tmp, llintype.ptr_to()));
Ret(bcx, Load(bcx, tmp));
let ll_load = Load(bcx, tmp);
Ret(bcx, ll_load);
}
_ => Ret(bcx, BitCast(bcx, llsrcval, llouttype))
_ => {
let llbitcast = BitCast(bcx, llsrcval, llouttype);
Ret(bcx, llbitcast)
}
}
}
} else if ty::type_is_immediate(ccx.tcx, out_type) {
let llsrcptr = PointerCast(bcx, llsrcval, llouttype.ptr_to());
Ret(bcx, Load(bcx, llsrcptr));
let ll_load = Load(bcx, llsrcptr);
Ret(bcx, ll_load);
} else {
// NB: Do not use a Load and Store here. This causes massive
// code bloat when `transmute` is used on large structural
@@ -404,7 +411,8 @@ pub fn trans_intrinsic(ccx: @mut CrateContext,
"offset" => {
let ptr = get_param(decl, first_real_arg);
let offset = get_param(decl, first_real_arg + 1);
Ret(bcx, InBoundsGEP(bcx, ptr, [offset]));
let lladdr = InBoundsGEP(bcx, ptr, [offset]);
Ret(bcx, lladdr);
}
"memcpy32" => memcpy_intrinsic(bcx, "llvm.memcpy.p0i8.p0i8.i32", substs.tys[0], 32),
"memcpy64" => memcpy_intrinsic(bcx, "llvm.memcpy.p0i8.p0i8.i64", substs.tys[0], 64),


@@ -49,7 +49,7 @@ impl Value {
/// This only performs a search for a trivially dominating store. The store
/// must be the only user of this value, and there must not be any conditional
/// branches between the store and the given block.
pub fn get_dominating_store(self, bcx: @mut Block) -> Option<Value> {
pub fn get_dominating_store(self, bcx: &mut Block) -> Option<Value> {
match self.get_single_user().and_then(|user| user.as_store_inst()) {
Some(store) => {
do store.get_parent().and_then |store_bb| {