trans: Reimplement unwinding on MSVC

This commit transitions the compiler to using the new exception handling
instructions in LLVM for implementing unwinding for MSVC. This affects both 32
and 64-bit MSVC as they're both now using SEH-based strategies. In terms of
standard library support, lots more details about how SEH unwinding is
implemented can be found in the commits.

In terms of trans, this change necessitated a few modifications:

* Branches were added to `trans::cleanup` to detect whether the old landingpad
  instruction or the new cleanuppad instruction is being used.
* The return value from `cleanuppad` is not stored in an `alloca` (because it
  cannot be).
* Each block in trans now has an `Option<LandingPad>` instead of `is_lpad: bool`
  for indicating whether it's in a landing pad or not. The new exception
  handling intrinsics require that on MSVC each `call` inside of a landing pad
  is annotated with which landing pad that it's in. This change to the basic
  block means that whenever a `call` or `invoke` instruction is generated we
  know whether to annotate it as part of a cleanuppad or not.
* Lots of modifications were made to the instruction builders to construct the
  new instructions as well as pass the tagging information for the call/invoke
  instructions.
* The translation of the `try` intrinsics for MSVC has been overhauled to use
  the new `catchpad` instruction. The filter function is now also a
  rustc-generated function instead of a purely libstd-defined function. The
  libstd definition still exists, it just has a stable ABI across architectures
  and leaves some of the really weird implementation details to the compiler
  (e.g. the `localescape` and `localrecover` intrinsics).
This commit is contained in:
Alex Crichton 2015-10-23 18:18:44 -07:00
parent d1cace17af
commit 3e9589c0f4
21 changed files with 1126 additions and 420 deletions

View File

@ -552,7 +552,15 @@ extern "rust-intrinsic" {
pub fn discriminant_value<T>(v: &T) -> u64;
/// Rust's "try catch" construct which invokes the function pointer `f` with
/// the data pointer `data`, returning the exception payload if an exception
/// is thrown (aka the thread panics).
/// the data pointer `data`.
///
/// The third pointer is a target-specific data pointer which is filled in
/// with the specifics of the exception that occurred. For example, on Unix
/// platforms this is a `*mut *mut T` which is filled in by the compiler and
/// on MSVC it's `*mut [usize; 2]`. For more information see the compiler's
/// source as well as std's catch implementation.
#[cfg(not(stage0))]
pub fn try(f: fn(*mut u8), data: *mut u8, local_ptr: *mut u8) -> i32;
#[cfg(stage0)]
pub fn try(f: fn(*mut u8), data: *mut u8) -> *mut u8;
}

View File

@ -13,7 +13,6 @@ use target::Target;
pub fn target() -> Target {
let mut base = super::windows_msvc_base::opts();
base.cpu = "x86-64".to_string();
base.custom_unwind_resume = true;
Target {
llvm_target: "x86_64-pc-windows-msvc".to_string(),

View File

@ -544,6 +544,9 @@ pub type SMDiagnosticRef = *mut SMDiagnostic_opaque;
#[allow(missing_copy_implementations)]
pub enum RustArchiveMember_opaque {}
pub type RustArchiveMemberRef = *mut RustArchiveMember_opaque;
#[allow(missing_copy_implementations)]
pub enum OperandBundleDef_opaque {}
pub type OperandBundleDefRef = *mut OperandBundleDef_opaque;
pub type DiagnosticHandler = unsafe extern "C" fn(DiagnosticInfoRef, *mut c_void);
pub type InlineAsmDiagHandler = unsafe extern "C" fn(SMDiagnosticRef, *const c_void, c_uint);
@ -1149,14 +1152,15 @@ extern {
Addr: ValueRef,
NumDests: c_uint)
-> ValueRef;
pub fn LLVMBuildInvoke(B: BuilderRef,
Fn: ValueRef,
Args: *const ValueRef,
NumArgs: c_uint,
Then: BasicBlockRef,
Catch: BasicBlockRef,
Name: *const c_char)
-> ValueRef;
pub fn LLVMRustBuildInvoke(B: BuilderRef,
Fn: ValueRef,
Args: *const ValueRef,
NumArgs: c_uint,
Then: BasicBlockRef,
Catch: BasicBlockRef,
Bundle: OperandBundleDefRef,
Name: *const c_char)
-> ValueRef;
pub fn LLVMRustBuildLandingPad(B: BuilderRef,
Ty: TypeRef,
PersFn: ValueRef,
@ -1167,6 +1171,31 @@ extern {
pub fn LLVMBuildResume(B: BuilderRef, Exn: ValueRef) -> ValueRef;
pub fn LLVMBuildUnreachable(B: BuilderRef) -> ValueRef;
pub fn LLVMRustBuildCleanupPad(B: BuilderRef,
ParentPad: ValueRef,
ArgCnt: c_uint,
Args: *const ValueRef,
Name: *const c_char) -> ValueRef;
pub fn LLVMRustBuildCleanupRet(B: BuilderRef,
CleanupPad: ValueRef,
UnwindBB: BasicBlockRef) -> ValueRef;
pub fn LLVMRustBuildCatchPad(B: BuilderRef,
ParentPad: ValueRef,
ArgCnt: c_uint,
Args: *const ValueRef,
Name: *const c_char) -> ValueRef;
pub fn LLVMRustBuildCatchRet(B: BuilderRef,
Pad: ValueRef,
BB: BasicBlockRef) -> ValueRef;
pub fn LLVMRustBuildCatchSwitch(Builder: BuilderRef,
ParentPad: ValueRef,
BB: BasicBlockRef,
NumHandlers: c_uint,
Name: *const c_char) -> ValueRef;
pub fn LLVMRustAddHandler(CatchSwitch: ValueRef,
Handler: BasicBlockRef);
pub fn LLVMRustSetPersonalityFn(B: BuilderRef, Pers: ValueRef);
/* Add a case to the switch instruction */
pub fn LLVMAddCase(Switch: ValueRef,
OnVal: ValueRef,
@ -1476,12 +1505,13 @@ extern {
/* Miscellaneous instructions */
pub fn LLVMBuildPhi(B: BuilderRef, Ty: TypeRef, Name: *const c_char)
-> ValueRef;
pub fn LLVMBuildCall(B: BuilderRef,
Fn: ValueRef,
Args: *const ValueRef,
NumArgs: c_uint,
Name: *const c_char)
-> ValueRef;
pub fn LLVMRustBuildCall(B: BuilderRef,
Fn: ValueRef,
Args: *const ValueRef,
NumArgs: c_uint,
Bundle: OperandBundleDefRef,
Name: *const c_char)
-> ValueRef;
pub fn LLVMBuildSelect(B: BuilderRef,
If: ValueRef,
Then: ValueRef,
@ -2126,6 +2156,12 @@ extern {
pub fn LLVMRustSetDataLayoutFromTargetMachine(M: ModuleRef,
TM: TargetMachineRef);
pub fn LLVMRustGetModuleDataLayout(M: ModuleRef) -> TargetDataRef;
pub fn LLVMRustBuildOperandBundleDef(Name: *const c_char,
Inputs: *const ValueRef,
NumInputs: c_uint)
-> OperandBundleDefRef;
pub fn LLVMRustFreeOperandBundleDef(Bundle: OperandBundleDefRef);
}
#[cfg(have_component_x86)]
@ -2418,6 +2454,34 @@ pub fn last_error() -> Option<String> {
}
}
/// Owned wrapper around an LLVM `OperandBundleDef`, used to tag call/invoke
/// instructions (e.g. with `"funclet"` bundles for MSVC exception handling).
///
/// The underlying `OperandBundleDefRef` is allocated on the C++ side by
/// `LLVMRustBuildOperandBundleDef` and freed on drop, so this type is an
/// RAII handle over that allocation.
pub struct OperandBundleDef {
// Raw pointer to the C++-allocated bundle; owned by this struct.
inner: OperandBundleDefRef,
}
impl OperandBundleDef {
/// Creates a new operand bundle with the given tag `name` and values.
///
/// Panics if `name` contains an interior NUL byte (via
/// `CString::new(..).unwrap()`).
pub fn new(name: &str, vals: &[ValueRef]) -> OperandBundleDef {
let name = CString::new(name).unwrap();
let def = unsafe {
LLVMRustBuildOperandBundleDef(name.as_ptr(),
vals.as_ptr(),
vals.len() as c_uint)
};
OperandBundleDef { inner: def }
}
/// Returns the raw bundle pointer for passing across the FFI boundary.
/// The pointer must not be used after this `OperandBundleDef` is dropped.
pub fn raw(&self) -> OperandBundleDefRef {
self.inner
}
}
impl Drop for OperandBundleDef {
fn drop(&mut self) {
// Release the C++-side allocation made by LLVMRustBuildOperandBundleDef.
unsafe {
LLVMRustFreeOperandBundleDef(self.inner);
}
}
}
// The module containing the native LLVM dependencies, generated by the build system
// Note that this must come after the rustllvm extern declaration so that
// parts of LLVM that rustllvm depends on aren't thrown away by the linker.

View File

@ -334,7 +334,7 @@ impl<'a> ArchiveBuilder<'a> {
// all SYMDEF files as these are just magical placeholders which get
// re-created when we make a new archive anyway.
for file in archive.iter() {
let file = try!(file.map_err(string2io));
let file = try!(file.map_err(string_to_io_error));
if !is_relevant_child(&file) {
continue
}
@ -455,7 +455,7 @@ impl<'a> ArchiveBuilder<'a> {
unsafe {
if let Some(archive) = self.src_archive() {
for child in archive.iter() {
let child = try!(child.map_err(string2io));
let child = try!(child.map_err(string_to_io_error));
let child_name = match child.name() {
Some(s) => s,
None => continue,
@ -484,7 +484,7 @@ impl<'a> ArchiveBuilder<'a> {
}
Addition::Archive { archive, archive_name: _, mut skip } => {
for child in archive.iter() {
let child = try!(child.map_err(string2io));
let child = try!(child.map_err(string_to_io_error));
if !is_relevant_child(&child) {
continue
}
@ -541,6 +541,6 @@ impl<'a> ArchiveBuilder<'a> {
}
}
fn string2io(s: String) -> io::Error {
/// Wraps an archive-reading error message in an `io::Error` so it can be
/// propagated through the `io::Result`-based archive-building code, with a
/// prefix identifying the failure source.
fn string_to_io_error(s: String) -> io::Error {
    let message = format!("bad archive: {}", s);
    io::Error::new(io::ErrorKind::Other, message)
}

View File

@ -958,23 +958,11 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
/// currently uses SEH-ish unwinding with DWARF info tables to the side (same as
/// 64-bit MinGW) instead of "full SEH".
pub fn wants_msvc_seh(sess: &Session) -> bool {
sess.target.target.options.is_like_msvc && sess.target.target.arch == "x86"
sess.target.target.options.is_like_msvc
}
pub fn avoid_invoke(bcx: Block) -> bool {
// FIXME(#25869) currently SEH-based unwinding is pretty buggy in LLVM and
// is being overhauled as this is being written. Until that
// time such that upstream LLVM's implementation is more solid
// and we start binding it we need to skip invokes for any
// target which wants SEH-based unwinding.
if bcx.sess().no_landing_pads() || wants_msvc_seh(bcx.sess()) {
true
} else if bcx.is_lpad {
// Avoid using invoke if we are already inside a landing pad.
true
} else {
false
}
bcx.sess().no_landing_pads() || bcx.lpad.borrow().is_some()
}
pub fn need_invoke(bcx: Block) -> bool {
@ -1122,10 +1110,9 @@ pub fn init_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, local: &hir::Local) -> Blo
}
pub fn raw_block<'blk, 'tcx>(fcx: &'blk FunctionContext<'blk, 'tcx>,
is_lpad: bool,
llbb: BasicBlockRef)
-> Block<'blk, 'tcx> {
common::BlockS::new(llbb, is_lpad, None, fcx)
common::BlockS::new(llbb, None, fcx)
}
pub fn with_cond<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, val: ValueRef, f: F) -> Block<'blk, 'tcx>
@ -1298,7 +1285,7 @@ fn memfill<'a, 'tcx>(b: &Builder<'a, 'tcx>, llptr: ValueRef, ty: Ty<'tcx>, byte:
let volatile = C_bool(ccx, false);
b.call(llintrinsicfn,
&[llptr, llzeroval, size, align, volatile],
None);
None, None);
}
/// In general, when we create an scratch value in an alloca, the
@ -1372,7 +1359,7 @@ pub fn alloca_dropped<'blk, 'tcx>(cx: Block<'blk, 'tcx>, ty: Ty<'tcx>, name: &st
// Block, which we do not have for `alloca_insert_pt`).
core_lifetime_emit(cx.ccx(), p, Lifetime::Start, |ccx, size, lifetime_start| {
let ptr = b.pointercast(p, Type::i8p(ccx));
b.call(lifetime_start, &[C_u64(ccx, size), ptr], None);
b.call(lifetime_start, &[C_u64(ccx, size), ptr], None, None);
});
memfill(&b, p, ty, adt::DTOR_DONE);
p
@ -1594,7 +1581,7 @@ pub fn new_fn_ctxt<'a, 'tcx>(ccx: &'a CrateContext<'a, 'tcx>,
alloca_insert_pt: Cell::new(None),
llreturn: Cell::new(None),
needs_ret_allocas: nested_returns,
personality: Cell::new(None),
landingpad_alloca: Cell::new(None),
caller_expects_out_pointer: uses_outptr,
lllocals: RefCell::new(NodeMap()),
llupvars: RefCell::new(NodeMap()),
@ -1873,7 +1860,7 @@ pub fn finish_fn<'blk, 'tcx>(fcx: &'blk FunctionContext<'blk, 'tcx>,
if !last_bcx.terminated.get() {
Br(last_bcx, llreturn, DebugLoc::None);
}
raw_block(fcx, false, llreturn)
raw_block(fcx, llreturn)
}
None => last_bcx,
};
@ -2663,11 +2650,12 @@ pub fn create_entry_wrapper(ccx: &CrateContext, sp: Span, main_llfn: ValueRef) {
(rust_main, args)
};
let result = llvm::LLVMBuildCall(bld,
start_fn,
args.as_ptr(),
args.len() as c_uint,
noname());
let result = llvm::LLVMRustBuildCall(bld,
start_fn,
args.as_ptr(),
args.len() as c_uint,
0 as *mut _,
noname());
llvm::LLVMBuildRet(bld, result);
}

View File

@ -150,7 +150,9 @@ pub fn Invoke(cx: Block,
cx.val_to_string(fn_),
args.iter().map(|a| cx.val_to_string(*a)).collect::<Vec<String>>().join(", "));
debug_loc.apply(cx.fcx);
B(cx).invoke(fn_, args, then, catch, attributes)
let lpad = cx.lpad.borrow();
let bundle = lpad.as_ref().and_then(|b| b.bundle());
B(cx).invoke(fn_, args, then, catch, bundle, attributes)
}
pub fn Unreachable(cx: Block) {
@ -914,7 +916,9 @@ pub fn Call(cx: Block,
return _UndefReturn(cx, fn_);
}
debug_loc.apply(cx.fcx);
B(cx).call(fn_, args, attributes)
let lpad = cx.lpad.borrow();
let bundle = lpad.as_ref().and_then(|b| b.bundle());
B(cx).call(fn_, args, bundle, attributes)
}
pub fn CallWithConv(cx: Block,
@ -928,7 +932,9 @@ pub fn CallWithConv(cx: Block,
return _UndefReturn(cx, fn_);
}
debug_loc.apply(cx.fcx);
B(cx).call_with_conv(fn_, args, conv, attributes)
let lpad = cx.lpad.borrow();
let bundle = lpad.as_ref().and_then(|b| b.bundle());
B(cx).call_with_conv(fn_, args, conv, bundle, attributes)
}
pub fn AtomicFence(cx: Block, order: AtomicOrdering, scope: SynchronizationScope) {
@ -1050,6 +1056,10 @@ pub fn SetCleanup(cx: Block, landing_pad: ValueRef) {
B(cx).set_cleanup(landing_pad)
}
/// Sets the personality function for the function containing `cx`'s block,
/// delegating to `Builder::set_personality_fn`. Required before emitting a
/// `cleanuppad` on MSVC targets.
pub fn SetPersonalityFn(cx: Block, f: ValueRef) {
B(cx).set_personality_fn(f)
}
pub fn Resume(cx: Block, exn: ValueRef) -> ValueRef {
check_not_terminated(cx);
terminate(cx, "Resume");
@ -1068,3 +1078,46 @@ pub fn AtomicRMW(cx: Block, op: AtomicBinOp,
order: AtomicOrdering) -> ValueRef {
B(cx).atomic_rmw(op, dst, src, order)
}
/// Emits a `cleanuppad` instruction in `cx`, after checking that the block
/// has not already been terminated or marked unreachable. `parent` is the
/// enclosing pad, if any; delegates to `Builder::cleanup_pad`.
pub fn CleanupPad(cx: Block,
parent: Option<ValueRef>,
args: &[ValueRef]) -> ValueRef {
check_not_terminated(cx);
assert!(!cx.unreachable.get());
B(cx).cleanup_pad(parent, args)
}
/// Emits a `cleanupret` terminator in `cx` for the given cleanup pad.
/// `unwind` is the block to continue unwinding to, or `None` to unwind to
/// the caller. Marks the block terminated via `terminate`.
pub fn CleanupRet(cx: Block,
cleanup: ValueRef,
unwind: Option<BasicBlockRef>) -> ValueRef {
check_not_terminated(cx);
terminate(cx, "CleanupRet");
B(cx).cleanup_ret(cleanup, unwind)
}
/// Emits a `catchpad` instruction in `cx` with the given parent
/// (a `catchswitch` value) and filter arguments, after the usual
/// not-terminated / not-unreachable sanity checks.
pub fn CatchPad(cx: Block,
parent: ValueRef,
args: &[ValueRef]) -> ValueRef {
check_not_terminated(cx);
assert!(!cx.unreachable.get());
B(cx).catch_pad(parent, args)
}
/// Emits a `catchret` terminator in `cx`, leaving catch pad `pad` and
/// branching to `unwind`. Marks the block terminated.
pub fn CatchRet(cx: Block, pad: ValueRef, unwind: BasicBlockRef) -> ValueRef {
check_not_terminated(cx);
terminate(cx, "CatchRet");
B(cx).catch_ret(pad, unwind)
}
/// Emits a `catchswitch` terminator in `cx` with room for `num_handlers`
/// handler blocks (attached afterwards via `AddHandler`). `parent` is the
/// enclosing pad, if any; `unwind` is where unwinding continues if no
/// handler matches. Marks the block terminated.
pub fn CatchSwitch(cx: Block,
parent: Option<ValueRef>,
unwind: Option<BasicBlockRef>,
num_handlers: usize) -> ValueRef {
check_not_terminated(cx);
terminate(cx, "CatchSwitch");
B(cx).catch_switch(parent, unwind, num_handlers)
}
/// Registers `handler` as a handler block on an existing `catchswitch`
/// value. No termination bookkeeping is needed since this only mutates the
/// already-emitted instruction.
pub fn AddHandler(cx: Block, catch_switch: ValueRef, handler: BasicBlockRef) {
B(cx).add_handler(catch_switch, handler)
}

View File

@ -12,7 +12,7 @@
use llvm;
use llvm::{CallConv, AtomicBinOp, AtomicOrdering, SynchronizationScope, AsmDialect, AttrBuilder};
use llvm::{Opcode, IntPredicate, RealPredicate, False};
use llvm::{Opcode, IntPredicate, RealPredicate, False, OperandBundleDef};
use llvm::{ValueRef, BasicBlockRef, BuilderRef, ModuleRef};
use trans::base;
use trans::common::*;
@ -158,6 +158,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
args: &[ValueRef],
then: BasicBlockRef,
catch: BasicBlockRef,
bundle: Option<&OperandBundleDef>,
attributes: Option<AttrBuilder>)
-> ValueRef {
self.count_insn("invoke");
@ -169,17 +170,19 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
.collect::<Vec<String>>()
.join(", "));
let bundle = bundle.as_ref().map(|b| b.raw()).unwrap_or(0 as *mut _);
unsafe {
let v = llvm::LLVMBuildInvoke(self.llbuilder,
llfn,
args.as_ptr(),
args.len() as c_uint,
then,
catch,
noname());
match attributes {
Some(a) => a.apply_callsite(v),
None => {}
let v = llvm::LLVMRustBuildInvoke(self.llbuilder,
llfn,
args.as_ptr(),
args.len() as c_uint,
then,
catch,
bundle,
noname());
if let Some(a) = attributes {
a.apply_callsite(v);
}
v
}
@ -771,7 +774,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
comment_text.as_ptr(), noname(), False,
False)
};
self.call(asm, &[], None);
self.call(asm, &[], None, None);
}
}
@ -796,11 +799,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
unsafe {
let v = llvm::LLVMInlineAsm(
fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint);
self.call(v, inputs, None)
self.call(v, inputs, None, None)
}
}
pub fn call(&self, llfn: ValueRef, args: &[ValueRef],
bundle: Option<&OperandBundleDef>,
attributes: Option<AttrBuilder>) -> ValueRef {
self.count_insn("call");
@ -837,21 +841,25 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
}
let bundle = bundle.as_ref().map(|b| b.raw()).unwrap_or(0 as *mut _);
unsafe {
let v = llvm::LLVMBuildCall(self.llbuilder, llfn, args.as_ptr(),
args.len() as c_uint, noname());
match attributes {
Some(a) => a.apply_callsite(v),
None => {}
let v = llvm::LLVMRustBuildCall(self.llbuilder, llfn, args.as_ptr(),
args.len() as c_uint, bundle,
noname());
if let Some(a) = attributes {
a.apply_callsite(v);
}
v
}
}
pub fn call_with_conv(&self, llfn: ValueRef, args: &[ValueRef],
conv: CallConv, attributes: Option<AttrBuilder>) -> ValueRef {
conv: CallConv,
bundle: Option<&OperandBundleDef>,
attributes: Option<AttrBuilder>) -> ValueRef {
self.count_insn("callwithconv");
let v = self.call(llfn, args, attributes);
let v = self.call(llfn, args, bundle, attributes);
llvm::SetInstructionCallConv(v, conv);
v
}
@ -948,8 +956,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
assert!((t as isize != 0));
let args: &[ValueRef] = &[];
self.count_insn("trap");
llvm::LLVMBuildCall(
self.llbuilder, t, args.as_ptr(), args.len() as c_uint, noname());
llvm::LLVMRustBuildCall(self.llbuilder, t,
args.as_ptr(), args.len() as c_uint,
0 as *mut _,
noname());
}
}
@ -983,6 +993,86 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
}
/// Builds a `cleanuppad` instruction at the current insertion point.
///
/// `parent` is the enclosing pad; `None` is passed to LLVM as a null
/// pointer. Panics if the returned value is null, which the assert message
/// attributes to an LLVM without support for the new EH instructions.
pub fn cleanup_pad(&self,
parent: Option<ValueRef>,
args: &[ValueRef]) -> ValueRef {
self.count_insn("cleanuppad");
// Null pointer is the FFI encoding for "no parent pad".
let parent = parent.unwrap_or(0 as *mut _);
let name = CString::new("cleanuppad").unwrap();
let ret = unsafe {
llvm::LLVMRustBuildCleanupPad(self.llbuilder,
parent,
args.len() as c_uint,
args.as_ptr(),
name.as_ptr())
};
assert!(!ret.is_null(), "LLVM does not have support for cleanuppad");
return ret
}
/// Builds a `cleanupret` terminator for `cleanup` (a `cleanuppad` value).
///
/// `unwind` of `None` is encoded as a null basic-block pointer for the FFI
/// call. Panics on a null result (LLVM lacking the new EH instructions).
pub fn cleanup_ret(&self, cleanup: ValueRef,
unwind: Option<BasicBlockRef>) -> ValueRef {
self.count_insn("cleanupret");
// Null pointer encodes "unwind to caller".
let unwind = unwind.unwrap_or(0 as *mut _);
let ret = unsafe {
llvm::LLVMRustBuildCleanupRet(self.llbuilder, cleanup, unwind)
};
assert!(!ret.is_null(), "LLVM does not have support for cleanupret");
return ret
}
/// Builds a `catchpad` instruction with the given parent (a `catchswitch`
/// value) and filter arguments. Panics on a null result (LLVM lacking the
/// new EH instructions).
pub fn catch_pad(&self,
parent: ValueRef,
args: &[ValueRef]) -> ValueRef {
self.count_insn("catchpad");
let name = CString::new("catchpad").unwrap();
let ret = unsafe {
llvm::LLVMRustBuildCatchPad(self.llbuilder, parent,
args.len() as c_uint, args.as_ptr(),
name.as_ptr())
};
assert!(!ret.is_null(), "LLVM does not have support for catchpad");
return ret
}
/// Builds a `catchret` terminator leaving `pad` and branching to `unwind`.
/// Panics on a null result (LLVM lacking the new EH instructions).
pub fn catch_ret(&self, pad: ValueRef, unwind: BasicBlockRef) -> ValueRef {
self.count_insn("catchret");
let ret = unsafe {
llvm::LLVMRustBuildCatchRet(self.llbuilder, pad, unwind)
};
assert!(!ret.is_null(), "LLVM does not have support for catchret");
return ret
}
/// Builds a `catchswitch` terminator with capacity for `num_handlers`
/// handlers (added later via `add_handler`).
///
/// `None` for `parent`/`unwind` is encoded as a null pointer across the
/// FFI boundary. Panics on a null result (LLVM lacking the new EH
/// instructions).
pub fn catch_switch(&self,
parent: Option<ValueRef>,
unwind: Option<BasicBlockRef>,
num_handlers: usize) -> ValueRef {
self.count_insn("catchswitch");
// Null pointers encode "no parent pad" / "unwind to caller".
let parent = parent.unwrap_or(0 as *mut _);
let unwind = unwind.unwrap_or(0 as *mut _);
let name = CString::new("catchswitch").unwrap();
let ret = unsafe {
llvm::LLVMRustBuildCatchSwitch(self.llbuilder, parent, unwind,
num_handlers as c_uint,
name.as_ptr())
};
assert!(!ret.is_null(), "LLVM does not have support for catchswitch");
return ret
}
/// Attaches `handler` to an already-built `catchswitch` instruction. This
/// mutates the instruction in place and emits nothing new, so there is no
/// `count_insn` call here.
pub fn add_handler(&self, catch_switch: ValueRef, handler: BasicBlockRef) {
unsafe {
llvm::LLVMRustAddHandler(catch_switch, handler);
}
}
/// Sets the personality function on the function currently being built.
/// Needed explicitly for cleanuppad-based (MSVC) unwinding, where no
/// `landingpad` instruction exists to set it implicitly.
pub fn set_personality_fn(&self, personality: ValueRef) {
unsafe {
llvm::LLVMRustSetPersonalityFn(self.llbuilder, personality);
}
}
// Atomic Operations
pub fn atomic_cmpxchg(&self, dst: ValueRef,
cmp: ValueRef, src: ValueRef,

View File

@ -123,7 +123,7 @@ use llvm::{BasicBlockRef, ValueRef};
use trans::base;
use trans::build;
use trans::common;
use trans::common::{Block, FunctionContext, NodeIdAndSpan};
use trans::common::{Block, FunctionContext, NodeIdAndSpan, LandingPad};
use trans::datum::{Datum, Lvalue};
use trans::debuginfo::{DebugLoc, ToDebugLoc};
use trans::glue;
@ -185,11 +185,17 @@ impl<'blk, 'tcx: 'blk> fmt::Debug for CleanupScopeKind<'blk, 'tcx> {
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum EarlyExitLabel {
UnwindExit,
UnwindExit(UnwindKind),
ReturnExit,
LoopExit(ast::NodeId, usize)
}
/// Which unwinding strategy an `UnwindExit` label is using.
#[derive(Copy, Clone, Debug)]
pub enum UnwindKind {
// GNU-style unwinding via the `landingpad` instruction.
LandingPad,
// MSVC-style unwinding; carries the `cleanuppad` value so exits can
// emit a `cleanupret` naming it.
CleanupPad(ValueRef),
}
#[derive(Copy, Clone)]
pub struct CachedEarlyExit {
label: EarlyExitLabel,
@ -372,16 +378,17 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> {
self.ccx.sess().bug("no loop scope found");
}
/// Returns a block to branch to which will perform all pending cleanups and then
/// break/continue (depending on `exit`) out of the loop with id `cleanup_scope`
/// Returns a block to branch to which will perform all pending cleanups and
/// then break/continue (depending on `exit`) out of the loop with id
/// `cleanup_scope`
fn normal_exit_block(&'blk self,
cleanup_scope: ast::NodeId,
exit: usize) -> BasicBlockRef {
self.trans_cleanups_to_exit_scope(LoopExit(cleanup_scope, exit))
}
/// Returns a block to branch to which will perform all pending cleanups and then return from
/// this function
/// Returns a block to branch to which will perform all pending cleanups and
/// then return from this function
fn return_exit_block(&'blk self) -> BasicBlockRef {
self.trans_cleanups_to_exit_scope(ReturnExit)
}
@ -400,7 +407,8 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> {
self.schedule_clean(cleanup_scope, drop as CleanupObj);
}
/// Schedules a (deep) drop of `val`, which is a pointer to an instance of `ty`
/// Schedules a (deep) drop of `val`, which is a pointer to an instance of
/// `ty`
fn schedule_drop_mem(&self,
cleanup_scope: ScopeId,
val: ValueRef,
@ -585,8 +593,9 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> {
self.scopes.borrow().iter().rev().any(|s| s.needs_invoke())
}
/// Returns a basic block to branch to in the event of a panic. This block will run the panic
/// cleanups and eventually invoke the LLVM `Resume` instruction.
/// Returns a basic block to branch to in the event of a panic. This block
/// will run the panic cleanups and eventually resume the exception that
/// caused the landing pad to be run.
fn get_landing_pad(&'blk self) -> BasicBlockRef {
let _icx = base::push_ctxt("get_landing_pad");
@ -682,9 +691,10 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
f(self.scopes.borrow().last().unwrap())
}
/// Used when the caller wishes to jump to an early exit, such as a return, break, continue, or
/// unwind. This function will generate all cleanups between the top of the stack and the exit
/// `label` and return a basic block that the caller can branch to.
/// Used when the caller wishes to jump to an early exit, such as a return,
/// break, continue, or unwind. This function will generate all cleanups
/// between the top of the stack and the exit `label` and return a basic
/// block that the caller can branch to.
///
/// For example, if the current stack of cleanups were as follows:
///
@ -695,15 +705,15 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
/// Custom 2
/// AST 24
///
/// and the `label` specifies a break from `Loop 23`, then this function would generate a
/// series of basic blocks as follows:
/// and the `label` specifies a break from `Loop 23`, then this function
/// would generate a series of basic blocks as follows:
///
/// Cleanup(AST 24) -> Cleanup(Custom 2) -> break_blk
///
/// where `break_blk` is the block specified in `Loop 23` as the target for breaks. The return
/// value would be the first basic block in that sequence (`Cleanup(AST 24)`). The caller could
/// then branch to `Cleanup(AST 24)` and it will perform all cleanups and finally branch to the
/// `break_blk`.
/// where `break_blk` is the block specified in `Loop 23` as the target for
/// breaks. The return value would be the first basic block in that sequence
/// (`Cleanup(AST 24)`). The caller could then branch to `Cleanup(AST 24)`
/// and it will perform all cleanups and finally branch to the `break_blk`.
fn trans_cleanups_to_exit_scope(&'blk self,
label: EarlyExitLabel)
-> BasicBlockRef {
@ -725,21 +735,30 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
loop {
if self.scopes_len() == 0 {
match label {
UnwindExit => {
// Generate a block that will `Resume`.
let prev_bcx = self.new_block(true, "resume", None);
let personality = self.personality.get().expect(
"create_landing_pad() should have set this");
let lp = build::Load(prev_bcx, personality);
base::call_lifetime_end(prev_bcx, personality);
base::trans_unwind_resume(prev_bcx, lp);
prev_llbb = prev_bcx.llbb;
UnwindExit(val) => {
// Generate a block that will resume unwinding to the
// calling function
let bcx = self.new_block("resume", None);
match val {
UnwindKind::LandingPad => {
let addr = self.landingpad_alloca.get()
.unwrap();
let lp = build::Load(bcx, addr);
base::call_lifetime_end(bcx, addr);
base::trans_unwind_resume(bcx, lp);
}
UnwindKind::CleanupPad(_) => {
let pad = build::CleanupPad(bcx, None, &[]);
build::CleanupRet(bcx, pad, None);
}
}
prev_llbb = bcx.llbb;
break;
}
ReturnExit => {
prev_llbb = self.get_llreturn();
break;
break
}
LoopExit(id, _) => {
@ -754,12 +773,9 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
// scope for this label. If so, we can stop popping scopes
// and branch to the cached label, since it contains the
// cleanups for any subsequent scopes.
match self.top_scope(|s| s.cached_early_exit(label)) {
Some(cleanup_block) => {
prev_llbb = cleanup_block;
break;
}
None => { }
if let Some(exit) = self.top_scope(|s| s.cached_early_exit(label)) {
prev_llbb = exit;
break;
}
// Pop off the scope, since we will be generating
@ -769,15 +785,11 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
popped_scopes.push(self.pop_scope());
let scope = popped_scopes.last().unwrap();
match label {
UnwindExit | ReturnExit => { }
UnwindExit(..) | ReturnExit => { }
LoopExit(id, exit) => {
match scope.kind.early_exit_block(id, exit) {
Some(exitllbb) => {
prev_llbb = exitllbb;
break;
}
None => { }
if let Some(exit) = scope.kind.early_exit_block(id, exit) {
prev_llbb = exit;
break
}
}
}
@ -810,18 +822,17 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
if !scope.cleanups.is_empty() {
let name = scope.block_name("clean");
debug!("generating cleanups for {}", name);
let bcx_in = self.new_block(label.is_unwind(),
&name[..],
None);
let bcx_in = self.new_block(&name[..], None);
let exit_label = label.start(bcx_in);
let mut bcx_out = bcx_in;
for cleanup in scope.cleanups.iter().rev() {
bcx_out = cleanup.trans(bcx_out,
scope.debug_loc);
bcx_out = cleanup.trans(bcx_out, scope.debug_loc);
}
build::Br(bcx_out, prev_llbb, DebugLoc::None);
exit_label.branch(bcx_out, prev_llbb);
prev_llbb = bcx_in.llbb;
scope.add_cached_early_exit(label, prev_llbb);
scope.add_cached_early_exit(exit_label, prev_llbb);
}
self.push_scope(scope);
}
@ -832,14 +843,14 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
prev_llbb
}
/// Creates a landing pad for the top scope, if one does not exist. The landing pad will
/// perform all cleanups necessary for an unwind and then `resume` to continue error
/// propagation:
/// Creates a landing pad for the top scope, if one does not exist. The
/// landing pad will perform all cleanups necessary for an unwind and then
/// `resume` to continue error propagation:
///
/// landing_pad -> ... cleanups ... -> [resume]
///
/// (The cleanups and resume instruction are created by `trans_cleanups_to_exit_scope()`, not
/// in this function itself.)
/// (The cleanups and resume instruction are created by
/// `trans_cleanups_to_exit_scope()`, not in this function itself.)
fn get_or_create_landing_pad(&'blk self) -> BasicBlockRef {
let pad_bcx;
@ -850,47 +861,58 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
let mut scopes = self.scopes.borrow_mut();
let last_scope = scopes.last_mut().unwrap();
match last_scope.cached_landing_pad {
Some(llbb) => { return llbb; }
Some(llbb) => return llbb,
None => {
let name = last_scope.block_name("unwind");
pad_bcx = self.new_block(true, &name[..], None);
pad_bcx = self.new_block(&name[..], None);
last_scope.cached_landing_pad = Some(pad_bcx.llbb);
}
}
}
// The landing pad return type (the type being propagated). Not sure what
// this represents but it's determined by the personality function and
// this is what the EH proposal example uses.
let llretty = Type::struct_(self.ccx,
&[Type::i8p(self.ccx), Type::i32(self.ccx)],
false);
};
let llpersonality = pad_bcx.fcx.eh_personality();
// The only landing pad clause will be 'cleanup'
let llretval = build::LandingPad(pad_bcx, llretty, llpersonality, 1);
let val = if base::wants_msvc_seh(self.ccx.sess()) {
// A cleanup pad requires a personality function to be specified, so
// we do that here explicitly (happens implicitly below through
// creation of the landingpad instruction). We then create a
// cleanuppad instruction which has no filters to run cleanup on all
// exceptions.
build::SetPersonalityFn(pad_bcx, llpersonality);
let llretval = build::CleanupPad(pad_bcx, None, &[]);
UnwindKind::CleanupPad(llretval)
} else {
// The landing pad return type (the type being propagated). Not sure
// what this represents but it's determined by the personality
// function and this is what the EH proposal example uses.
let llretty = Type::struct_(self.ccx,
&[Type::i8p(self.ccx), Type::i32(self.ccx)],
false);
// The landing pad block is a cleanup
build::SetCleanup(pad_bcx, llretval);
// The only landing pad clause will be 'cleanup'
let llretval = build::LandingPad(pad_bcx, llretty, llpersonality, 1);
// We store the retval in a function-central alloca, so that calls to
// Resume can find it.
match self.personality.get() {
Some(addr) => {
build::Store(pad_bcx, llretval, addr);
}
None => {
let addr = base::alloca(pad_bcx, common::val_ty(llretval), "");
base::call_lifetime_start(pad_bcx, addr);
self.personality.set(Some(addr));
build::Store(pad_bcx, llretval, addr);
}
}
// The landing pad block is a cleanup
build::SetCleanup(pad_bcx, llretval);
let addr = match self.landingpad_alloca.get() {
Some(addr) => addr,
None => {
let addr = base::alloca(pad_bcx, common::val_ty(llretval),
"");
base::call_lifetime_start(pad_bcx, addr);
self.landingpad_alloca.set(Some(addr));
addr
}
};
build::Store(pad_bcx, llretval, addr);
UnwindKind::LandingPad
};
// Generate the cleanup block and branch to it.
let cleanup_llbb = self.trans_cleanups_to_exit_scope(UnwindExit);
build::Br(pad_bcx, cleanup_llbb, DebugLoc::None);
let label = UnwindExit(val);
let cleanup_llbb = self.trans_cleanups_to_exit_scope(label);
label.branch(pad_bcx, cleanup_llbb);
return pad_bcx.llbb;
}
@ -992,10 +1014,53 @@ impl<'blk, 'tcx> CleanupScopeKind<'blk, 'tcx> {
}
impl EarlyExitLabel {
fn is_unwind(&self) -> bool {
/// Generates a branch going from `from_bcx` to `to_llbb` where `self` is
/// the exit label attached to the start of `from_bcx`.
///
/// Transitions from an exit label to other exit labels depend on the type
/// of label. For example with MSVC exceptions unwind exit labels will use
/// the `cleanupret` instruction instead of the `br` instruction.
fn branch(&self, from_bcx: Block, to_llbb: BasicBlockRef) {
// An MSVC cleanup pad may only transfer control out through a
// `cleanupret` naming that pad; every other label exits with a plain
// unconditional branch.
if let UnwindExit(UnwindKind::CleanupPad(pad)) = *self {
build::CleanupRet(from_bcx, pad, Some(to_llbb));
} else {
build::Br(from_bcx, to_llbb, DebugLoc::None);
}
}
/// Generates the necessary instructions at the start of `bcx` to prepare
/// for the same kind of early exit label that `self` is.
///
/// This function will appropriately configure `bcx` based on the kind of
/// label this is. For UnwindExit labels, the `lpad` field of the block will
/// be set to `Some`, and for MSVC exceptions this function will generate a
/// `cleanuppad` instruction at the start of the block so it may be jumped
/// to in the future (e.g. so this block can be cached as an early exit).
///
/// Returns a new label which can be used to cache `bcx` in the list of
/// early exits.
fn start(&self, bcx: Block) -> EarlyExitLabel {
match *self {
UnwindExit => true,
_ => false
UnwindExit(UnwindKind::CleanupPad(..)) => {
let pad = build::CleanupPad(bcx, None, &[]);
*bcx.lpad.borrow_mut() = Some(LandingPad::msvc(pad));
UnwindExit(UnwindKind::CleanupPad(pad))
}
UnwindExit(UnwindKind::LandingPad) => {
*bcx.lpad.borrow_mut() = Some(LandingPad::gnu());
*self
}
label => label,
}
}
}
// Hand-written rather than derived: equality compares only the variant and
// deliberately ignores the `ValueRef` payload of `CleanupPad`, so labels
// match regardless of which specific pad value they carry — presumably so
// cached early exits are found by label kind alone (see the
// `cached_early_exit` lookups); TODO confirm against callers.
impl PartialEq for UnwindKind {
fn eq(&self, val: &UnwindKind) -> bool {
match (*self, *val) {
(UnwindKind::LandingPad, UnwindKind::LandingPad) |
(UnwindKind::CleanupPad(..), UnwindKind::CleanupPad(..)) => true,
_ => false,
}
}
}

View File

@ -17,7 +17,7 @@ pub use self::ExprOrMethodCall::*;
use session::Session;
use llvm;
use llvm::{ValueRef, BasicBlockRef, BuilderRef, ContextRef, TypeKind};
use llvm::{True, False, Bool};
use llvm::{True, False, Bool, OperandBundleDef};
use middle::cfg;
use middle::def::Def;
use middle::def_id::DefId;
@ -326,9 +326,13 @@ pub struct FunctionContext<'a, 'tcx: 'a> {
// we use a separate alloca for each return
pub needs_ret_allocas: bool,
// The a value alloca'd for calls to upcalls.rust_personality. Used when
// outputting the resume instruction.
pub personality: Cell<Option<ValueRef>>,
// When working with landingpad-based exceptions this value is alloca'd and
// later loaded when using the resume instruction. This ends up being
// critical to chaining landing pads and resuing already-translated
// cleanups.
//
// Note that for cleanuppad-based exceptions this is not used.
pub landingpad_alloca: Cell<Option<ValueRef>>,
// True if the caller expects this fn to use the out pointer to
// return. Either way, your code should write into the slot llretslotptr
@ -424,7 +428,6 @@ impl<'a, 'tcx> FunctionContext<'a, 'tcx> {
}
pub fn new_block(&'a self,
is_lpad: bool,
name: &str,
opt_node_id: Option<ast::NodeId>)
-> Block<'a, 'tcx> {
@ -433,7 +436,7 @@ impl<'a, 'tcx> FunctionContext<'a, 'tcx> {
let llbb = llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(),
self.llfn,
name.as_ptr());
BlockS::new(llbb, is_lpad, opt_node_id, self)
BlockS::new(llbb, opt_node_id, self)
}
}
@ -441,13 +444,13 @@ impl<'a, 'tcx> FunctionContext<'a, 'tcx> {
name: &str,
node_id: ast::NodeId)
-> Block<'a, 'tcx> {
self.new_block(false, name, Some(node_id))
self.new_block(name, Some(node_id))
}
pub fn new_temp_block(&'a self,
name: &str)
-> Block<'a, 'tcx> {
self.new_block(false, name, None)
self.new_block(name, None)
}
pub fn join_blocks(&'a self,
@ -577,8 +580,9 @@ pub struct BlockS<'blk, 'tcx: 'blk> {
pub terminated: Cell<bool>,
pub unreachable: Cell<bool>,
// Is this block part of a landing pad?
pub is_lpad: bool,
// If this block part of a landing pad, then this is `Some` indicating what
// kind of landing pad its in, otherwise this is none.
pub lpad: RefCell<Option<LandingPad>>,
// AST node-id associated with this block, if any. Used for
// debugging purposes only.
@ -593,7 +597,6 @@ pub type Block<'blk, 'tcx> = &'blk BlockS<'blk, 'tcx>;
impl<'blk, 'tcx> BlockS<'blk, 'tcx> {
pub fn new(llbb: BasicBlockRef,
is_lpad: bool,
opt_node_id: Option<ast::NodeId>,
fcx: &'blk FunctionContext<'blk, 'tcx>)
-> Block<'blk, 'tcx> {
@ -601,7 +604,7 @@ impl<'blk, 'tcx> BlockS<'blk, 'tcx> {
llbb: llbb,
terminated: Cell::new(false),
unreachable: Cell::new(false),
is_lpad: is_lpad,
lpad: RefCell::new(None),
opt_node_id: opt_node_id,
fcx: fcx
})
@ -658,6 +661,53 @@ impl<'blk, 'tcx> BlockS<'blk, 'tcx> {
}
}
/// A structure representing an active landing pad for the duration of a basic
/// block.
///
/// Each `Block` may contain an instance of this, indicating whether the block
/// is part of a landing pad or not. This is used to make decision about whether
/// to emit `invoke` instructions (e.g. in a landing pad we don't continue to
/// use `invoke`) and also about various function call metadata.
///
/// For GNU exceptions (`landingpad` + `resume` instructions) this structure is
/// just a bunch of `None` instances (not too interesting), but for MSVC
/// exceptions (`cleanuppad` + `cleanupret` instructions) this contains data.
/// When inside of a landing pad, each function call in LLVM IR needs to be
/// annotated with which landing pad it's a part of. This is accomplished via
/// the `OperandBundleDef` value created for MSVC landing pads.
pub struct LandingPad {
cleanuppad: Option<ValueRef>,
operand: Option<OperandBundleDef>,
}
impl LandingPad {
pub fn gnu() -> LandingPad {
LandingPad { cleanuppad: None, operand: None }
}
pub fn msvc(cleanuppad: ValueRef) -> LandingPad {
LandingPad {
cleanuppad: Some(cleanuppad),
operand: Some(OperandBundleDef::new("funclet", &[cleanuppad])),
}
}
pub fn bundle(&self) -> Option<&OperandBundleDef> {
self.operand.as_ref()
}
}
impl Clone for LandingPad {
fn clone(&self) -> LandingPad {
LandingPad {
cleanuppad: self.cleanuppad,
operand: self.cleanuppad.map(|p| {
OperandBundleDef::new("funclet", &[p])
}),
}
}
}
pub struct Result<'blk, 'tcx: 'blk> {
pub bcx: Block<'blk, 'tcx>,
pub val: ValueRef

View File

@ -833,6 +833,16 @@ fn declare_intrinsic(ccx: &CrateContext, key: &str) -> Option<ValueRef> {
return Some(f);
}
);
($name:expr, fn(...) -> $ret:expr) => (
if key == $name {
let f = declare::declare_cfn(ccx, $name,
Type::variadic_func(&[], &$ret),
ccx.tcx().mk_nil());
llvm::SetUnnamedAddr(f, false);
ccx.intrinsics().borrow_mut().insert($name, f.clone());
return Some(f);
}
);
($name:expr, fn($($arg:expr),*) -> $ret:expr) => (
if key == $name {
let f = declare::declare_cfn(ccx, $name, Type::func(&[$($arg),*], &$ret),
@ -841,7 +851,7 @@ fn declare_intrinsic(ccx: &CrateContext, key: &str) -> Option<ValueRef> {
ccx.intrinsics().borrow_mut().insert($name, f.clone());
return Some(f);
}
)
);
}
macro_rules! mk_struct {
($($field_ty:expr),*) => (Type::struct_(ccx, &[$($field_ty),*], false))
@ -869,6 +879,7 @@ fn declare_intrinsic(ccx: &CrateContext, key: &str) -> Option<ValueRef> {
ifn!("llvm.trap", fn() -> void);
ifn!("llvm.debugtrap", fn() -> void);
ifn!("llvm.frameaddress", fn(t_i32) -> i8p);
ifn!("llvm.powi.f32", fn(t_f32, t_i32) -> t_f32);
ifn!("llvm.powi.f64", fn(t_f64, t_i32) -> t_f64);
@ -969,6 +980,9 @@ fn declare_intrinsic(ccx: &CrateContext, key: &str) -> Option<ValueRef> {
ifn!("llvm.expect.i1", fn(i1, i1) -> i1);
ifn!("llvm.eh.typeid.for", fn(i8p) -> t_i32);
ifn!("llvm.localescape", fn(...) -> void);
ifn!("llvm.localrecover", fn(i8p, i8p, t_i32) -> i8p);
ifn!("llvm.x86.seh.recoverfp", fn(i8p, i8p) -> i8p);
// Some intrinsics were introduced in later versions of LLVM, but they have
// fallbacks in libc or libm and such.

View File

@ -844,7 +844,8 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
debug!("calling llrustfn = {}, t = {:?}",
ccx.tn().val_to_string(llrustfn), t);
let attributes = attributes::from_fn_type(ccx, t);
let llrust_ret_val = builder.call(llrustfn, &llrust_args, Some(attributes));
let llrust_ret_val = builder.call(llrustfn, &llrust_args,
None, Some(attributes));
// Get the return value where the foreign fn expects it.
let llforeign_ret_ty = match tys.fn_ty.ret_ty.cast {

View File

@ -316,11 +316,11 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
// For `try` we need some custom control flow
if &name[..] == "try" {
if let callee::ArgExprs(ref exprs) = args {
let (func, data) = if exprs.len() != 2 {
ccx.sess().bug("expected two exprs as arguments for \
let (func, data, local_ptr) = if exprs.len() != 3 {
ccx.sess().bug("expected three exprs as arguments for \
`try` intrinsic");
} else {
(&exprs[0], &exprs[1])
(&exprs[0], &exprs[1], &exprs[2])
};
// translate arguments
@ -328,6 +328,9 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
let func = unpack_datum!(bcx, func.to_rvalue_datum(bcx, "func"));
let data = unpack_datum!(bcx, expr::trans(bcx, data));
let data = unpack_datum!(bcx, data.to_rvalue_datum(bcx, "data"));
let local_ptr = unpack_datum!(bcx, expr::trans(bcx, local_ptr));
let local_ptr = local_ptr.to_rvalue_datum(bcx, "local_ptr");
let local_ptr = unpack_datum!(bcx, local_ptr);
let dest = match dest {
expr::SaveIn(d) => d,
@ -336,7 +339,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
};
// do the invoke
bcx = try_intrinsic(bcx, func.val, data.val, dest,
bcx = try_intrinsic(bcx, func.val, data.val, local_ptr.val, dest,
call_debug_location);
fcx.pop_and_trans_custom_cleanup_scope(bcx, cleanup_scope);
@ -1045,6 +1048,7 @@ fn with_overflow_intrinsic<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
fn try_intrinsic<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
func: ValueRef,
data: ValueRef,
local_ptr: ValueRef,
dest: ValueRef,
dloc: DebugLoc) -> Block<'blk, 'tcx> {
if bcx.sess().no_landing_pads() {
@ -1052,142 +1056,115 @@ fn try_intrinsic<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
Store(bcx, C_null(Type::i8p(bcx.ccx())), dest);
bcx
} else if wants_msvc_seh(bcx.sess()) {
trans_msvc_try(bcx, func, data, dest, dloc)
trans_msvc_try(bcx, func, data, local_ptr, dest, dloc)
} else {
trans_gnu_try(bcx, func, data, dest, dloc)
trans_gnu_try(bcx, func, data, local_ptr, dest, dloc)
}
}
// MSVC's definition of the `rust_try` function. The exact implementation here
// is a little different than the GNU (standard) version below, not only because
// of the personality function but also because of the other fiddly bits about
// SEH. LLVM also currently requires us to structure this in a very particular
// way as explained below.
// MSVC's definition of the `rust_try` function.
//
// Like with the GNU version we generate a shim wrapper
// This implementation uses the new exception handling instructions in LLVM
// which have support in LLVM for SEH on MSVC targets. Although these
// instructions are meant to work for all targets, as of the time of this
// writing, however, LLVM does not recommend the usage of these new instructions
// as the old ones are still more optimized.
fn trans_msvc_try<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
func: ValueRef,
data: ValueRef,
local_ptr: ValueRef,
dest: ValueRef,
dloc: DebugLoc) -> Block<'blk, 'tcx> {
let llfn = get_rust_try_fn(bcx.fcx, &mut |try_fn_ty, output| {
let llfn = get_rust_try_fn(bcx.fcx, &mut |bcx| {
let ccx = bcx.ccx();
let dloc = DebugLoc::None;
let rust_try = declare::define_internal_rust_fn(ccx, "__rust_try",
try_fn_ty);
let (fcx, block_arena);
block_arena = TypedArena::new();
fcx = new_fn_ctxt(ccx, rust_try, ast::DUMMY_NODE_ID, false,
output, ccx.tcx().mk_substs(Substs::trans_empty()),
None, &block_arena);
let bcx = init_function(&fcx, true, output);
let then = fcx.new_temp_block("then");
let catch = fcx.new_temp_block("catch");
let catch_return = fcx.new_temp_block("catch-return");
let catch_resume = fcx.new_temp_block("catch-resume");
let personality = fcx.eh_personality();
let eh_typeid_for = ccx.get_intrinsic(&"llvm.eh.typeid.for");
let rust_try_filter = match bcx.tcx().lang_items.msvc_try_filter() {
Some(did) => callee::trans_fn_ref(ccx, did, ExprId(0),
bcx.fcx.param_substs).val,
None => bcx.sess().bug("msvc_try_filter not defined"),
};
SetPersonalityFn(bcx, bcx.fcx.eh_personality());
// Type indicator for the exception being thrown, not entirely sure
// what's going on here but it's what all the examples in LLVM use.
let lpad_ty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)],
false);
let normal = bcx.fcx.new_temp_block("normal");
let catchswitch = bcx.fcx.new_temp_block("catchswitch");
let catchpad = bcx.fcx.new_temp_block("catchpad");
let caught = bcx.fcx.new_temp_block("caught");
llvm::SetFunctionAttribute(rust_try, llvm::Attribute::NoInline);
llvm::SetFunctionAttribute(rust_try, llvm::Attribute::OptimizeNone);
let func = llvm::get_param(rust_try, 0);
let data = llvm::get_param(rust_try, 1);
let func = llvm::get_param(bcx.fcx.llfn, 0);
let data = llvm::get_param(bcx.fcx.llfn, 1);
let local_ptr = llvm::get_param(bcx.fcx.llfn, 2);
// Invoke the function, specifying our two temporary landing pads as the
// ext point. After the invoke we've terminated our basic block.
Invoke(bcx, func, &[data], then.llbb, catch.llbb, None, dloc);
// All the magic happens in this landing pad, and this is basically the
// only landing pad in rust tagged with "catch" to indicate that we're
// catching an exception. The other catch handlers in the GNU version
// below just catch *all* exceptions, but that's because most exceptions
// are already filtered out by the gnu personality function.
// We're generating an IR snippet that looks like:
//
// For MSVC we're just using a standard personality function that we
// can't customize (e.g. _except_handler3 or __C_specific_handler), so
// we need to do the exception filtering ourselves. This is currently
// performed by the `__rust_try_filter` function. This function,
// specified in the landingpad instruction, will be invoked by Windows
// SEH routines and will return whether the exception in question can be
// caught (aka the Rust runtime is the one that threw the exception).
// declare i32 @rust_try(%func, %data, %ptr) {
// %slot = alloca i8*
// call @llvm.localescape(%slot)
// store %ptr, %slot
// invoke %func(%data) to label %normal unwind label %catchswitch
//
// To get this to compile (currently LLVM segfaults if it's not in this
// particular structure), when the landingpad is executing we test to
// make sure that the ID of the exception being thrown is indeed the one
// that we were expecting. If it's not, we resume the exception, and
// otherwise we return the pointer that we got Full disclosure: It's not
// clear to me what this `llvm.eh.typeid` stuff is doing *other* then
// just allowing LLVM to compile this file without segfaulting. I would
// expect the entire landing pad to just be:
// normal:
// ret i32 0
//
// %vals = landingpad ...
// %ehptr = extractvalue { i8*, i32 } %vals, 0
// ret i8* %ehptr
// catchswitch:
// %cs = catchswitch within none [%catchpad] unwind to caller
//
// but apparently LLVM chokes on this, so we do the more complicated
// thing to placate it.
let vals = LandingPad(catch, lpad_ty, personality, 1);
let rust_try_filter = BitCast(catch, rust_try_filter, Type::i8p(ccx));
AddClause(catch, vals, rust_try_filter);
let ehptr = ExtractValue(catch, vals, 0);
let sel = ExtractValue(catch, vals, 1);
let filter_sel = Call(catch, eh_typeid_for, &[rust_try_filter], None,
dloc);
let is_filter = ICmp(catch, llvm::IntEQ, sel, filter_sel, dloc);
CondBr(catch, is_filter, catch_return.llbb, catch_resume.llbb, dloc);
// catchpad:
// %tok = catchpad within %cs [%rust_try_filter]
// catchret from %tok to label %caught
//
// caught:
// ret i32 1
// }
//
// This structure follows the basic usage of the instructions in LLVM
// (see their documentation/test cases for examples), but a
// perhaps-surprising part here is the usage of the `localescape`
// intrinsic. This is used to allow the filter function (also generated
// here) to access variables on the stack of this intrinsic. This
// ability enables us to transfer information about the exception being
// thrown to this point, where we're catching the exception.
//
// More information can be found in libstd's seh.rs implementation.
let slot = Alloca(bcx, Type::i8p(ccx), "slot");
let localescape = ccx.get_intrinsic(&"llvm.localescape");
Call(bcx, localescape, &[slot], None, dloc);
Store(bcx, local_ptr, slot);
Invoke(bcx, func, &[data], normal.llbb, catchswitch.llbb, None, dloc);
// Our "catch-return" basic block is where we've determined that we
// actually need to catch this exception, in which case we just return
// the exception pointer.
Ret(catch_return, ehptr, dloc);
Ret(normal, C_i32(ccx, 0), dloc);
// The "catch-resume" block is where we're running this landing pad but
// we actually need to not catch the exception, so just resume the
// exception to return.
trans_unwind_resume(catch_resume, vals);
let cs = CatchSwitch(catchswitch, None, None, 1);
AddHandler(catchswitch, cs, catchpad.llbb);
// On the successful branch we just return null.
Ret(then, C_null(Type::i8p(ccx)), dloc);
let filter = generate_filter_fn(bcx.fcx, bcx.fcx.llfn);
let filter = BitCast(catchpad, filter, Type::i8p(ccx));
let tok = CatchPad(catchpad, cs, &[filter]);
CatchRet(catchpad, tok, caught.llbb);
return rust_try
Ret(caught, C_i32(ccx, 1), dloc);
});
// Note that no invoke is used here because by definition this function
// can't panic (that's what it's catching).
let ret = Call(bcx, llfn, &[func, data], None, dloc);
let ret = Call(bcx, llfn, &[func, data, local_ptr], None, dloc);
Store(bcx, ret, dest);
return bcx;
return bcx
}
// Definition of the standard "try" function for Rust using the GNU-like model
// of exceptions (e.g. the normal semantics of LLVM's landingpad and invoke
// instructions).
//
// This translation is a little surprising because
// we always call a shim function instead of inlining the call to `invoke`
// manually here. This is done because in LLVM we're only allowed to have one
// personality per function definition. The call to the `try` intrinsic is
// being inlined into the function calling it, and that function may already
// have other personality functions in play. By calling a shim we're
// guaranteed that our shim will have the right personality function.
//
// This translation is a little surprising because we always call a shim
// function instead of inlining the call to `invoke` manually here. This is done
// because in LLVM we're only allowed to have one personality per function
// definition. The call to the `try` intrinsic is being inlined into the
// function calling it, and that function may already have other personality
// functions in play. By calling a shim we're guaranteed that our shim will have
// the right personality function.
fn trans_gnu_try<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
func: ValueRef,
data: ValueRef,
local_ptr: ValueRef,
dest: ValueRef,
dloc: DebugLoc) -> Block<'blk, 'tcx> {
let llfn = get_rust_try_fn(bcx.fcx, &mut |try_fn_ty, output| {
let llfn = get_rust_try_fn(bcx.fcx, &mut |bcx| {
let ccx = bcx.ccx();
let dloc = DebugLoc::None;
@ -1197,60 +1174,82 @@ fn trans_gnu_try<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
// invoke %func(%args...) normal %normal unwind %catch
//
// normal:
// ret null
// ret 0
//
// catch:
// (ptr, _) = landingpad
// ret ptr
// store ptr, %local_ptr
// ret 1
//
// Note that the `local_ptr` data passed into the `try` intrinsic is
// expected to be `*mut *mut u8` for this to actually work, but that's
// managed by the standard library.
let rust_try = declare::define_internal_rust_fn(ccx, "__rust_try", try_fn_ty);
attributes::emit_uwtable(rust_try, true);
attributes::emit_uwtable(bcx.fcx.llfn, true);
let catch_pers = match bcx.tcx().lang_items.eh_personality_catch() {
Some(did) => callee::trans_fn_ref(ccx, did, ExprId(0),
bcx.fcx.param_substs).val,
None => bcx.tcx().sess.bug("eh_personality_catch not defined"),
};
let (fcx, block_arena);
block_arena = TypedArena::new();
fcx = new_fn_ctxt(ccx, rust_try, ast::DUMMY_NODE_ID, false,
output, ccx.tcx().mk_substs(Substs::trans_empty()),
None, &block_arena);
let bcx = init_function(&fcx, true, output);
let then = bcx.fcx.new_temp_block("then");
let catch = bcx.fcx.new_temp_block("catch");
let func = llvm::get_param(rust_try, 0);
let data = llvm::get_param(rust_try, 1);
let func = llvm::get_param(bcx.fcx.llfn, 0);
let data = llvm::get_param(bcx.fcx.llfn, 1);
let local_ptr = llvm::get_param(bcx.fcx.llfn, 2);
Invoke(bcx, func, &[data], then.llbb, catch.llbb, None, dloc);
Ret(then, C_null(Type::i8p(ccx)), dloc);
Ret(then, C_i32(ccx, 0), dloc);
// Type indicator for the exception being thrown.
// The first value in this tuple is a pointer to the exception object being thrown.
// The second value is a "selector" indicating which of the landing pad clauses
// the exception's type had been matched to. rust_try ignores the selector.
//
// The first value in this tuple is a pointer to the exception object
// being thrown. The second value is a "selector" indicating which of
// the landing pad clauses the exception's type had been matched to.
// rust_try ignores the selector.
let lpad_ty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)],
false);
let vals = LandingPad(catch, lpad_ty, catch_pers, 1);
AddClause(catch, vals, C_null(Type::i8p(ccx)));
let ptr = ExtractValue(catch, vals, 0);
Ret(catch, ptr, dloc);
fcx.cleanup();
return rust_try
Store(catch, ptr, BitCast(catch, local_ptr, Type::i8p(ccx).ptr_to()));
Ret(catch, C_i32(ccx, 1), dloc);
});
// Note that no invoke is used here because by definition this function
// can't panic (that's what it's catching).
let ret = Call(bcx, llfn, &[func, data], None, dloc);
let ret = Call(bcx, llfn, &[func, data, local_ptr], None, dloc);
Store(bcx, ret, dest);
return bcx;
}
// Helper to generate the `Ty` associated with `rust_try`
// Helper function to give a Block to a closure to translate a shim function.
// This is currently primarily used for the `try` intrinsic functions above.
fn gen_fn<'a, 'tcx>(fcx: &FunctionContext<'a, 'tcx>,
name: &str,
ty: Ty<'tcx>,
output: ty::FnOutput<'tcx>,
trans: &mut for<'b> FnMut(Block<'b, 'tcx>))
-> ValueRef {
let ccx = fcx.ccx;
let llfn = declare::define_internal_rust_fn(ccx, name, ty);
let (fcx, block_arena);
block_arena = TypedArena::new();
fcx = new_fn_ctxt(ccx, llfn, ast::DUMMY_NODE_ID, false,
output, ccx.tcx().mk_substs(Substs::trans_empty()),
None, &block_arena);
let bcx = init_function(&fcx, true, output);
trans(bcx);
fcx.cleanup();
return llfn
}
// Helper function used to get a handle to the `__rust_try` function used to
// catch exceptions.
//
// This function is only generated once and is then cached.
fn get_rust_try_fn<'a, 'tcx>(fcx: &FunctionContext<'a, 'tcx>,
f: &mut FnMut(Ty<'tcx>,
ty::FnOutput<'tcx>) -> ValueRef)
trans: &mut for<'b> FnMut(Block<'b, 'tcx>))
-> ValueRef {
let ccx = fcx.ccx;
if let Some(llfn) = *ccx.rust_try_fn().borrow() {
@ -1270,21 +1269,125 @@ fn get_rust_try_fn<'a, 'tcx>(fcx: &FunctionContext<'a, 'tcx>,
}),
});
let fn_ty = tcx.mk_fn(None, fn_ty);
let output = ty::FnOutput::FnConverging(i8p);
let output = ty::FnOutput::FnConverging(tcx.types.i32);
let try_fn_ty = tcx.mk_bare_fn(ty::BareFnTy {
unsafety: hir::Unsafety::Unsafe,
abi: abi::Rust,
sig: ty::Binder(ty::FnSig {
inputs: vec![fn_ty, i8p],
inputs: vec![fn_ty, i8p, i8p],
output: output,
variadic: false,
}),
});
let rust_try = f(tcx.mk_fn(None, try_fn_ty), output);
let rust_try = gen_fn(fcx, "__rust_try", tcx.mk_fn(None, try_fn_ty), output,
trans);
*ccx.rust_try_fn().borrow_mut() = Some(rust_try);
return rust_try
}
// For MSVC-style exceptions (SEH), the compiler generates a filter function
// which is used to determine whether an exception is being caught (e.g. if it's
// a Rust exception or some other).
//
// This function is used to generate said filter function. The shim generated
// here is actually just a thin wrapper to call the real implementation in the
// standard library itself. For reasons as to why, see seh.rs in the standard
// library.
fn generate_filter_fn<'a, 'tcx>(fcx: &FunctionContext<'a, 'tcx>,
rust_try_fn: ValueRef)
-> ValueRef {
let ccx = fcx.ccx;
let tcx = ccx.tcx();
let dloc = DebugLoc::None;
let rust_try_filter = match ccx.tcx().lang_items.msvc_try_filter() {
Some(did) => callee::trans_fn_ref(ccx, did, ExprId(0),
fcx.param_substs).val,
None => ccx.sess().bug("msvc_try_filter not defined"),
};
let output = ty::FnOutput::FnConverging(tcx.types.i32);
let i8p = tcx.mk_mut_ptr(tcx.types.i8);
let frameaddress = ccx.get_intrinsic(&"llvm.frameaddress");
let recoverfp = ccx.get_intrinsic(&"llvm.x86.seh.recoverfp");
let localrecover = ccx.get_intrinsic(&"llvm.localrecover");
// On all platforms, once we have the EXCEPTION_POINTERS handle as well as
// the base pointer, we follow the standard layout of:
//
// block:
// %parentfp = call i8* llvm.x86.seh.recoverfp(@rust_try_fn, %bp)
// %arg = call i8* llvm.localrecover(@rust_try_fn, %parentfp, 0)
// %ret = call i32 @the_real_filter_function(%ehptrs, %arg)
// ret i32 %ret
//
// The recoverfp intrinsic is used to recover the frame frame pointer of the
// `rust_try_fn` function, which is then in turn passed to the
// `localrecover` intrinsic (pairing with the `localescape` intrinsic
// mentioned above). Putting all this together means that we now have a
// handle to the arguments passed into the `try` function, allowing writing
// to the stack over there.
//
// For more info, see seh.rs in the standard library.
let do_trans = |bcx: Block, ehptrs, base_pointer| {
let rust_try_fn = BitCast(bcx, rust_try_fn, Type::i8p(ccx));
let parentfp = Call(bcx, recoverfp, &[rust_try_fn, base_pointer],
None, dloc);
let arg = Call(bcx, localrecover,
&[rust_try_fn, parentfp, C_i32(ccx, 0)], None, dloc);
let ret = Call(bcx, rust_try_filter, &[ehptrs, arg], None, dloc);
Ret(bcx, ret, dloc);
};
if ccx.tcx().sess.target.target.arch == "x86" {
// On x86 the filter function doesn't actually receive any arguments.
// Instead the %ebp register contains some contextual information.
//
// Unfortunately I don't know of any great documentation as to what's
// going on here, all I can say is that there's a few tests cases in
// LLVM's test suite which follow this pattern of instructions, so we
// just do the same.
let filter_fn_ty = tcx.mk_bare_fn(ty::BareFnTy {
unsafety: hir::Unsafety::Unsafe,
abi: abi::Rust,
sig: ty::Binder(ty::FnSig {
inputs: vec![],
output: output,
variadic: false,
}),
});
let filter_fn_ty = tcx.mk_fn(None, filter_fn_ty);
gen_fn(fcx, "__rustc_try_filter", filter_fn_ty, output, &mut |bcx| {
let ebp = Call(bcx, frameaddress, &[C_i32(ccx, 1)], None, dloc);
let exn = InBoundsGEP(bcx, ebp, &[C_i32(ccx, -20)]);
let exn = Load(bcx, BitCast(bcx, exn, Type::i8p(ccx).ptr_to()));
do_trans(bcx, exn, ebp);
})
} else if ccx.tcx().sess.target.target.arch == "x86_64" {
// Conveniently on x86_64 the EXCEPTION_POINTERS handle and base pointer
// are passed in as arguments to the filter function, so we just pass
// those along.
let filter_fn_ty = tcx.mk_bare_fn(ty::BareFnTy {
unsafety: hir::Unsafety::Unsafe,
abi: abi::Rust,
sig: ty::Binder(ty::FnSig {
inputs: vec![i8p, i8p],
output: output,
variadic: false,
}),
});
let filter_fn_ty = tcx.mk_fn(None, filter_fn_ty);
gen_fn(fcx, "__rustc_try_filter", filter_fn_ty, output, &mut |bcx| {
let exn = llvm::get_param(bcx.fcx.llfn, 0);
let rbp = llvm::get_param(bcx.fcx.llfn, 1);
do_trans(bcx, exn, rbp);
})
} else {
panic!("unknown target to generate a filter function")
}
}
fn span_invalid_monomorphization_error(a: &Session, b: Span, c: &str) {
span_err!(a, b, E0511, "{}", c);
}

View File

@ -16,7 +16,7 @@ use trans::adt;
use trans::attributes;
use trans::base;
use trans::build;
use trans::common::{self, Block};
use trans::common::{self, Block, LandingPad};
use trans::debuginfo::DebugLoc;
use trans::foreign;
use trans::type_of;
@ -162,7 +162,8 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
let cleanup = self.bcx(targets.1);
let landingpad = self.make_landing_pad(cleanup);
let (target, postinvoke) = if must_copy_dest {
(bcx.fcx.new_block(false, "", None), Some(self.bcx(targets.0)))
(bcx.fcx.new_block("", None),
Some(self.bcx(targets.0)))
} else {
(self.bcx(targets.0), None)
};
@ -267,7 +268,9 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
}
fn make_landing_pad(&mut self, cleanup: Block<'bcx, 'tcx>) -> Block<'bcx, 'tcx> {
let bcx = cleanup.fcx.new_block(true, "cleanup", None);
let bcx = cleanup.fcx.new_block("cleanup", None);
// FIXME(#30941) this doesn't handle msvc-style exceptions
*bcx.lpad.borrow_mut() = Some(LandingPad::gnu());
let ccx = bcx.ccx();
let llpersonality = bcx.fcx.eh_personality();
let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
@ -283,7 +286,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
match self.unreachable_block {
Some(b) => b,
None => {
let bl = self.fcx.new_block(false, "unreachable", None);
let bl = self.fcx.new_block("unreachable", None);
build::Unreachable(bl);
self.unreachable_block = Some(bl);
bl

View File

@ -14,7 +14,7 @@ use rustc::mir::repr as mir;
use rustc::mir::tcx::LvalueTy;
use trans::base;
use trans::build;
use trans::common::{self, Block};
use trans::common::{self, Block, LandingPad};
use trans::debuginfo::DebugLoc;
use trans::expr;
use trans::type_of;
@ -114,8 +114,12 @@ pub fn trans_mir<'bcx, 'tcx>(bcx: Block<'bcx, 'tcx>) {
let block_bcxs: Vec<Block<'bcx,'tcx>> =
mir_blocks.iter()
.map(|&bb|{
let is_cleanup = mir.basic_block_data(bb).is_cleanup;
fcx.new_block(is_cleanup, &format!("{:?}", bb), None)
let bcx = fcx.new_block(&format!("{:?}", bb), None);
// FIXME(#30941) this doesn't handle msvc-style exceptions
if mir.basic_block_data(bb).is_cleanup {
*bcx.lpad.borrow_mut() = Some(LandingPad::gnu())
}
bcx
})
.collect();

View File

@ -220,7 +220,8 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
let (llval, ll_t_in, signed) = if let CastTy::Int(IntTy::CEnum) = r_t_in {
let repr = adt::represent_type(bcx.ccx(), operand.ty);
let llval = operand.immediate();
let discr = adt::trans_get_discr(bcx, &*repr, llval, None);
let discr = adt::trans_get_discr(bcx, &*repr, llval,
None, true);
(discr, common::val_ty(discr), adt::is_discr_signed(&*repr))
} else {
(operand.immediate(), ll_t_in, operand.ty.is_signed())

View File

@ -293,7 +293,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) {
}),
};
let fn_ty = tcx.mk_bare_fn(fn_ty);
(0, vec![tcx.mk_fn(None, fn_ty), mut_u8], mut_u8)
(0, vec![tcx.mk_fn(None, fn_ty), mut_u8, mut_u8], tcx.types.i32)
}
ref other => {

View File

@ -41,6 +41,11 @@ pub unsafe fn panic(data: Box<Any + Send + 'static>) -> ! {
}
}
#[cfg(not(stage0))]
pub fn payload() -> *mut u8 {
0 as *mut u8
}
pub unsafe fn cleanup(ptr: *mut u8) -> Box<Any + Send + 'static> {
let my_ep = ptr as *mut Exception;
let cause = (*my_ep).cause.take();

View File

@ -83,13 +83,13 @@ use sys_common::mutex::Mutex;
// implementations. One goes through SEH on Windows and the other goes through
// libgcc via the libunwind-like API.
// i686-pc-windows-msvc
#[cfg(all(windows, target_arch = "x86", target_env = "msvc"))]
// *-pc-windows-msvc
#[cfg(target_env = "msvc")]
#[path = "seh.rs"] #[doc(hidden)]
pub mod imp;
// x86_64-pc-windows-*
#[cfg(all(windows, target_arch = "x86_64"))]
// x86_64-pc-windows-gnu
#[cfg(all(windows, target_arch = "x86_64", target_env = "gnu"))]
#[path = "seh64_gnu.rs"] #[doc(hidden)]
pub mod imp;
@ -122,45 +122,54 @@ pub unsafe fn try<F: FnOnce()>(f: F) -> Result<(), Box<Any + Send>> {
let mut f = Some(f);
return inner_try(try_fn::<F>, &mut f as *mut _ as *mut u8);
// If an inner function were not used here, then this generic function `try`
// uses the native symbol `rust_try`, for which the code is statically
// linked into the standard library. This means that the DLL for the
// standard library must have `rust_try` as an exposed symbol that
// downstream crates can link against (because monomorphizations of `try` in
// downstream crates will have a reference to the `rust_try` symbol).
//
// On MSVC this requires the symbol `rust_try` to be tagged with
// `dllexport`, but it's easier to not have conditional `src/rt/rust_try.ll`
// files and instead just have this non-generic shim the compiler can take
// care of exposing correctly.
unsafe fn inner_try(f: fn(*mut u8), data: *mut u8)
-> Result<(), Box<Any + Send>> {
PANIC_COUNT.with(|s| {
let prev = s.get();
s.set(0);
let ep = intrinsics::try(f, data);
s.set(prev);
if ep.is_null() {
Ok(())
} else {
Err(imp::cleanup(ep))
}
})
}
fn try_fn<F: FnOnce()>(opt_closure: *mut u8) {
let opt_closure = opt_closure as *mut Option<F>;
unsafe { (*opt_closure).take().unwrap()(); }
}
}
extern {
// Rust's try-catch
// When f(...) returns normally, the return value is null.
// When f(...) throws, the return value is a pointer to the caught
// exception object.
fn rust_try(f: extern fn(*mut u8),
data: *mut u8) -> *mut u8;
}
#[cfg(not(stage0))]
unsafe fn inner_try(f: fn(*mut u8), data: *mut u8)
-> Result<(), Box<Any + Send>> {
PANIC_COUNT.with(|s| {
let prev = s.get();
s.set(0);
// The "payload" here is a platform-specific region of memory which is
// used to transmit information about the exception being thrown from
// the point-of-throw back to this location.
//
// A pointer to this data is passed to the `try` intrinsic itself,
// allowing this function, the `try` intrinsic, imp::payload(), and
// imp::cleanup() to all work in concert to transmit this information.
//
// More information about what this pointer actually is can be found in
// each implementation as well as browsing the compiler source itself.
let mut payload = imp::payload();
let r = intrinsics::try(f, data, &mut payload as *mut _ as *mut _);
s.set(prev);
if r == 0 {
Ok(())
} else {
Err(imp::cleanup(payload))
}
})
}
/// stage0 (bootstrap) implementation: the old two-argument `try` intrinsic
/// returns an exception-object pointer directly (null on success).
#[cfg(stage0)]
unsafe fn inner_try(f: fn(*mut u8), data: *mut u8)
                    -> Result<(), Box<Any + Send>> {
    PANIC_COUNT.with(|s| {
        // Save/restore the panic counter around the call so a caught panic
        // doesn't register as a double panic.
        let prev = s.get();
        s.set(0);
        let ep = intrinsics::try(f, data);
        s.set(prev);
        if ep.is_null() {
            Ok(())
        } else {
            // Convert the raw exception pointer back into the boxed payload.
            Err(imp::cleanup(ep))
        }
    })
}
/// Determines whether the current thread is unwinding because of panic.

View File

@ -8,109 +8,175 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Win64 SEH (see http://msdn.microsoft.com/en-us/library/1eyas8tf.aspx)
//! Windows SEH
//!
//! On Windows (currently only on MSVC), the default exception handling
//! mechanism is Structured Exception Handling (SEH). This is quite different
//! than Dwarf-based exception handling (e.g. what other unix platforms use) in
//! terms of compiler internals, so LLVM is required to have a good deal of
//! extra support for SEH. Currently this support is somewhat lacking, so what's
//! here is the bare bones of SEH support.
//! extra support for SEH.
//!
//! In a nutshell, what happens here is:
//!
//! 1. The `panic` function calls the standard Windows function `RaiseException`
//! with a Rust-specific code, triggering the unwinding process.
//! 2. All landing pads generated by the compiler (just "cleanup" landing pads)
//! use the personality function `__C_specific_handler`, a function in the
//! CRT, and the unwinding code in Windows will use this personality function
//! to execute all cleanup code on the stack.
//! 3. Eventually the "catch" code in `rust_try` (located in
//! src/rt/rust_try_msvc_64.ll) is executed, which will ensure that the
//! exception being caught is indeed a Rust exception, returning control back
//! into Rust.
//! 2. All landing pads generated by the compiler use the personality function
//! `__C_specific_handler` on 64-bit and `__except_handler3` on 32-bit,
//! functions in the CRT, and the unwinding code in Windows will use this
//! personality function to execute all cleanup code on the stack.
//! 3. All compiler-generated calls to `invoke` have a landing pad set as a
//! `cleanuppad` LLVM instruction, which indicates the start of the cleanup
//! routine. The personality (in step 2, defined in the CRT) is responsible
//! for running the cleanup routines.
//! 4. Eventually the "catch" code in the `try` intrinsic (generated by the
//! compiler) is executed, which will ensure that the exception being caught
//! is indeed a Rust exception, indicating that control should come back to
//! Rust. This is done via a `catchswitch` plus a `catchpad` instruction in
//! LLVM IR terms, finally returning normal control to the program with a
//! `catchret` instruction. The `try` intrinsic uses a filter function to
//! detect what kind of exception is being thrown, and this detection is
//! implemented as the msvc_try_filter language item below.
//!
//! Some specific differences from the gcc-based exception handling are:
//!
//! * Rust has no custom personality function, it is instead *always*
//! __C_specific_handler, so the filtering is done in a C++-like manner
//! instead of in the personality function itself. Note that the specific
//! syntax for this (found in the rust_try_msvc_64.ll) is taken from an LLVM
//! test case for SEH.
//! __C_specific_handler or __except_handler3, so the filtering is done in a
//! C++-like manner instead of in the personality function itself. Note that
//! the precise codegen for this was lifted from an LLVM test case for SEH
//! (this is the `__rust_try_filter` function below).
//! * We've got some data to transmit across the unwinding boundary,
//! specifically a `Box<Any + Send + 'static>`. In Dwarf-based unwinding this
//! data is part of the payload of the exception, but I have not currently
//! figured out how to do this with LLVM's bindings. Judging by some comments
//! in the LLVM test cases this may not even be possible currently with LLVM,
//! so this is just abandoned entirely. Instead the data is stored in a
//! thread-local in `panic` and retrieved during `cleanup`.
//! specifically a `Box<Any + Send + 'static>`. Like with Dwarf exceptions
//! these two pointers are stored as a payload in the exception itself. On
//! MSVC, however, there's no need for an extra allocation because the call
//! stack is preserved while filter functions are being executed. This means
//! that the pointers are passed directly to `RaiseException` which are then
//! recovered in the filter function to be written to the stack frame of the
//! `try` intrinsic.
//!
//! So given all that, the bindings here are pretty small,
//! [win64]: http://msdn.microsoft.com/en-us/library/1eyas8tf.aspx
//! [llvm]: http://llvm.org/docs/ExceptionHandling.html#background-on-windows-exceptions
#![allow(bad_style)]
use prelude::v1::*;
use any::Any;
use ptr;
use sys_common::thread_local::StaticKey;
use sys::c;
// 0x R U S T
const RUST_PANIC: c::DWORD = 0x52555354;
static PANIC_DATA: StaticKey = StaticKey::new(None);
// A code which indicates panics that originate from Rust. Note that some of the
// upper bits are used by the system so we just set them to 0 and ignore them.
// 0x 0 R S T
const RUST_PANIC: c::DWORD = 0x00525354;
pub unsafe fn panic(data: Box<Any + Send + 'static>) -> ! {
// See module docs above for an explanation of why `data` is stored in a
// thread local instead of being passed as an argument to the
// `RaiseException` function (which can in theory carry along arbitrary
// data).
let exception = Box::new(data);
rtassert!(PANIC_DATA.get().is_null());
PANIC_DATA.set(Box::into_raw(exception) as *mut u8);
pub use self::imp::*;
c::RaiseException(RUST_PANIC, 0, 0, ptr::null());
rtabort!("could not unwind stack");
// The stage0 (bootstrap) compiler predates the new SEH-based unwinding
// support, so unwinding is stubbed out entirely here: panicking or cleaning
// up simply aborts the process.
#[cfg(stage0)]
mod imp {
    use prelude::v1::*;
    use any::Any;

    // Unwinding is unsupported in stage0: abort instead of raising.
    pub unsafe fn panic(_data: Box<Any + Send + 'static>) -> ! {
        rtabort!("cannot unwind SEH in stage0")
    }

    // Likewise, there is never an exception to clean up in stage0.
    pub unsafe fn cleanup(_ptr: *mut u8) -> Box<Any + Send + 'static> {
        rtabort!("can't cleanup SEH in stage0")
    }

    // Lang items the stage0 compiler still requires to exist; none of them
    // are meaningfully invoked in stage0.
    #[lang = "msvc_try_filter"]
    #[linkage = "external"]
    unsafe extern fn __rust_try_filter() -> i32 {
        0
    }

    #[lang = "eh_unwind_resume"]
    #[unwind]
    unsafe extern fn rust_eh_unwind_resume(_ptr: *mut u8) -> ! {
        rtabort!("can't resume unwind SEH in stage0")
    }

    #[lang = "eh_personality_catch"]
    unsafe extern fn rust_eh_personality_catch() {}
}
pub unsafe fn cleanup(ptr: *mut u8) -> Box<Any + Send + 'static> {
// The `ptr` here actually corresponds to the code of the exception, and our
// real data is stored in our thread local.
rtassert!(ptr as c::DWORD == RUST_PANIC);
#[cfg(not(stage0))]
mod imp {
use prelude::v1::*;
let data = PANIC_DATA.get() as *mut Box<Any + Send + 'static>;
PANIC_DATA.set(ptr::null_mut());
rtassert!(!data.is_null());
use any::Any;
use mem;
use raw;
use super::RUST_PANIC;
use sys::c;
*Box::from_raw(data)
/// Begins unwinding with the given payload by raising an SEH exception whose
/// "custom exception information" carries the two trait-object pointers.
pub unsafe fn panic(data: Box<Any + Send + 'static>) -> ! {
    // As mentioned above, the call stack here is preserved while the filter
    // functions are running, so it's ok to pass stack-local arrays into
    // `RaiseException`.
    //
    // The two pointers of the `data` trait object are written to the stack,
    // passed to `RaiseException`, and they're later extracted by the filter
    // function below in the "custom exception information" section of the
    // `EXCEPTION_RECORD` type.
    let ptrs = mem::transmute::<_, raw::TraitObject>(data);
    let ptrs = [ptrs.data, ptrs.vtable];
    c::RaiseException(RUST_PANIC, 0, 2, ptrs.as_ptr() as *mut _);
    // `RaiseException` never returns for a Rust panic that is eventually
    // caught; reaching here means unwinding failed entirely.
    rtabort!("could not unwind stack");
}
/// Returns the zero-initialized stack slot into which the filter function
/// writes the two trait-object words of a caught panic.
pub fn payload() -> [usize; 2] {
    [0; 2]
}
/// Reassembles the `Box<Any + Send>` panic payload from the two raw words
/// recovered by the filter function in `__rust_try_filter`.
pub unsafe fn cleanup(payload: [usize; 2]) -> Box<Any + Send + 'static> {
    mem::transmute(raw::TraitObject {
        data: payload[0] as *mut _,
        vtable: payload[1] as *mut _,
    })
}
// This is quite a special function, and it's not literally passed in as the
// filter function for the `catchpad` of the `try` intrinsic. The compiler
// actually generates its own filter function wrapper which will delegate to
// this for the actual execution logic for whether the exception should be
// caught. The reasons for this are:
//
// * Each architecture has a slightly different ABI for the filter function
// here. For example on x86 there are no arguments but on x86_64 there are
// two.
// * This function needs access to the stack frame of the `try` intrinsic
// which is using this filter as a catch pad. This is because the payload
// of this exception, `Box<Any>`, needs to be transmitted to that
// location.
//
// Both of these differences end up using a ton of weird llvm-specific
// intrinsics, so it's actually pretty difficult to express the entire
// filter function in Rust itself. As a compromise, the compiler takes care
// of all the weird LLVM-specific and platform-specific stuff, getting to
// the point where this function makes the actual decision about what to
// catch given two parameters.
//
// The first parameter is `*mut EXCEPTION_POINTERS` which is some contextual
// information about the exception being filtered, and the second pointer is
// `*mut *mut [usize; 2]` (the payload here). This value points directly
// into the stack frame of the `try` intrinsic itself, and we use it to copy
// information from the exception onto the stack.
#[lang = "msvc_try_filter"]
#[cfg(not(test))]
unsafe extern fn __rust_try_filter(eh_ptrs: *mut u8,
                                   payload: *mut u8) -> i32 {
    // Recover the typed views of the two opaque pointers the
    // compiler-generated filter wrapper hands us (see the comment above).
    let eh_ptrs = eh_ptrs as *mut c::EXCEPTION_POINTERS;
    let payload = payload as *mut *mut [usize; 2];
    let record = &*(*eh_ptrs).ExceptionRecord;
    // Only catch exceptions raised by Rust's `panic` above; anything else
    // (e.g. a foreign SEH exception) keeps searching for a handler.
    if record.ExceptionCode != RUST_PANIC {
        return 0
    }
    // Copy the two trait-object words from the exception record into the
    // `try` intrinsic's stack frame (the payload slot), then catch.
    (**payload)[0] = record.ExceptionInformation[0] as usize;
    (**payload)[1] = record.ExceptionInformation[1] as usize;
    return 1
}
}
// This is required by the compiler to exist (e.g. it's a lang item), but it's
// never actually called by the compiler because __C_specific_handler is the
// personality function that is always used. Hence this is just an aborting
// stub.
// This is required by the compiler to exist (e.g. it's a lang item), but
// it's never actually called by the compiler because __C_specific_handler
// or __except_handler3 is the personality function that is always used.
// Hence this is just an aborting stub.
#[lang = "eh_personality"]
#[cfg(not(test))]
fn rust_eh_personality() {
    // Never reached: the CRT-provided personality function is always used
    // instead (see the comment above), so this is just an aborting stub.
    unsafe { ::intrinsics::abort() }
}
// This is a function referenced from `rust_try_msvc_64.ll` which is used to
// filter the exceptions being caught by that function.
//
// In theory local variables can be accessed through the `rbp` parameter of this
// function, but a comment in an LLVM test case indicates that this is not
// implemented in LLVM, so this is just an idempotent function which doesn't
// ferry along any other information.
//
// This function just takes a look at the current EXCEPTION_RECORD being thrown
// to ensure that it's code is RUST_PANIC, which was set by the call to
// `RaiseException` above in the `panic` function.
#[lang = "msvc_try_filter"]
#[linkage = "external"]
#[allow(private_no_mangle_fns)]
extern fn __rust_try_filter(eh_ptrs: *mut c::EXCEPTION_POINTERS,
                            _rbp: *mut u8) -> i32 {
    // Catch (return 1) only when the in-flight exception's code matches the
    // RUST_PANIC code set by `RaiseException` in `panic` above.
    unsafe {
        ((*(*eh_ptrs).ExceptionRecord).ExceptionCode == RUST_PANIC) as i32
    }
}

View File

@ -50,6 +50,11 @@ pub unsafe fn panic(data: Box<Any + Send + 'static>) -> ! {
rtabort!("could not unwind stack");
}
#[cfg(not(stage0))]
pub fn payload() -> *mut u8 {
0 as *mut u8
}
pub unsafe fn cleanup(ptr: *mut u8) -> Box<Any + Send + 'static> {
let panic_ctx = Box::from_raw(ptr as *mut PanicData);
return panic_ctx.data;

View File

@ -987,3 +987,181 @@ LLVMRustBuildLandingPad(LLVMBuilderRef Builder,
LLVMValueRef F) {
return LLVMBuildLandingPad(Builder, Ty, PersFn, NumClauses, Name);
}
// Builds an LLVM `cleanuppad` instruction (LLVM >= 3.8 only; returns NULL on
// older LLVM, where the instruction does not exist). A NULL ParentPad is
// translated to the constant `none` token, i.e. a top-level pad.
extern "C" LLVMValueRef
LLVMRustBuildCleanupPad(LLVMBuilderRef Builder,
                        LLVMValueRef ParentPad,
                        unsigned ArgCnt,
                        LLVMValueRef *LLArgs,
                        const char *Name) {
#if LLVM_VERSION_MINOR >= 8
    Value **Args = unwrap(LLArgs);
    if (ParentPad == NULL) {
        // No parent pad supplied: use `none` (null token) as the parent.
        Type *Ty = Type::getTokenTy(unwrap(Builder)->getContext());
        ParentPad = wrap(Constant::getNullValue(Ty));
    }
    return wrap(unwrap(Builder)->CreateCleanupPad(unwrap(ParentPad),
                                                  ArrayRef<Value*>(Args, ArgCnt),
                                                  Name));
#else
    return NULL;
#endif
}
// Builds a `cleanupret` terminator that exits the given cleanup pad and
// continues unwinding to UnwindBB (LLVM >= 3.8 only; NULL otherwise).
extern "C" LLVMValueRef
LLVMRustBuildCleanupRet(LLVMBuilderRef Builder,
                        LLVMValueRef CleanupPad,
                        LLVMBasicBlockRef UnwindBB) {
#if LLVM_VERSION_MINOR >= 8
    CleanupPadInst *Inst = cast<CleanupPadInst>(unwrap(CleanupPad));
    return wrap(unwrap(Builder)->CreateCleanupRet(Inst, unwrap(UnwindBB)));
#else
    return NULL;
#endif
}
// Builds an LLVM `catchpad` instruction under the given parent
// (a `catchswitch`). LLVM >= 3.8 only; returns NULL on older LLVM.
extern "C" LLVMValueRef
LLVMRustBuildCatchPad(LLVMBuilderRef Builder,
                      LLVMValueRef ParentPad,
                      unsigned ArgCnt,
                      LLVMValueRef *LLArgs,
                      const char *Name) {
#if LLVM_VERSION_MINOR >= 8
    Value **Args = unwrap(LLArgs);
    return wrap(unwrap(Builder)->CreateCatchPad(unwrap(ParentPad),
                                                ArrayRef<Value*>(Args, ArgCnt),
                                                Name));
#else
    return NULL;
#endif
}
// Builds a `catchret` terminator that exits the given catchpad and resumes
// normal control flow at BB (LLVM >= 3.8 only; NULL otherwise).
extern "C" LLVMValueRef
LLVMRustBuildCatchRet(LLVMBuilderRef Builder,
                      LLVMValueRef Pad,
                      LLVMBasicBlockRef BB) {
#if LLVM_VERSION_MINOR >= 8
    return wrap(unwrap(Builder)->CreateCatchRet(cast<CatchPadInst>(unwrap(Pad)),
                                                unwrap(BB)));
#else
    return NULL;
#endif
}
// Builds a `catchswitch` instruction with room for NumHandlers handlers
// (added later via LLVMRustAddHandler) that unwinds to BB when no handler
// matches. A NULL ParentPad is translated to the constant `none` token.
// LLVM >= 3.8 only; returns NULL on older LLVM.
extern "C" LLVMValueRef
LLVMRustBuildCatchSwitch(LLVMBuilderRef Builder,
                         LLVMValueRef ParentPad,
                         LLVMBasicBlockRef BB,
                         unsigned NumHandlers,
                         const char *Name) {
#if LLVM_VERSION_MINOR >= 8
    if (ParentPad == NULL) {
        // No parent pad supplied: use `none` (null token) as the parent.
        Type *Ty = Type::getTokenTy(unwrap(Builder)->getContext());
        ParentPad = wrap(Constant::getNullValue(Ty));
    }
    return wrap(unwrap(Builder)->CreateCatchSwitch(unwrap(ParentPad),
                                                   unwrap(BB),
                                                   NumHandlers,
                                                   Name));
#else
    return NULL;
#endif
}
// Registers Handler as one of the handler blocks of a `catchswitch`
// instruction. No-op on LLVM < 3.8.
extern "C" void
LLVMRustAddHandler(LLVMValueRef CatchSwitchRef,
                   LLVMBasicBlockRef Handler) {
#if LLVM_VERSION_MINOR >= 8
    Value *CatchSwitch = unwrap(CatchSwitchRef);
    cast<CatchSwitchInst>(CatchSwitch)->addHandler(unwrap(Handler));
#endif
}
// Sets the personality function on the function currently being built,
// found via the builder's insertion block. No-op on LLVM < 3.8.
extern "C" void
LLVMRustSetPersonalityFn(LLVMBuilderRef B,
                         LLVMValueRef Personality) {
#if LLVM_VERSION_MINOR >= 8
    unwrap(B)->GetInsertBlock()
             ->getParent()
             ->setPersonalityFn(cast<Function>(unwrap(Personality)));
#endif
}
#if LLVM_VERSION_MINOR >= 8
// Allocates an operand bundle definition on the heap; ownership passes to
// the caller, who must release it with LLVMRustFreeOperandBundleDef.
extern "C" OperandBundleDef*
LLVMRustBuildOperandBundleDef(const char *Name,
                              LLVMValueRef *Inputs,
                              unsigned NumInputs) {
    return new OperandBundleDef(Name, makeArrayRef(unwrap(Inputs), NumInputs));
}

// Frees a bundle previously returned by LLVMRustBuildOperandBundleDef.
extern "C" void
LLVMRustFreeOperandBundleDef(OperandBundleDef* Bundle) {
    delete Bundle;
}

// Like LLVMBuildCall, but optionally attaches a single operand bundle
// (used by trans to tag calls inside MSVC funclets with their pad).
extern "C" LLVMValueRef
LLVMRustBuildCall(LLVMBuilderRef B,
                  LLVMValueRef Fn,
                  LLVMValueRef *Args,
                  unsigned NumArgs,
                  OperandBundleDef *Bundle,
                  const char *Name) {
    // A NULL Bundle means "no bundles" — pass an empty ArrayRef.
    unsigned len = Bundle ? 1 : 0;
    ArrayRef<OperandBundleDef> Bundles = makeArrayRef(Bundle, len);
    return wrap(unwrap(B)->CreateCall(unwrap(Fn),
                                      makeArrayRef(unwrap(Args), NumArgs),
                                      Bundles,
                                      Name));
}

// Like LLVMBuildInvoke, but optionally attaches a single operand bundle.
extern "C" LLVMValueRef
LLVMRustBuildInvoke(LLVMBuilderRef B,
                    LLVMValueRef Fn,
                    LLVMValueRef *Args,
                    unsigned NumArgs,
                    LLVMBasicBlockRef Then,
                    LLVMBasicBlockRef Catch,
                    OperandBundleDef *Bundle,
                    const char *Name) {
    // A NULL Bundle means "no bundles" — pass an empty ArrayRef.
    unsigned len = Bundle ? 1 : 0;
    ArrayRef<OperandBundleDef> Bundles = makeArrayRef(Bundle, len);
    return wrap(unwrap(B)->CreateInvoke(unwrap(Fn), unwrap(Then), unwrap(Catch),
                                        makeArrayRef(unwrap(Args), NumArgs),
                                        Bundles,
                                        Name));
}
#else
// LLVM < 3.8: operand bundles do not exist, so these stubs ignore the
// bundle argument entirely and delegate to the plain LLVM C API.

// Always returns NULL — there is nothing to build pre-3.8.
extern "C" void*
LLVMRustBuildOperandBundleDef(const char *Name,
                              LLVMValueRef *Inputs,
                              unsigned NumInputs) {
    return NULL;
}

// Nothing to free: the builder above always returned NULL.
extern "C" void
LLVMRustFreeOperandBundleDef(void* Bundle) {
}

// Bundle is always NULL here (see the stub above), so it is safely ignored.
extern "C" LLVMValueRef
LLVMRustBuildCall(LLVMBuilderRef B,
                  LLVMValueRef Fn,
                  LLVMValueRef *Args,
                  unsigned NumArgs,
                  void *Bundle,
                  const char *Name) {
    return LLVMBuildCall(B, Fn, Args, NumArgs, Name);
}

// Bundle is always NULL here as well; delegate to the plain invoke builder.
extern "C" LLVMValueRef
LLVMRustBuildInvoke(LLVMBuilderRef B,
                    LLVMValueRef Fn,
                    LLVMValueRef *Args,
                    unsigned NumArgs,
                    LLVMBasicBlockRef Then,
                    LLVMBasicBlockRef Catch,
                    void *Bundle,
                    const char *Name) {
    return LLVMBuildInvoke(B, Fn, Args, NumArgs, Then, Catch, Name);
}
#endif