Miri Memory Work
* Unify the two maps in memory to store the allocation and its kind together (see the sketch after the commit metadata below).
* Share the handling of statics between CTFE and miri: The miri engine always uses "lazy" `AllocType::Static` when encountering a static. Accessing that static invokes CTFE (no matter the machine). The machine only has influence when writing to a static, which CTFE outright rejects (but miri makes a copy-on-write).
* Add an `AllocId` to by-ref consts so miri can use them as operands without making copies.
* Move responsibilities around for the `eval_fn_call` machine hook: The hook just has to find the MIR (or entirely take care of everything); pushing the new stack frame is taken care of by the miri engine.
* Expose the intrinsics and lang items implemented by CTFE so miri does not have to reimplement them.
This commit is contained in:
parent b638d8c75f
commit c141ccf158
@@ -384,7 +384,8 @@ for ::mir::interpret::ConstValue<'gcx> {
             a.hash_stable(hcx, hasher);
             b.hash_stable(hcx, hasher);
         }
-        ByRef(alloc, offset) => {
+        ByRef(id, alloc, offset) => {
+            id.hash_stable(hcx, hasher);
             alloc.hash_stable(hcx, hasher);
             offset.hash_stable(hcx, hasher);
         }
@@ -446,7 +447,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for mir::interpret::Allocation {
         }
         self.undef_mask.hash_stable(hcx, hasher);
         self.align.hash_stable(hcx, hasher);
-        self.runtime_mutability.hash_stable(hcx, hasher);
+        self.mutability.hash_stable(hcx, hasher);
     }
 }

@@ -393,7 +393,8 @@ impl fmt::Display for AllocId {
 pub enum AllocType<'tcx, M> {
     /// The alloc id is used as a function pointer
     Function(Instance<'tcx>),
-    /// The alloc id points to a static variable
+    /// The alloc id points to a "lazy" static variable that did not get computed (yet).
+    /// This is also used to break the cycle in recursive statics.
     Static(DefId),
     /// The alloc id points to memory
     Memory(M)
@@ -496,13 +497,14 @@ pub struct Allocation {
     pub undef_mask: UndefMask,
     /// The alignment of the allocation to detect unaligned reads.
     pub align: Align,
-    /// Whether the allocation (of a static) should be put into mutable memory when codegenning
-    ///
-    /// Only happens for `static mut` or `static` with interior mutability
-    pub runtime_mutability: Mutability,
+    /// Whether the allocation is mutable.
+    /// Also used by codegen to determine if a static should be put into mutable memory,
+    /// which happens for `static mut` and `static` with interior mutability.
+    pub mutability: Mutability,
 }

 impl Allocation {
     /// Creates a read-only allocation initialized by the given bytes
     pub fn from_bytes(slice: &[u8], align: Align) -> Self {
         let mut undef_mask = UndefMask::new(Size::ZERO);
         undef_mask.grow(Size::from_bytes(slice.len() as u64), true);
@@ -511,7 +513,7 @@ impl Allocation {
             relocations: Relocations::new(),
             undef_mask,
             align,
-            runtime_mutability: Mutability::Immutable,
+            mutability: Mutability::Immutable,
         }
     }

@@ -526,7 +528,7 @@ impl Allocation {
             relocations: Relocations::new(),
             undef_mask: UndefMask::new(size),
             align,
-            runtime_mutability: Mutability::Immutable,
+            mutability: Mutability::Mutable,
         }
     }
 }
@@ -14,7 +14,7 @@ use ty::layout::{HasDataLayout, Size};
 use ty::subst::Substs;
 use hir::def_id::DefId;

-use super::{EvalResult, Pointer, PointerArithmetic, Allocation};
+use super::{EvalResult, Pointer, PointerArithmetic, Allocation, AllocId, sign_extend};

 /// Represents a constant value in Rust. Scalar and ScalarPair are optimizations which
 /// matches the LocalValue optimizations for easy conversions between Value and ConstValue.
@@ -32,8 +32,9 @@ pub enum ConstValue<'tcx> {
     ///
     /// The second field may be undef in case of `Option<usize>::None`
     ScalarPair(Scalar, ScalarMaybeUndef),
-    /// Used only for the remaining cases. An allocation + offset into the allocation
-    ByRef(&'tcx Allocation, Size),
+    /// Used only for the remaining cases. An allocation + offset into the allocation.
+    /// Invariant: The AllocId matches the allocation.
+    ByRef(AllocId, &'tcx Allocation, Size),
 }

 impl<'tcx> ConstValue<'tcx> {
@@ -185,6 +186,49 @@ impl<'tcx> Scalar {
             _ => err!(InvalidBool),
         }
     }
+
+    fn to_u8(self) -> EvalResult<'static, u8> {
+        let sz = Size::from_bits(8);
+        let b = self.to_bits(sz)?;
+        assert_eq!(b as u8 as u128, b);
+        Ok(b as u8)
+    }
+
+    fn to_u32(self) -> EvalResult<'static, u32> {
+        let sz = Size::from_bits(32);
+        let b = self.to_bits(sz)?;
+        assert_eq!(b as u32 as u128, b);
+        Ok(b as u32)
+    }
+
+    fn to_usize(self, cx: impl HasDataLayout) -> EvalResult<'static, u64> {
+        let b = self.to_bits(cx.data_layout().pointer_size)?;
+        assert_eq!(b as u64 as u128, b);
+        Ok(b as u64)
+    }
+
+    fn to_i8(self) -> EvalResult<'static, i8> {
+        let sz = Size::from_bits(8);
+        let b = self.to_bits(sz)?;
+        let b = sign_extend(b, sz) as i128;
+        assert_eq!(b as i8 as i128, b);
+        Ok(b as i8)
+    }
+
+    fn to_i32(self) -> EvalResult<'static, i32> {
+        let sz = Size::from_bits(32);
+        let b = self.to_bits(sz)?;
+        let b = sign_extend(b, sz) as i128;
+        assert_eq!(b as i32 as i128, b);
+        Ok(b as i32)
+    }
+
+    fn to_isize(self, cx: impl HasDataLayout) -> EvalResult<'static, i64> {
+        let b = self.to_bits(cx.data_layout().pointer_size)?;
+        let b = sign_extend(b, cx.data_layout().pointer_size) as i128;
+        assert_eq!(b as i64 as i128, b);
+        Ok(b as i64)
+    }
 }

 impl From<Pointer> for Scalar {
@@ -228,6 +272,7 @@ impl From<Scalar> for ScalarMaybeUndef {
 }

 impl<'tcx> ScalarMaybeUndef {
+    #[inline]
     pub fn not_undef(self) -> EvalResult<'static, Scalar> {
         match self {
             ScalarMaybeUndef::Scalar(scalar) => Ok(scalar),
@@ -235,15 +280,48 @@ impl<'tcx> ScalarMaybeUndef {
         }
     }

+    #[inline(always)]
     pub fn to_ptr(self) -> EvalResult<'tcx, Pointer> {
         self.not_undef()?.to_ptr()
     }

+    #[inline(always)]
     pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
         self.not_undef()?.to_bits(target_size)
     }

+    #[inline(always)]
     pub fn to_bool(self) -> EvalResult<'tcx, bool> {
         self.not_undef()?.to_bool()
     }
+
+    #[inline(always)]
+    pub fn to_u8(self) -> EvalResult<'tcx, u8> {
+        self.not_undef()?.to_u8()
+    }
+
+    #[inline(always)]
+    pub fn to_u32(self) -> EvalResult<'tcx, u32> {
+        self.not_undef()?.to_u32()
+    }
+
+    #[inline(always)]
+    pub fn to_usize(self, cx: impl HasDataLayout) -> EvalResult<'tcx, u64> {
+        self.not_undef()?.to_usize(cx)
+    }
+
+    #[inline(always)]
+    pub fn to_i8(self) -> EvalResult<'tcx, i8> {
+        self.not_undef()?.to_i8()
+    }
+
+    #[inline(always)]
+    pub fn to_i32(self) -> EvalResult<'tcx, i32> {
+        self.not_undef()?.to_i32()
+    }
+
+    #[inline(always)]
+    pub fn to_isize(self, cx: impl HasDataLayout) -> EvalResult<'tcx, i64> {
+        self.not_undef()?.to_isize(cx)
+    }
 }
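The new `to_i8`/`to_i32`/`to_isize` helpers above rely on `sign_extend` to reinterpret the low bits of a `u128` as a signed value. Here is a minimal standalone sketch of that step; the real helper takes a `Size` rather than a raw bit count, so this is an assumption-laden illustration, not the library code:

    // Interpret the low `bits` of `value` as a signed number.
    fn sign_extend(value: u128, bits: u64) -> u128 {
        assert!(bits > 0 && bits <= 128);
        let shift = 128 - bits;
        // Move the sign bit to position 127, then arithmetic-shift back down.
        (((value << shift) as i128) >> shift) as u128
    }

    fn main() {
        assert_eq!(sign_extend(0xFF, 8) as i128, -1);  // 0xFF as an i8 is -1
        assert_eq!(sign_extend(0x7F, 8) as i128, 127); // sign bit clear: unchanged
    }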
@@ -1043,13 +1043,13 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
         }

         let interned = self.global_arenas.const_allocs.alloc(alloc);
-        if let Some(prev) = allocs.replace(interned) {
+        if let Some(prev) = allocs.replace(interned) { // insert into interner
             bug!("Tried to overwrite interned Allocation: {:#?}", prev)
         }
         interned
     }

-    /// Allocates a byte or string literal for `mir::interpret`
+    /// Allocates a byte or string literal for `mir::interpret`, read-only
     pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId {
         // create an allocation that just contains these bytes
         let alloc = interpret::Allocation::from_byte_aligned_bytes(bytes);
@@ -1139,7 +1139,7 @@ impl<'tcx> TypeFoldable<'tcx> for ConstValue<'tcx> {
         match *self {
             ConstValue::Scalar(v) => ConstValue::Scalar(v),
             ConstValue::ScalarPair(a, b) => ConstValue::ScalarPair(a, b),
-            ConstValue::ByRef(alloc, offset) => ConstValue::ByRef(alloc, offset),
+            ConstValue::ByRef(id, alloc, offset) => ConstValue::ByRef(id, alloc, offset),
             ConstValue::Unevaluated(def_id, substs) => {
                 ConstValue::Unevaluated(def_id, substs.fold_with(folder))
             }
@@ -1150,7 +1150,7 @@ impl<'tcx> TypeFoldable<'tcx> for ConstValue<'tcx> {
         match *self {
             ConstValue::Scalar(_) |
             ConstValue::ScalarPair(_, _) |
-            ConstValue::ByRef(_, _) => false,
+            ConstValue::ByRef(_, _, _) => false,
             ConstValue::Unevaluated(_, substs) => substs.visit_with(visitor),
         }
     }
@@ -57,7 +57,7 @@ pub fn scalar_to_llvm(
             let base_addr = match alloc_type {
                 Some(AllocType::Memory(alloc)) => {
                     let init = const_alloc_to_llvm(cx, alloc);
-                    if alloc.runtime_mutability == Mutability::Mutable {
+                    if alloc.mutability == Mutability::Mutable {
                         consts::addr_of_mut(cx, init, alloc.align, None)
                     } else {
                         consts::addr_of(cx, init, alloc.align, None)
@@ -134,7 +134,7 @@ pub fn codegen_static_initializer(
     let static_ = cx.tcx.const_eval(param_env.and(cid))?;

     let alloc = match static_.val {
-        ConstValue::ByRef(alloc, n) if n.bytes() == 0 => alloc,
+        ConstValue::ByRef(_, alloc, n) if n.bytes() == 0 => alloc,
         _ => bug!("static const eval returned {:#?}", static_),
     };
     Ok((const_alloc_to_llvm(cx, alloc), alloc))
@@ -126,7 +126,7 @@ impl OperandRef<'ll, 'tcx> {
             };
             OperandValue::Pair(a_llval, b_llval)
         },
-        ConstValue::ByRef(alloc, offset) => {
+        ConstValue::ByRef(_, alloc, offset) => {
             return Ok(PlaceRef::from_const_alloc(bx, layout, alloc, offset).load(bx));
         },
     };
@@ -458,7 +458,7 @@ impl FunctionCx<'a, 'll, 'tcx> {
         let layout = cx.layout_of(self.monomorphize(&ty));
         match bx.tcx().const_eval(param_env.and(cid)) {
             Ok(val) => match val.val {
-                mir::interpret::ConstValue::ByRef(alloc, offset) => {
+                mir::interpret::ConstValue::ByRef(_, alloc, offset) => {
                     PlaceRef::from_const_alloc(bx, layout, alloc, offset)
                 }
                 _ => bug!("promoteds should have an allocation: {:?}", val),
@@ -14,23 +14,22 @@ use std::error::Error;
 use rustc::hir;
 use rustc::mir::interpret::ConstEvalErr;
 use rustc::mir;
-use rustc::ty::{self, TyCtxt, Instance};
-use rustc::ty::layout::{LayoutOf, Primitive, TyLayout, Size};
+use rustc::ty::{self, ParamEnv, TyCtxt, Instance, query::TyCtxtAt};
+use rustc::ty::layout::{LayoutOf, TyLayout};
 use rustc::ty::subst::Subst;
 use rustc_data_structures::indexed_vec::{IndexVec, Idx};

 use syntax::ast::Mutability;
-use syntax::source_map::Span;
+use syntax::source_map::DUMMY_SP;
 use syntax::symbol::Symbol;

 use rustc::mir::interpret::{
     EvalResult, EvalError, EvalErrorKind, GlobalId,
-    Scalar, AllocId, Allocation, ConstValue,
+    Scalar, AllocId, Allocation, ConstValue, AllocType,
 };
 use super::{
     Place, PlaceExtra, PlaceTy, MemPlace, OpTy, Operand, Value,
-    EvalContext, StackPopCleanup, Memory, MemoryKind, MPlaceTy,
+    EvalContext, StackPopCleanup, MemoryKind, Memory,
 };

 pub fn mk_borrowck_eval_cx<'a, 'mir, 'tcx>(
@@ -50,7 +49,7 @@ pub fn mk_borrowck_eval_cx<'a, 'mir, 'tcx>(
         span,
         mir,
         return_place: Place::null(tcx),
-        return_to_block: StackPopCleanup::None,
+        return_to_block: StackPopCleanup::Goto(None), // never pop
         stmt: 0,
     });
     Ok(ecx)
@@ -71,7 +70,7 @@ pub fn mk_eval_cx<'a, 'tcx>(
         mir.span,
         mir,
         Place::null(tcx),
-        StackPopCleanup::None,
+        StackPopCleanup::Goto(None), // never pop
     )?;
     Ok(ecx)
 }
@@ -110,8 +109,10 @@ pub fn op_to_const<'tcx>(
             assert!(alloc.bytes.len() as u64 - ptr.offset.bytes() >= op.layout.size.bytes());
             let mut alloc = alloc.clone();
             alloc.align = align;
+            // FIXME shouldnt it be the case that `mark_static_initialized` has already
+            // interned this? I thought that is the entire point of that `FinishStatic` stuff?
             let alloc = ecx.tcx.intern_const_alloc(alloc);
-            ConstValue::ByRef(alloc, ptr.offset)
+            ConstValue::ByRef(ptr.alloc_id, alloc, ptr.offset)
         },
         Ok(Value::Scalar(x)) =>
             ConstValue::Scalar(x.not_undef()?),
@@ -134,7 +135,6 @@ fn eval_body_and_ecx<'a, 'mir, 'tcx>(
     mir: Option<&'mir mir::Mir<'tcx>>,
     param_env: ty::ParamEnv<'tcx>,
 ) -> (EvalResult<'tcx, OpTy<'tcx>>, EvalContext<'a, 'mir, 'tcx, CompileTimeEvaluator>) {
     debug!("eval_body_and_ecx: {:?}, {:?}", cid, param_env);
     // we start out with the best span we have
     // and try improving it down the road when more information is available
     let span = tcx.def_span(cid.instance.def_id());
@@ -151,7 +151,7 @@ fn eval_body_using_ecx<'a, 'mir, 'tcx>(
     mir: Option<&'mir mir::Mir<'tcx>>,
     param_env: ty::ParamEnv<'tcx>,
 ) -> EvalResult<'tcx, OpTy<'tcx>> {
-    debug!("eval_body: {:?}, {:?}", cid, param_env);
+    debug!("eval_body_using_ecx: {:?}, {:?}", cid, param_env);
     let tcx = ecx.tcx.tcx;
     let mut mir = match mir {
         Some(mir) => mir,
@@ -170,10 +170,11 @@ fn eval_body_using_ecx<'a, 'mir, 'tcx>(
     } else {
         Mutability::Immutable
     };
-    let cleanup = StackPopCleanup::MarkStatic(mutability);
+    let cleanup = StackPopCleanup::FinishStatic(mutability);

     let name = ty::tls::with(|tcx| tcx.item_path_str(cid.instance.def_id()));
     let prom = cid.promoted.map_or(String::new(), |p| format!("::promoted[{:?}]", p));
-    trace!("const_eval: pushing stack frame for global: {}{}", name, prom);
+    trace!("eval_body_using_ecx: pushing stack frame for global: {}{}", name, prom);
     assert!(mir.arg_count == 0);
     ecx.push_stack_frame(
         cid.instance,
@@ -184,8 +185,9 @@ fn eval_body_using_ecx<'a, 'mir, 'tcx>(
     )?;

     // The main interpreter loop.
-    while ecx.step()? {}
+    ecx.run()?;

     debug!("eval_body_using_ecx done: {:?}", *ret);
     Ok(ret.into())
 }
@@ -234,72 +236,36 @@ impl Error for ConstEvalError {
     }
 }

+impl super::IsStatic for ! {
+    fn is_static(self) -> bool {
+        // unreachable
+        self
+    }
+}
+
 impl<'mir, 'tcx> super::Machine<'mir, 'tcx> for CompileTimeEvaluator {
     type MemoryData = ();
     type MemoryKinds = !;
-    fn eval_fn_call<'a>(
+
+    fn find_fn<'a>(
         ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
         instance: ty::Instance<'tcx>,
-        destination: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
         args: &[OpTy<'tcx>],
-        span: Span,
-    ) -> EvalResult<'tcx, bool> {
+        dest: Option<PlaceTy<'tcx>>,
+        ret: Option<mir::BasicBlock>,
+    ) -> EvalResult<'tcx, Option<&'mir mir::Mir<'tcx>>> {
         debug!("eval_fn_call: {:?}", instance);
-        if !ecx.tcx.is_const_fn(instance.def_id()) {
-            let def_id = instance.def_id();
-            // Some fn calls are actually BinOp intrinsics
-            let _: ! = if let Some((op, oflo)) = ecx.tcx.is_binop_lang_item(def_id) {
-                let (dest, bb) = destination.expect("128 lowerings can't diverge");
-                let l = ecx.read_value(args[0])?;
-                let r = ecx.read_value(args[1])?;
-                if oflo {
-                    ecx.binop_with_overflow(op, l, r, dest)?;
-                } else {
-                    ecx.binop_ignore_overflow(op, l, r, dest)?;
-                }
-                ecx.goto_block(bb);
-                return Ok(true);
-            } else if Some(def_id) == ecx.tcx.lang_items().panic_fn() {
-                assert!(args.len() == 1);
-                // &(&'static str, &'static str, u32, u32)
-                let ptr = ecx.read_value(args[0])?;
-                let place = ecx.ref_to_mplace(ptr)?;
-                let (msg, file, line, col) = (
-                    place_field(ecx, 0, place)?,
-                    place_field(ecx, 1, place)?,
-                    place_field(ecx, 2, place)?,
-                    place_field(ecx, 3, place)?,
-                );
-
-                let msg = to_str(ecx, msg)?;
-                let file = to_str(ecx, file)?;
-                let line = to_u32(line)?;
-                let col = to_u32(col)?;
-                return Err(EvalErrorKind::Panic { msg, file, line, col }.into());
-            } else if Some(def_id) == ecx.tcx.lang_items().begin_panic_fn() {
-                assert!(args.len() == 2);
-                // &'static str, &(&'static str, u32, u32)
-                let msg = ecx.read_value(args[0])?;
-                let ptr = ecx.read_value(args[1])?;
-                let place = ecx.ref_to_mplace(ptr)?;
-                let (file, line, col) = (
-                    place_field(ecx, 0, place)?,
-                    place_field(ecx, 1, place)?,
-                    place_field(ecx, 2, place)?,
-                );
-
-                let msg = to_str(ecx, msg.value)?;
-                let file = to_str(ecx, file)?;
-                let line = to_u32(line)?;
-                let col = to_u32(col)?;
-                return Err(EvalErrorKind::Panic { msg, file, line, col }.into());
-            } else {
-                return Err(
-                    ConstEvalError::NotConst(format!("calling non-const fn `{}`", instance)).into(),
-                );
-            };
+        if ecx.hook_fn(instance, args, dest)? {
+            ecx.goto_block(ret)?; // fully evaluated and done
+            return Ok(None);
         }
-        let mir = match ecx.load_mir(instance.def) {
+        if !ecx.tcx.is_const_fn(instance.def_id()) {
+            return Err(
+                ConstEvalError::NotConst(format!("calling non-const fn `{}`", instance)).into(),
+            );
+        }
+        // This is a const fn. Call it.
+        Ok(Some(match ecx.load_mir(instance.def) {
             Ok(mir) => mir,
             Err(err) => {
                 if let EvalErrorKind::NoMirFor(ref path) = err.kind {
@@ -310,94 +276,23 @@ impl<'mir, 'tcx> super::Machine<'mir, 'tcx> for CompileTimeEvaluator {
                 }
                 return Err(err);
             }
-        };
-        let (return_place, return_to_block) = match destination {
-            Some((place, block)) => (*place, StackPopCleanup::Goto(block)),
-            None => (Place::null(&ecx), StackPopCleanup::None),
-        };
-
-        ecx.push_stack_frame(
-            instance,
-            span,
-            mir,
-            return_place,
-            return_to_block,
-        )?;
-
-        Ok(false)
+        }))
     }

     fn call_intrinsic<'a>(
         ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
         instance: ty::Instance<'tcx>,
         args: &[OpTy<'tcx>],
         dest: PlaceTy<'tcx>,
         target: mir::BasicBlock,
     ) -> EvalResult<'tcx> {
-        let substs = instance.substs;
-
-        let intrinsic_name = &ecx.tcx.item_name(instance.def_id()).as_str()[..];
-        match intrinsic_name {
-            "min_align_of" => {
-                let elem_ty = substs.type_at(0);
-                let elem_align = ecx.layout_of(elem_ty)?.align.abi();
-                let align_val = Scalar::Bits {
-                    bits: elem_align as u128,
-                    size: dest.layout.size.bytes() as u8,
-                };
-                ecx.write_scalar(align_val, dest)?;
-            }
-
-            "size_of" => {
-                let ty = substs.type_at(0);
-                let size = ecx.layout_of(ty)?.size.bytes() as u128;
-                let size_val = Scalar::Bits {
-                    bits: size,
-                    size: dest.layout.size.bytes() as u8,
-                };
-                ecx.write_scalar(size_val, dest)?;
-            }
-
-            "type_id" => {
-                let ty = substs.type_at(0);
-                let type_id = ecx.tcx.type_id_hash(ty) as u128;
-                let id_val = Scalar::Bits {
-                    bits: type_id,
-                    size: dest.layout.size.bytes() as u8,
-                };
-                ecx.write_scalar(id_val, dest)?;
-            }
-            "ctpop" | "cttz" | "cttz_nonzero" | "ctlz" | "ctlz_nonzero" | "bswap" => {
-                let ty = substs.type_at(0);
-                let layout_of = ecx.layout_of(ty)?;
-                let bits = ecx.read_scalar(args[0])?.to_bits(layout_of.size)?;
-                let kind = match layout_of.abi {
-                    ty::layout::Abi::Scalar(ref scalar) => scalar.value,
-                    _ => Err(::rustc::mir::interpret::EvalErrorKind::TypeNotPrimitive(ty))?,
-                };
-                let out_val = if intrinsic_name.ends_with("_nonzero") {
-                    if bits == 0 {
-                        return err!(Intrinsic(format!("{} called on 0", intrinsic_name)));
-                    }
-                    numeric_intrinsic(intrinsic_name.trim_right_matches("_nonzero"), bits, kind)?
-                } else {
-                    numeric_intrinsic(intrinsic_name, bits, kind)?
-                };
-                ecx.write_scalar(out_val, dest)?;
-            }
-
-            name => return Err(
-                ConstEvalError::NeedsRfc(format!("calling intrinsic `{}`", name)).into()
-            ),
+        if ecx.emulate_intrinsic(instance, args, dest)? {
+            return Ok(());
         }
-
-        ecx.goto_block(target);
-
-        // Since we pushed no stack frame, the main loop will act
-        // as if the call just completed and it's returning to the
-        // current frame.
-        Ok(())
+        // An intrinsic that we do not support
+        let intrinsic_name = &ecx.tcx.item_name(instance.def_id()).as_str()[..];
+        Err(
+            ConstEvalError::NeedsRfc(format!("calling intrinsic `{}`", intrinsic_name)).into()
+        )
     }

     fn try_ptr_op<'a>(
@@ -417,23 +312,17 @@ impl<'mir, 'tcx> super::Machine<'mir, 'tcx> for CompileTimeEvaluator {
         }
     }

-    fn mark_static_initialized<'a>(
-        _mem: &mut Memory<'a, 'mir, 'tcx, Self>,
-        _id: AllocId,
-        _mutability: Mutability,
-    ) -> EvalResult<'tcx, bool> {
-        Ok(false)
-    }
-
-    fn init_static<'a>(
-        ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
-        cid: GlobalId<'tcx>,
-    ) -> EvalResult<'tcx, AllocId> {
-        Ok(ecx
-            .tcx
-            .alloc_map
-            .lock()
-            .intern_static(cid.instance.def_id()))
+    fn access_static_mut<'a, 'm>(
+        mem: &'m mut Memory<'a, 'mir, 'tcx, Self>,
+        id: AllocId,
+    ) -> EvalResult<'tcx, &'m mut Allocation> {
+        // This is always an error, we do not allow mutating statics
+        match mem.tcx.alloc_map.lock().get(id) {
+            Some(AllocType::Memory(..)) |
+            Some(AllocType::Static(..)) => err!(ModifiedConstantMemory),
+            Some(AllocType::Function(..)) => err!(DerefFunctionPointer),
+            None => err!(DanglingPointerDeref),
+        }
     }

     fn box_alloc<'a>(
@@ -456,40 +345,6 @@ impl<'mir, 'tcx> super::Machine<'mir, 'tcx> for CompileTimeEvaluator {
     }
 }

-fn place_field<'a, 'tcx, 'mir>(
-    ecx: &mut EvalContext<'a, 'mir, 'tcx, CompileTimeEvaluator>,
-    i: u64,
-    place: MPlaceTy<'tcx>,
-) -> EvalResult<'tcx, Value> {
-    let place = ecx.mplace_field(place, i)?;
-    Ok(ecx.try_read_value_from_mplace(place)?.expect("bad panic arg layout"))
-}
-
-fn to_str<'a, 'tcx, 'mir>(
-    ecx: &mut EvalContext<'a, 'mir, 'tcx, CompileTimeEvaluator>,
-    val: Value,
-) -> EvalResult<'tcx, Symbol> {
-    if let Value::ScalarPair(ptr, len) = val {
-        let len = len.not_undef()?.to_bits(ecx.memory.pointer_size())?;
-        let bytes = ecx.memory.read_bytes(ptr.not_undef()?, Size::from_bytes(len as u64))?;
-        let str = ::std::str::from_utf8(bytes)
-            .map_err(|err| EvalErrorKind::ValidationFailure(err.to_string()))?;
-        Ok(Symbol::intern(str))
-    } else {
-        bug!("panic arg is not a str")
-    }
-}
-
-fn to_u32<'a, 'tcx, 'mir>(
-    val: Value,
-) -> EvalResult<'tcx, u32> {
-    if let Value::Scalar(n) = val {
-        Ok(n.not_undef()?.to_bits(Size::from_bits(32))? as u32)
-    } else {
-        bug!("panic arg is not a str")
-    }
-}
-
 /// Project to a field of a (variant of a) const
 pub fn const_field<'a, 'tcx>(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
@@ -542,7 +397,7 @@ pub fn const_to_allocation_provider<'a, 'tcx>(
     val: &'tcx ty::Const<'tcx>,
 ) -> &'tcx Allocation {
     match val.val {
-        ConstValue::ByRef(alloc, offset) => {
+        ConstValue::ByRef(_, alloc, offset) => {
             assert_eq!(offset.bytes(), 0);
             return alloc;
         },
@@ -627,22 +482,42 @@ pub fn const_eval_provider<'a, 'tcx>(
     })
 }

-fn numeric_intrinsic<'tcx>(
-    name: &str,
-    bits: u128,
-    kind: Primitive,
-) -> EvalResult<'tcx, Scalar> {
-    let size = match kind {
-        Primitive::Int(integer, _) => integer.size(),
-        _ => bug!("invalid `{}` argument: {:?}", name, bits),
+/// Helper function to obtain the global (tcx) allocation for a static
+pub fn static_alloc<'a, 'tcx>(
+    tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
+    id: AllocId,
+) -> EvalResult<'tcx, &'tcx Allocation> {
+    let alloc = tcx.alloc_map.lock().get(id);
+    let def_id = match alloc {
+        Some(AllocType::Memory(mem)) => {
+            return Ok(mem)
+        }
+        Some(AllocType::Function(..)) => {
+            return err!(DerefFunctionPointer)
+        }
+        Some(AllocType::Static(did)) => {
+            did
+        }
+        None =>
+            return err!(DanglingPointerDeref),
     };
-    let extra = 128 - size.bits() as u128;
-    let bits_out = match name {
-        "ctpop" => bits.count_ones() as u128,
-        "ctlz" => bits.leading_zeros() as u128 - extra,
-        "cttz" => (bits << extra).trailing_zeros() as u128 - extra,
-        "bswap" => (bits << extra).swap_bytes(),
-        _ => bug!("not a numeric intrinsic: {}", name),
+    // We got a "lazy" static that has not been computed yet, do some work
+    trace!("static_alloc: Need to compute {:?}", def_id);
+    if tcx.is_foreign_item(def_id) {
+        return err!(ReadForeignStatic);
+    }
+    let instance = Instance::mono(tcx.tcx, def_id);
+    let gid = GlobalId {
+        instance,
+        promoted: None,
     };
-    Ok(Scalar::Bits { bits: bits_out, size: size.bytes() as u8 })
+    tcx.const_eval(ParamEnv::reveal_all().and(gid)).map_err(|err| {
+        // no need to report anything, the const_eval call takes care of that for statics
+        assert!(tcx.is_static(def_id).is_some());
+        EvalErrorKind::ReferencedConstant(err).into()
+    }).map(|val| {
+        // FIXME We got our static (will be a ByRef), now we make a *copy*?!?
+        tcx.const_to_allocation(val)
+    })
 }
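A toy model of the lazy-static flow that `static_alloc` implements: the global map stores a `Static` entry until the first read forces evaluation, and mutable access is always an error on the CTFE side (a miri-style machine would copy-on-write instead). All types here are simplified stand-ins for illustration, not rustc code.

    use std::collections::HashMap;

    #[derive(Clone)]
    enum AllocType { Static(&'static str), Memory(Vec<u8>) }

    struct Tcx { alloc_map: HashMap<u64, AllocType> }

    impl Tcx {
        fn get(&mut self, id: u64) -> Result<Vec<u8>, String> {
            match self.alloc_map.get(&id).cloned() {
                Some(AllocType::Memory(mem)) => Ok(mem),
                Some(AllocType::Static(def)) => {
                    // Lazily "run CTFE" for the static, then cache the result.
                    let mem = vec![def.len() as u8]; // stand-in for const_eval
                    self.alloc_map.insert(id, AllocType::Memory(mem.clone()));
                    Ok(mem)
                }
                None => Err("dangling pointer".into()),
            }
        }

        fn get_mut(&mut self, _id: u64) -> Result<&mut Vec<u8>, String> {
            // CTFE rejects all writes to statics; miri would copy-on-write instead.
            Err("ModifiedConstantMemory".into())
        }
    }

    fn main() {
        let mut tcx = Tcx { alloc_map: HashMap::new() };
        tcx.alloc_map.insert(0, AllocType::Static("FOO"));
        assert_eq!(tcx.get(0).unwrap(), vec![3]); // first read forces evaluation
        assert!(tcx.get_mut(0).is_err());         // writes are rejected
    }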
@@ -85,7 +85,7 @@ pub struct Frame<'mir, 'tcx: 'mir> {
     ////////////////////////////////////////////////////////////////////////////////
     // Return place and locals
     ////////////////////////////////////////////////////////////////////////////////
-    /// The block to return to when returning from the current stack frame
+    /// Work to perform when returning from this function
     pub return_to_block: StackPopCleanup,

     /// The location where the result of the current stack frame should be written to.
@@ -157,6 +157,19 @@ impl<'mir, 'tcx: 'mir> Hash for Frame<'mir, 'tcx> {
     }
 }

+#[derive(Clone, Debug, Eq, PartialEq, Hash)]
+pub enum StackPopCleanup {
+    /// The stackframe existed to compute the initial value of a static/constant.
+    /// Call `M::intern_static` on the return value and all allocations it references
+    /// when this is done. Must have a valid pointer as return place.
+    FinishStatic(Mutability),
+    /// Jump to the next block in the caller, or cause UB if None (that's a function
+    /// that may never return).
+    Goto(Option<mir::BasicBlock>),
+    /// Just do nothing: Used by Main and for the box_alloc hook in miri
+    None,
+}
+
 // State of a local variable
 #[derive(Copy, Clone, PartialEq, Eq, Hash)]
 pub enum LocalValue {
@@ -251,20 +264,6 @@ impl<'a, 'mir, 'tcx, M> InfiniteLoopDetector<'a, 'mir, 'tcx, M>
     }
 }

-#[derive(Clone, Debug, Eq, PartialEq, Hash)]
-pub enum StackPopCleanup {
-    /// The stackframe existed to compute the initial value of a static/constant, make sure it
-    /// isn't modifyable afterwards in case of constants.
-    /// In case of `static mut`, mark the memory to ensure it's never marked as immutable through
-    /// references or deallocated
-    MarkStatic(Mutability),
-    /// A regular stackframe added due to a function call will need to get forwarded to the next
-    /// block
-    Goto(mir::BasicBlock),
-    /// The main function and diverging functions have nowhere to return to
-    None,
-}
-
 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for &'a EvalContext<'a, 'mir, 'tcx, M> {
     #[inline]
     fn data_layout(&self) -> &layout::TargetDataLayout {
@@ -388,7 +387,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
     }

     pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value> {
-        let ptr = self.memory.allocate_bytes(s.as_bytes());
+        let ptr = self.memory.allocate_static_bytes(s.as_bytes());
         Ok(Value::new_slice(Scalar::Ptr(ptr), s.len() as u64, self.tcx.tcx))
     }

@@ -628,25 +627,22 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M

     pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
         ::log_settings::settings().indentation -= 1;
         M::end_region(self, None)?;
         let frame = self.stack.pop().expect(
             "tried to pop a stack frame, but there were none",
         );
         match frame.return_to_block {
-            StackPopCleanup::MarkStatic(mutable) => {
-                if let Place::Ptr(MemPlace { ptr, .. }) = frame.return_place {
-                    // FIXME: to_ptr()? might be too extreme here,
-                    // static zsts might reach this under certain conditions
-                    self.memory.mark_static_initialized(
-                        ptr.to_ptr()?.alloc_id,
-                        mutable,
-                    )?
-                } else {
-                    bug!("StackPopCleanup::MarkStatic on: {:?}", frame.return_place);
-                }
+            StackPopCleanup::FinishStatic(mutability) => {
+                let mplace = frame.return_place.to_mem_place();
+                // to_ptr should be okay here; it is the responsibility of whoever pushed
+                // this frame to make sure that this works.
+                let ptr = mplace.ptr.to_ptr()?;
+                assert_eq!(ptr.offset.bytes(), 0);
+                self.memory.mark_static_initialized(ptr.alloc_id, mutability)?;
             }
-            StackPopCleanup::Goto(target) => self.goto_block(target),
-            StackPopCleanup::None => {}
+            StackPopCleanup::Goto(block) => {
+                self.goto_block(block)?;
+            }
+            StackPopCleanup::None => { }
         }
         // deallocate all locals that are backed by an allocation
         for local in frame.locals {
src/librustc_mir/interpret/intrinsics.rs (new file, 171 lines)
@@ -0,0 +1,171 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Intrinsics and other functions that the miri engine executes without
+//! looking at their MIR. Intrinsics/functions supported here are shared by CTFE
+//! and miri.
+
+use syntax::symbol::Symbol;
+use rustc::ty;
+use rustc::ty::layout::{LayoutOf, Primitive};
+use rustc::mir::interpret::{
+    EvalResult, EvalErrorKind, Scalar,
+};
+
+use super::{
+    Machine, PlaceTy, OpTy, EvalContext,
+};
+
+
+fn numeric_intrinsic<'tcx>(
+    name: &str,
+    bits: u128,
+    kind: Primitive,
+) -> EvalResult<'tcx, Scalar> {
+    let size = match kind {
+        Primitive::Int(integer, _) => integer.size(),
+        _ => bug!("invalid `{}` argument: {:?}", name, bits),
+    };
+    let extra = 128 - size.bits() as u128;
+    let bits_out = match name {
+        "ctpop" => bits.count_ones() as u128,
+        "ctlz" => bits.leading_zeros() as u128 - extra,
+        "cttz" => (bits << extra).trailing_zeros() as u128 - extra,
+        "bswap" => (bits << extra).swap_bytes(),
+        _ => bug!("not a numeric intrinsic: {}", name),
+    };
+    Ok(Scalar::Bits { bits: bits_out, size: size.bytes() as u8 })
+}
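A quick standalone check of the width adjustment in `numeric_intrinsic` above: operands arrive widened to `u128`, so `extra` phantom high bits must be compensated for. This is plain arithmetic, not rustc code.

    fn main() {
        let bits: u128 = 1;          // a u8 value of 1, widened to u128
        let extra: u32 = 128 - 8;    // 120 phantom high bits
        // ctlz for a u8 is 7, not 127 -- subtract the phantom bits.
        assert_eq!(bits.leading_zeros() - extra, 7);
        // cttz: pre-shift left so the phantom bits cannot contribute.
        assert_eq!((bits << extra).trailing_zeros() - extra, 0);
        // bswap on a u8 is the identity once the value is shifted into place.
        assert_eq!((bits << extra).swap_bytes(), 1);
    }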
+impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
+    /// Returns whether emulation happened.
+    pub fn emulate_intrinsic(
+        &mut self,
+        instance: ty::Instance<'tcx>,
+        args: &[OpTy<'tcx>],
+        dest: PlaceTy<'tcx>,
+    ) -> EvalResult<'tcx, bool> {
+        let substs = instance.substs;
+
+        let intrinsic_name = &self.tcx.item_name(instance.def_id()).as_str()[..];
+        match intrinsic_name {
+            "min_align_of" => {
+                let elem_ty = substs.type_at(0);
+                let elem_align = self.layout_of(elem_ty)?.align.abi();
+                let align_val = Scalar::Bits {
+                    bits: elem_align as u128,
+                    size: dest.layout.size.bytes() as u8,
+                };
+                self.write_scalar(align_val, dest)?;
+            }
+
+            "size_of" => {
+                let ty = substs.type_at(0);
+                let size = self.layout_of(ty)?.size.bytes() as u128;
+                let size_val = Scalar::Bits {
+                    bits: size,
+                    size: dest.layout.size.bytes() as u8,
+                };
+                self.write_scalar(size_val, dest)?;
+            }
+
+            "type_id" => {
+                let ty = substs.type_at(0);
+                let type_id = self.tcx.type_id_hash(ty) as u128;
+                let id_val = Scalar::Bits {
+                    bits: type_id,
+                    size: dest.layout.size.bytes() as u8,
+                };
+                self.write_scalar(id_val, dest)?;
+            }
+            "ctpop" | "cttz" | "cttz_nonzero" | "ctlz" | "ctlz_nonzero" | "bswap" => {
+                let ty = substs.type_at(0);
+                let layout_of = self.layout_of(ty)?;
+                let bits = self.read_scalar(args[0])?.to_bits(layout_of.size)?;
+                let kind = match layout_of.abi {
+                    ty::layout::Abi::Scalar(ref scalar) => scalar.value,
+                    _ => Err(::rustc::mir::interpret::EvalErrorKind::TypeNotPrimitive(ty))?,
+                };
+                let out_val = if intrinsic_name.ends_with("_nonzero") {
+                    if bits == 0 {
+                        return err!(Intrinsic(format!("{} called on 0", intrinsic_name)));
+                    }
+                    numeric_intrinsic(intrinsic_name.trim_right_matches("_nonzero"), bits, kind)?
+                } else {
+                    numeric_intrinsic(intrinsic_name, bits, kind)?
+                };
+                self.write_scalar(out_val, dest)?;
+            }
+
+            _ => return Ok(false),
+        }
+
+        Ok(true)
+    }
+
+    /// "Intercept" a function call because we have something special to do for it.
+    /// Returns whether an intercept happened.
+    pub fn hook_fn(
+        &mut self,
+        instance: ty::Instance<'tcx>,
+        args: &[OpTy<'tcx>],
+        dest: Option<PlaceTy<'tcx>>,
+    ) -> EvalResult<'tcx, bool> {
+        let def_id = instance.def_id();
+        // Some fn calls are actually BinOp intrinsics
+        if let Some((op, oflo)) = self.tcx.is_binop_lang_item(def_id) {
+            let dest = dest.expect("128 lowerings can't diverge");
+            let l = self.read_value(args[0])?;
+            let r = self.read_value(args[1])?;
+            if oflo {
+                self.binop_with_overflow(op, l, r, dest)?;
+            } else {
+                self.binop_ignore_overflow(op, l, r, dest)?;
+            }
+            return Ok(true);
+        } else if Some(def_id) == self.tcx.lang_items().panic_fn() {
+            assert!(args.len() == 1);
+            // &(&'static str, &'static str, u32, u32)
+            let ptr = self.read_value(args[0])?;
+            let place = self.ref_to_mplace(ptr)?;
+            let (msg, file, line, col) = (
+                self.mplace_field(place, 0)?,
+                self.mplace_field(place, 1)?,
+                self.mplace_field(place, 2)?,
+                self.mplace_field(place, 3)?,
+            );
+
+            let msg = Symbol::intern(self.read_str(msg.into())?);
+            let file = Symbol::intern(self.read_str(file.into())?);
+            let line = self.read_scalar(line.into())?.to_u32()?;
+            let col = self.read_scalar(col.into())?.to_u32()?;
+            return Err(EvalErrorKind::Panic { msg, file, line, col }.into());
+        } else if Some(def_id) == self.tcx.lang_items().begin_panic_fn() {
+            assert!(args.len() == 2);
+            // &'static str, &(&'static str, u32, u32)
+            let msg = args[0];
+            let ptr = self.read_value(args[1])?;
+            let place = self.ref_to_mplace(ptr)?;
+            let (file, line, col) = (
+                self.mplace_field(place, 0)?,
+                self.mplace_field(place, 1)?,
+                self.mplace_field(place, 2)?,
+            );
+
+            let msg = Symbol::intern(self.read_str(msg)?);
+            let file = Symbol::intern(self.read_str(file.into())?);
+            let line = self.read_scalar(line.into())?.to_u32()?;
+            let col = self.read_scalar(col.into())?.to_u32()?;
+            return Err(EvalErrorKind::Panic { msg, file, line, col }.into());
+        } else {
+            return Ok(false);
+        }
+    }
+}
@@ -14,15 +14,18 @@

 use std::hash::Hash;

-use rustc::mir::interpret::{AllocId, EvalResult, Scalar, Pointer, AccessKind, GlobalId};
+use rustc::mir::interpret::{AllocId, Allocation, EvalResult, Scalar};
+use super::{EvalContext, PlaceTy, OpTy, Memory};

 use rustc::mir;
 use rustc::ty::{self, layout::TyLayout};
-use rustc::ty::layout::Size;
-use syntax::source_map::Span;
 use syntax::ast::Mutability;

+/// Used by the machine to tell if a certain allocation is for static memory
+pub trait IsStatic {
+    fn is_static(self) -> bool;
+}
+
 /// Methods of this trait signifies a point where CTFE evaluation would fail
 /// and some use case dependent behaviour can instead be applied
 pub trait Machine<'mir, 'tcx>: Clone + Eq + Hash {
@@ -30,29 +33,33 @@ pub trait Machine<'mir, 'tcx>: Clone + Eq + Hash {
     type MemoryData: Clone + Eq + Hash;

     /// Additional memory kinds a machine wishes to distinguish from the builtin ones
-    type MemoryKinds: ::std::fmt::Debug + PartialEq + Copy + Clone;
+    type MemoryKinds: ::std::fmt::Debug + Copy + Clone + Eq + Hash + IsStatic;

     /// Entry point to all function calls.
     ///
-    /// Returns Ok(true) when the function was handled completely
-    /// e.g. due to missing mir
-    ///
-    /// Returns Ok(false) if a new stack frame was pushed
-    fn eval_fn_call<'a>(
+    /// Returns either the mir to use for the call, or `None` if execution should
+    /// just proceed (which usually means this hook did all the work that the
+    /// called function should usually have done). In the latter case, it is
+    /// this hook's responsibility to call `goto_block(ret)` to advance the instruction pointer!
+    /// (This is to support functions like `__rust_maybe_catch_panic` that neither find a MIR
+    /// nor just jump to `ret`, but instead push their own stack frame.)
+    /// Passing `dest` and `ret` in the same `Option` proved very annoying when only one of them
+    /// was used.
+    fn find_fn<'a>(
         ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
         instance: ty::Instance<'tcx>,
-        destination: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
         args: &[OpTy<'tcx>],
-        span: Span,
-    ) -> EvalResult<'tcx, bool>;
+        dest: Option<PlaceTy<'tcx>>,
+        ret: Option<mir::BasicBlock>,
+    ) -> EvalResult<'tcx, Option<&'mir mir::Mir<'tcx>>>;

-    /// directly process an intrinsic without pushing a stack frame.
+    /// Directly process an intrinsic without pushing a stack frame.
+    /// If this returns successfully, the engine will take care of jumping to the next block.
     fn call_intrinsic<'a>(
         ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
         instance: ty::Instance<'tcx>,
         args: &[OpTy<'tcx>],
         dest: PlaceTy<'tcx>,
         target: mir::BasicBlock,
     ) -> EvalResult<'tcx>;

     /// Called for all binary operations except on float types.
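A toy model of the `find_fn` contract documented above: the hook either hands back MIR for the engine to push a frame for, fully handles the call itself, or errors out. The names, the `&'static str` stand-in for MIR, and the `eval_fn_call` driver here are all illustrative assumptions, not the real rustc interfaces.

    trait Machine {
        // Some(mir): the engine should push a stack frame for this MIR.
        // None: the hook handled the call completely (and advanced the caller).
        fn find_fn(&mut self, name: &str) -> Result<Option<&'static str>, String>;
    }

    struct Ctfe;

    impl Machine for Ctfe {
        fn find_fn(&mut self, name: &str) -> Result<Option<&'static str>, String> {
            match name {
                "checked_add" => Ok(None),            // handled in place (a hook_fn case)
                "some_const_fn" => Ok(Some("<mir>")), // engine will push the frame
                other => Err(format!("calling non-const fn `{}`", other)),
            }
        }
    }

    // Engine side: the *engine*, not the machine, pushes the new stack frame.
    fn eval_fn_call<M: Machine>(m: &mut M, name: &str) -> Result<(), String> {
        if let Some(mir) = m.find_fn(name)? {
            println!("pushing stack frame for {}", mir);
        }
        Ok(())
    }

    fn main() {
        let mut m = Ctfe;
        eval_fn_call(&mut m, "some_const_fn").unwrap();
        eval_fn_call(&mut m, "checked_add").unwrap();
        assert!(eval_fn_call(&mut m, "println").is_err());
    }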
@@ -70,19 +77,11 @@ pub trait Machine<'mir, 'tcx>: Clone + Eq + Hash {
         right_layout: TyLayout<'tcx>,
     ) -> EvalResult<'tcx, Option<(Scalar, bool)>>;

-    /// Called when trying to mark machine defined `MemoryKinds` as static
-    fn mark_static_initialized<'a>(
-        _mem: &mut Memory<'a, 'mir, 'tcx, Self>,
-        _id: AllocId,
-        _mutability: Mutability,
-    ) -> EvalResult<'tcx, bool>;
-
-    /// Called when requiring a pointer to a static. Non const eval can
-    /// create a mutable memory location for `static mut`
-    fn init_static<'a>(
-        ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
-        cid: GlobalId<'tcx>,
-    ) -> EvalResult<'tcx, AllocId>;
+    /// Called when requiring mutable access to data in a static.
+    fn access_static_mut<'a, 'm>(
+        mem: &'m mut Memory<'a, 'mir, 'tcx, Self>,
+        id: AllocId,
+    ) -> EvalResult<'tcx, &'m mut Allocation>;

     /// Heap allocations via the `box` keyword
     ///
@@ -99,35 +98,7 @@ pub trait Machine<'mir, 'tcx>: Clone + Eq + Hash {
         mutability: Mutability,
     ) -> EvalResult<'tcx>;

-    fn check_locks<'a>(
-        _mem: &Memory<'a, 'mir, 'tcx, Self>,
-        _ptr: Pointer,
-        _size: Size,
-        _access: AccessKind,
-    ) -> EvalResult<'tcx> {
-        Ok(())
-    }
-
-    fn add_lock<'a>(
-        _mem: &mut Memory<'a, 'mir, 'tcx, Self>,
-        _id: AllocId,
-    ) {}
-
-    fn free_lock<'a>(
-        _mem: &mut Memory<'a, 'mir, 'tcx, Self>,
-        _id: AllocId,
-        _len: u64,
-    ) -> EvalResult<'tcx> {
-        Ok(())
-    }
-
-    fn end_region<'a>(
-        _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
-        _reg: Option<::rustc::middle::region::Scope>,
-    ) -> EvalResult<'tcx> {
-        Ok(())
-    }
-
     /// Execute a validation operation
     fn validation_op<'a>(
         _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
         _op: ::rustc::mir::ValidationOp,
@@ -20,26 +20,23 @@ use std::collections::VecDeque;
 use std::hash::{Hash, Hasher};
 use std::ptr;

-use rustc::hir::def_id::DefId;
 use rustc::ty::Instance;
-use rustc::ty::ParamEnv;
 use rustc::ty::query::TyCtxtAt;
 use rustc::ty::layout::{self, Align, TargetDataLayout, Size};
-use rustc::mir::interpret::{Pointer, AllocId, Allocation, AccessKind, ScalarMaybeUndef,
-                            EvalResult, Scalar, EvalErrorKind, GlobalId, AllocType, truncate};
+use rustc::mir::interpret::{Pointer, AllocId, Allocation, ScalarMaybeUndef,
+                            EvalResult, Scalar, EvalErrorKind, AllocType, truncate};
 pub use rustc::mir::interpret::{write_target_uint, read_target_uint};
 use rustc_data_structures::fx::{FxHashSet, FxHashMap, FxHasher};

 use syntax::ast::Mutability;

-use super::{EvalContext, Machine};
+use super::{EvalContext, Machine, IsStatic, static_alloc};

 ////////////////////////////////////////////////////////////////////////////////
 // Allocations and pointers
 ////////////////////////////////////////////////////////////////////////////////

-#[derive(Debug, PartialEq, Eq, Copy, Clone)]
+#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
 pub enum MemoryKind<T> {
     /// Error if deallocated except during a stack pop
     Stack,
@@ -47,6 +44,15 @@ pub enum MemoryKind<T> {
     Machine(T),
 }

+impl<T: IsStatic> IsStatic for MemoryKind<T> {
+    fn is_static(self) -> bool {
+        match self {
+            MemoryKind::Stack => false,
+            MemoryKind::Machine(kind) => kind.is_static(),
+        }
+    }
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 // Top-level interpreter memory
 ////////////////////////////////////////////////////////////////////////////////
@@ -56,11 +62,10 @@ pub struct Memory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
     /// Additional data required by the Machine
     pub data: M::MemoryData,

-    /// Helps guarantee that stack allocations aren't deallocated via `rust_deallocate`
-    alloc_kind: FxHashMap<AllocId, MemoryKind<M::MemoryKinds>>,
-
-    /// Actual memory allocations (arbitrary bytes, may contain pointers into other allocations).
-    alloc_map: FxHashMap<AllocId, Allocation>,
+    /// Allocations local to this instance of the miri engine. The kind
+    /// helps ensure that the same mechanism is used for allocation and
+    /// deallocation.
+    alloc_map: FxHashMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation)>,

     pub tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
 }
@@ -77,13 +82,11 @@ impl<'a, 'mir, 'tcx, M> PartialEq for Memory<'a, 'mir, 'tcx, M>
     fn eq(&self, other: &Self) -> bool {
         let Memory {
             data,
-            alloc_kind,
             alloc_map,
             tcx: _,
         } = self;

         *data == other.data
-            && *alloc_kind == other.alloc_kind
             && *alloc_map == other.alloc_map
     }
 }
@@ -95,7 +98,6 @@ impl<'a, 'mir, 'tcx, M> Hash for Memory<'a, 'mir, 'tcx, M>
     fn hash<H: Hasher>(&self, state: &mut H) {
         let Memory {
             data,
-            alloc_kind: _,
             alloc_map: _,
             tcx: _,
         } = self;
@@ -108,10 +110,11 @@ impl<'a, 'mir, 'tcx, M> Hash for Memory<'a, 'mir, 'tcx, M>
         // iteration orders, we use a commutative operation (in this case
         // addition, but XOR would also work), to combine the hash of each
         // `Allocation`.
-        self.allocations()
-            .map(|allocs| {
+        self.alloc_map.iter()
+            .map(|(&id, alloc)| {
                 let mut h = FxHasher::default();
-                allocs.hash(&mut h);
+                id.hash(&mut h);
+                alloc.hash(&mut h);
                 h.finish()
             })
             .fold(0u64, |hash, x| hash.wrapping_add(x))
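A small standalone check of the order-independent hashing trick used above: hash each entry separately and combine with a commutative operation (wrapping add), so map iteration order cannot affect the result. Uses only the standard library; not rustc code.

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    fn combined_hash(entries: &[(u64, &str)]) -> u64 {
        entries.iter()
            .map(|e| {
                // Hash each (id, alloc) entry on its own ...
                let mut h = DefaultHasher::new();
                e.hash(&mut h);
                h.finish()
            })
            // ... then combine with a commutative op.
            .fold(0u64, |acc, x| acc.wrapping_add(x))
    }

    fn main() {
        let a = [(1, "foo"), (2, "bar")];
        let b = [(2, "bar"), (1, "foo")];
        assert_eq!(combined_hash(&a), combined_hash(&b)); // order-insensitive
    }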
@@ -123,47 +126,36 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
     pub fn new(tcx: TyCtxtAt<'a, 'tcx, 'tcx>, data: M::MemoryData) -> Self {
         Memory {
             data,
-            alloc_kind: FxHashMap::default(),
             alloc_map: FxHashMap::default(),
             tcx,
         }
     }

-    pub fn allocations<'x>(
-        &'x self,
-    ) -> impl Iterator<Item = (AllocId, &'x Allocation)> {
-        self.alloc_map.iter().map(|(&id, alloc)| (id, alloc))
-    }
-
     pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> Pointer {
         self.tcx.alloc_map.lock().create_fn_alloc(instance).into()
     }

-    pub fn allocate_bytes(&mut self, bytes: &[u8]) -> Pointer {
+    pub fn allocate_static_bytes(&mut self, bytes: &[u8]) -> Pointer {
         self.tcx.allocate_bytes(bytes).into()
     }

-    /// kind is `None` for statics
-    pub fn allocate_value(
+    pub fn allocate_with(
         &mut self,
         alloc: Allocation,
         kind: MemoryKind<M::MemoryKinds>,
     ) -> EvalResult<'tcx, AllocId> {
         let id = self.tcx.alloc_map.lock().reserve();
-        M::add_lock(self, id);
-        self.alloc_map.insert(id, alloc);
-        self.alloc_kind.insert(id, kind);
+        self.alloc_map.insert(id, (kind, alloc));
         Ok(id)
     }

-    /// kind is `None` for statics
     pub fn allocate(
         &mut self,
         size: Size,
         align: Align,
         kind: MemoryKind<M::MemoryKinds>,
     ) -> EvalResult<'tcx, Pointer> {
-        self.allocate_value(Allocation::undef(size, align), kind).map(Pointer::from)
+        self.allocate_with(Allocation::undef(size, align), kind).map(Pointer::from)
     }

     pub fn reallocate(
@@ -178,15 +170,6 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         if ptr.offset.bytes() != 0 {
             return err!(ReallocateNonBasePtr);
         }
-        if self.alloc_map.contains_key(&ptr.alloc_id) {
-            let alloc_kind = self.alloc_kind[&ptr.alloc_id];
-            if alloc_kind != kind {
-                return err!(ReallocatedWrongMemoryKind(
-                    format!("{:?}", alloc_kind),
-                    format!("{:?}", kind),
-                ));
-            }
-        }

         // For simplicities' sake, we implement reallocate as "alloc, copy, dealloc"
         let new_ptr = self.allocate(new_size, new_align, kind)?;
@@ -196,20 +179,25 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
             new_ptr.into(),
             new_align,
             old_size.min(new_size),
-            /*nonoverlapping*/
-            true,
+            /*nonoverlapping*/ true,
         )?;
         self.deallocate(ptr, Some((old_size, old_align)), kind)?;

         Ok(new_ptr)
     }

+    pub fn is_static(&self, alloc_id: AllocId) -> bool {
+        self.alloc_map.get(&alloc_id).map_or(true, |&(kind, _)| kind.is_static())
+    }
+
     /// Deallocate a local, or do nothing if that local has been made into a static
     pub fn deallocate_local(&mut self, ptr: Pointer) -> EvalResult<'tcx> {
-        match self.alloc_kind.get(&ptr.alloc_id).cloned() {
-            Some(MemoryKind::Stack) => self.deallocate(ptr, None, MemoryKind::Stack),
-            // Happens if the memory was interned into immutable memory
-            None => Ok(()),
-            other => bug!("local contained non-stack memory: {:?}", other),
+        // The allocation might be already removed by static interning.
+        // This can only really happen in the CTFE instance, not in miri.
+        if self.alloc_map.contains_key(&ptr.alloc_id) {
+            self.deallocate(ptr, None, MemoryKind::Stack)
+        } else {
+            Ok(())
         }
     }

@@ -223,9 +211,10 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
             return err!(DeallocateNonBasePtr);
         }

-        let alloc = match self.alloc_map.remove(&ptr.alloc_id) {
+        let (alloc_kind, alloc) = match self.alloc_map.remove(&ptr.alloc_id) {
             Some(alloc) => alloc,
             None => {
+                // Deallocating static memory -- always an error
                 return match self.tcx.alloc_map.lock().get(ptr.alloc_id) {
                     Some(AllocType::Function(..)) => err!(DeallocatedWrongMemoryKind(
                         "function".to_string(),
@@ -241,18 +230,6 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
             }
         };

-        let alloc_kind = self.alloc_kind
-            .remove(&ptr.alloc_id)
-            .expect("alloc_map out of sync with alloc_kind");
-
-        // It is okay for us to still holds locks on deallocation -- for example, we could store
-        // data we own in a local, and the local could be deallocated (from StorageDead) before the
-        // function returns. However, we should check *something*. For now, we make sure that there
-        // is no conflicting write lock by another frame. We *have* to permit deallocation if we
-        // hold a read lock.
-        // FIXME: Figure out the exact rules here.
-        M::free_lock(self, ptr.alloc_id, alloc.bytes.len() as u64)?;
-
         if alloc_kind != kind {
             return err!(DeallocatedWrongMemoryKind(
                 format!("{:?}", alloc_kind),
@@ -339,63 +316,33 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {

 /// Allocation accessors
 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
-    fn const_eval_static(&self, def_id: DefId) -> EvalResult<'tcx, &'tcx Allocation> {
-        if self.tcx.is_foreign_item(def_id) {
-            return err!(ReadForeignStatic);
-        }
-        let instance = Instance::mono(self.tcx.tcx, def_id);
-        let gid = GlobalId {
-            instance,
-            promoted: None,
-        };
-        self.tcx.const_eval(ParamEnv::reveal_all().and(gid)).map_err(|err| {
-            // no need to report anything, the const_eval call takes care of that for statics
-            assert!(self.tcx.is_static(def_id).is_some());
-            EvalErrorKind::ReferencedConstant(err).into()
-        }).map(|val| {
-            self.tcx.const_to_allocation(val)
-        })
-    }
-
     pub fn get(&self, id: AllocId) -> EvalResult<'tcx, &Allocation> {
         // normal alloc?
         match self.alloc_map.get(&id) {
-            Some(alloc) => Ok(alloc),
-            // uninitialized static alloc?
-            None => {
-                // static alloc?
-                let alloc = self.tcx.alloc_map.lock().get(id);
-                match alloc {
-                    Some(AllocType::Memory(mem)) => Ok(mem),
-                    Some(AllocType::Function(..)) => {
-                        Err(EvalErrorKind::DerefFunctionPointer.into())
-                    }
-                    Some(AllocType::Static(did)) => {
-                        self.const_eval_static(did)
-                    }
-                    None => Err(EvalErrorKind::DanglingPointerDeref.into()),
-                }
-            },
+            Some(alloc) => Ok(&alloc.1),
+            // No need to make any copies, just provide read access to the global static
+            // memory in tcx.
+            None => static_alloc(self.tcx, id),
         }
     }

-    fn get_mut(
+    pub fn get_mut(
         &mut self,
         id: AllocId,
     ) -> EvalResult<'tcx, &mut Allocation> {
-        // normal alloc?
-        match self.alloc_map.get_mut(&id) {
-            Some(alloc) => Ok(alloc),
-            // uninitialized static alloc?
-            None => {
-                // no alloc or immutable alloc? produce an error
-                match self.tcx.alloc_map.lock().get(id) {
-                    Some(AllocType::Memory(..)) |
-                    Some(AllocType::Static(..)) => err!(ModifiedConstantMemory),
-                    Some(AllocType::Function(..)) => err!(DerefFunctionPointer),
-                    None => err!(DanglingPointerDeref),
-                }
-            },
+        // Static?
+        let alloc = if self.alloc_map.contains_key(&id) {
+            &mut self.alloc_map.get_mut(&id).unwrap().1
+        } else {
+            // The machine controls to what extend we are allowed to mutate global
+            // statics. (We do not want to allow that during CTFE, but miri needs it.)
+            M::access_static_mut(self, id)?
+        };
+        // See if we can use this
+        if alloc.mutability == Mutability::Immutable {
+            err!(ModifiedConstantMemory)
+        } else {
+            Ok(alloc)
         }
     }
@@ -410,10 +357,6 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         }
     }

-    pub fn get_alloc_kind(&self, id: AllocId) -> Option<MemoryKind<M::MemoryKinds>> {
-        self.alloc_kind.get(&id).cloned()
-    }
-
     /// For debugging, print an allocation and all allocations it points to, recursively.
     pub fn dump_alloc(&self, id: AllocId) {
         if !log_enabled!(::log::Level::Trace) {
@@ -441,14 +384,14 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
             let (alloc, immutable) =
                 // normal alloc?
                 match self.alloc_map.get(&id) {
-                    Some(a) => (a, match self.alloc_kind[&id] {
+                    Some((kind, alloc)) => (alloc, match kind {
                         MemoryKind::Stack => " (stack)".to_owned(),
                         MemoryKind::Machine(m) => format!(" ({:?})", m),
                     }),
                     None => {
                         // static alloc?
                         match self.tcx.alloc_map.lock().get(id) {
-                            Some(AllocType::Memory(a)) => (a, "(immutable)".to_owned()),
+                            Some(AllocType::Memory(a)) => (a, " (immutable)".to_owned()),
                             Some(AllocType::Function(func)) => {
                                 trace!("{} {}", msg, func);
                                 continue;
@@ -510,8 +453,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
     pub fn leak_report(&self) -> usize {
         trace!("### LEAK REPORT ###");
         let leaks: Vec<_> = self.alloc_map
-            .keys()
-            .cloned()
+            .iter()
+            .filter_map(|(&id, (kind, _))|
+                if kind.is_static() { None } else { Some(id) } )
             .collect();
         let n = leaks.len();
         self.dump_allocs(leaks);
@@ -534,7 +478,6 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         if size.bytes() == 0 {
             return Ok(&[]);
         }
-        M::check_locks(self, ptr, size, AccessKind::Read)?;
         // if ptr.offset is in bounds, then so is ptr (because offset checks for overflow)
         self.check_bounds(ptr.offset(size, self)?, true)?;
         let alloc = self.get(ptr.alloc_id)?;
@@ -557,7 +500,6 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         if size.bytes() == 0 {
             return Ok(&mut []);
         }
-        M::check_locks(self, ptr, size, AccessKind::Write)?;
         // if ptr.offset is in bounds, then so is ptr (because offset checks for overflow)
         self.check_bounds(ptr.offset(size, &*self)?, true)?;
         let alloc = self.get_mut(ptr.alloc_id)?;
@ -597,11 +539,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
|
||||
alloc: AllocId,
|
||||
mutability: Mutability,
|
||||
) -> EvalResult<'tcx> {
|
||||
match self.alloc_kind.get(&alloc) {
|
||||
// do not go into statics
|
||||
None => Ok(()),
|
||||
// just locals and machine allocs
|
||||
Some(_) => self.mark_static_initialized(alloc, mutability),
|
||||
match self.alloc_map.contains_key(&alloc) {
|
||||
// already interned
|
||||
false => Ok(()),
|
||||
// this still needs work
|
||||
true => self.mark_static_initialized(alloc, mutability),
|
||||
}
|
||||
}
|
||||
|
||||
@ -616,28 +558,42 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
|
||||
alloc_id,
|
||||
mutability
|
||||
);
|
||||
// The machine handled it
|
||||
if M::mark_static_initialized(self, alloc_id, mutability)? {
|
||||
return Ok(())
|
||||
// remove allocation
|
||||
let (kind, mut alloc) = self.alloc_map.remove(&alloc_id).unwrap();
|
||||
match kind {
|
||||
MemoryKind::Machine(_) => bug!("Static cannot refer to machine memory"),
|
||||
MemoryKind::Stack => {},
|
||||
}
|
||||
let alloc = self.alloc_map.remove(&alloc_id);
|
||||
match self.alloc_kind.remove(&alloc_id) {
|
||||
None => {},
|
||||
Some(MemoryKind::Machine(_)) => bug!("machine didn't handle machine alloc"),
|
||||
Some(MemoryKind::Stack) => {},
|
||||
// ensure llvm knows not to put this into immutable memory
|
||||
alloc.mutability = mutability;
|
||||
let alloc = self.tcx.intern_const_alloc(alloc);
|
||||
self.tcx.alloc_map.lock().set_id_memory(alloc_id, alloc);
|
||||
// recurse into inner allocations
|
||||
for &alloc in alloc.relocations.values() {
|
||||
// FIXME: Reusing the mutability here is likely incorrect. It is originally
|
||||
// determined via `is_freeze`, and data is considered frozen if there is no
|
||||
// `UnsafeCell` *immediately* in that data -- however, this search stops
|
||||
// at references. So whenever we follow a reference, we should likely
|
||||
// assume immutability -- and we should make sure that the compiler
|
||||
// does not permit code that would break this!
|
||||
self.mark_inner_allocation_initialized(alloc, mutability)?;
|
||||
}
|
||||
if let Some(mut alloc) = alloc {
|
||||
// ensure llvm knows not to put this into immutable memory
|
||||
alloc.runtime_mutability = mutability;
|
||||
let alloc = self.tcx.intern_const_alloc(alloc);
|
||||
self.tcx.alloc_map.lock().set_id_memory(alloc_id, alloc);
|
||||
// recurse into inner allocations
|
||||
for &alloc in alloc.relocations.values() {
|
||||
self.mark_inner_allocation_initialized(alloc, mutability)?;
|
||||
}
|
||||
} else {
|
||||
bug!("no allocation found for {:?}", alloc_id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
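The interning step above boils down to: take the allocation out of local memory, stamp the final mutability on it, publish it in the global table, and recurse through its relocations so the whole object graph ends up interned. A simplified, self-contained model (all names invented, not the rustc API):

use std::collections::HashMap;

#[derive(Clone, Copy)]
enum Mutability { Mutable, Immutable }

#[derive(Clone)]
struct Allocation {
    relocations: Vec<u64>, // alloc ids this allocation points to
    mutability: Mutability,
}

struct Interner {
    local: HashMap<u64, Allocation>,  // still-mutable interpreter memory
    global: HashMap<u64, Allocation>, // the interned, shared table
}

impl Interner {
    fn mark_static_initialized(&mut self, id: u64, mutability: Mutability) {
        // Not in local memory: already interned (or a true global), done.
        let mut alloc = match self.local.remove(&id) {
            Some(alloc) => alloc,
            None => return,
        };
        // Record the mutability codegen should use for this static.
        alloc.mutability = mutability;
        let inner = alloc.relocations.clone();
        self.global.insert(id, alloc);
        // Recurse so everything reachable from the static gets interned too.
        for inner_id in inner {
            self.mark_static_initialized(inner_id, mutability);
        }
    }
}
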
    /// The alloc_id must refer to a (mutable) static; a deep copy of that
    /// static is made into this memory.
    pub fn deep_copy_static(
        &mut self,
        id: AllocId,
        kind: MemoryKind<M::MemoryKinds>,
    ) -> EvalResult<'tcx> {
        let alloc = static_alloc(self.tcx, id)?;
        if alloc.mutability == Mutability::Immutable {
            return err!(ModifiedConstantMemory);
        }
        let old = self.alloc_map.insert(id, (kind, alloc.clone()));
        assert!(old.is_none(), "deep_copy_static: must not overwrite existing memory");
        Ok(())
    }

@ -745,7 +701,6 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
                    return err!(ReadPointerAsBytes);
                }
                self.check_defined(ptr, p1)?;
                M::check_locks(self, ptr, p1, AccessKind::Read)?;
                Ok(&alloc.bytes[offset..offset + size])
            }
            None => err!(UnterminatedCString(ptr)),
@ -802,9 +757,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
        let bytes = self.get_bytes_unchecked(ptr, size, ptr_align.min(self.int_align(size)))?;
        // Undef check happens *after* we established that the alignment is correct.
        // We must not return Ok() for unaligned pointers!
        if self.check_defined(ptr, size).is_err() {
            // this inflates undefined bytes to the entire scalar,
            // even if only a few bytes are undefined
        if !self.is_defined(ptr, size)? {
            // this inflates undefined bytes to the entire scalar, even if only a few
            // bytes are undefined
            return Ok(ScalarMaybeUndef::Undef);
        }
        // Now we do the actual reading
@ -990,16 +945,21 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
        Ok(())
    }

    fn check_defined(&self, ptr: Pointer, size: Size) -> EvalResult<'tcx> {
    fn is_defined(&self, ptr: Pointer, size: Size) -> EvalResult<'tcx, bool> {
        let alloc = self.get(ptr.alloc_id)?;
        if !alloc.undef_mask.is_range_defined(
        Ok(alloc.undef_mask.is_range_defined(
            ptr.offset,
            ptr.offset + size,
        )
        {
            return err!(ReadUndefBytes);
        ))
    }

    #[inline]
    fn check_defined(&self, ptr: Pointer, size: Size) -> EvalResult<'tcx> {
        if self.is_defined(ptr, size)? {
            Ok(())
        } else {
            err!(ReadUndefBytes)
        }
        Ok(())
    }

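The refactor above separates the question ("are these bytes defined?") from the assertion ("error out if not"), which is what lets `read_scalar` turn undef into `ScalarMaybeUndef::Undef` instead of failing. A toy model of that split, with `UndefMask` reduced to a `Vec<bool>` and all names invented:

struct UndefMask { defined: Vec<bool> }

impl UndefMask {
    fn is_range_defined(&self, start: usize, end: usize) -> bool {
        self.defined[start..end].iter().all(|&b| b)
    }
}

struct Memory { mask: UndefMask }

impl Memory {
    // Query: never fails just because bytes are undefined.
    fn is_defined(&self, start: usize, end: usize) -> Result<bool, String> {
        Ok(self.mask.is_range_defined(start, end))
    }

    // Assertion: converts "undefined" into a hard error for callers
    // that must not observe undef.
    fn check_defined(&self, start: usize, end: usize) -> Result<(), String> {
        if self.is_defined(start, end)? {
            Ok(())
        } else {
            Err("read of undefined bytes".into())
        }
    }
}

fn main() {
    let mem = Memory { mask: UndefMask { defined: vec![true, false, true] } };
    // A scalar read can map "undefined" to a value instead of erroring out:
    let scalar_is_undef = !mem.is_defined(0, 3).unwrap();
    assert!(scalar_is_undef);
    assert!(mem.check_defined(0, 3).is_err());
}
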
    pub fn mark_definedness(

@ -22,6 +22,7 @@ mod terminator;
mod traits;
mod const_eval;
mod validity;
mod intrinsics;

pub use self::eval_context::{
    EvalContext, Frame, StackPopCleanup, LocalValue,
@ -41,8 +42,9 @@ pub use self::const_eval::{
    const_field,
    const_variant_index,
    op_to_const,
    static_alloc,
};

pub use self::machine::Machine;
pub use self::machine::{Machine, IsStatic};

pub use self::operand::{Value, ValTy, Operand, OpTy};

@ -14,7 +14,7 @@
use std::convert::TryInto;

use rustc::mir;
use rustc::ty::layout::{self, Align, LayoutOf, TyLayout, HasDataLayout, IntegerExt};
use rustc::ty::layout::{self, Size, Align, LayoutOf, TyLayout, HasDataLayout, IntegerExt};
use rustc_data_structures::indexed_vec::Idx;

use rustc::mir::interpret::{
@ -300,6 +300,22 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
        }
    }

    // The operand must be a `&str` or compatible layout.
    pub fn read_str(
        &self,
        op: OpTy<'tcx>,
    ) -> EvalResult<'tcx, &str> {
        let val = self.read_value(op)?;
        if let Value::ScalarPair(ptr, len) = *val {
            let len = len.not_undef()?.to_bits(self.memory.pointer_size())?;
            let bytes = self.memory.read_bytes(ptr.not_undef()?, Size::from_bytes(len as u64))?;
            let str = ::std::str::from_utf8(bytes)
                .map_err(|err| EvalErrorKind::ValidationFailure(err.to_string()))?;
            Ok(str)
        } else {
            bug!("read_str: not a str")
        }
    }

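`read_str` decodes a (pointer, length) scalar pair into a `&str`, rejecting non-UTF-8 data as a validation failure. Roughly this, modeled over a toy byte store (names invented for illustration):

struct Memory { bytes: Vec<u8> }

impl Memory {
    fn read_bytes(&self, offset: usize, len: usize) -> Result<&[u8], String> {
        self.bytes.get(offset..offset + len)
            .ok_or_else(|| "pointer out of bounds".to_string())
    }
}

fn read_str(mem: &Memory, ptr: usize, len: usize) -> Result<&str, String> {
    let bytes = mem.read_bytes(ptr, len)?;
    // like the real code, reject non-UTF-8 data as a validation failure
    std::str::from_utf8(bytes).map_err(|e| format!("validation failure: {}", e))
}

fn main() {
    let mem = Memory { bytes: b"hello, miri".to_vec() };
    assert_eq!(read_str(&mem, 7, 4).unwrap(), "miri");
}
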
    pub fn uninit_operand(&mut self, layout: TyLayout<'tcx>) -> EvalResult<'tcx, Operand> {
        // This decides which types we will use the Immediate optimization for, and hence should
        // match what `try_read_value` and `eval_place_to_op` support.
@ -482,9 +498,10 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
    // Unfortunately, this needs an `&mut` to be able to allocate a copy of a `ByRef`
    // constant. This bleeds up to `eval_operand` needing `&mut`.
    pub fn const_value_to_op(
        &mut self,
        &self,
        val: ConstValue<'tcx>,
    ) -> EvalResult<'tcx, Operand> {
        trace!("const_value_to_op: {:?}", val);
        match val {
            ConstValue::Unevaluated(def_id, substs) => {
                let instance = self.resolve(def_id, substs)?;
@ -493,9 +510,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                    promoted: None,
                })
            }
            ConstValue::ByRef(alloc, offset) => {
                // FIXME: Allocate new AllocId for all constants inside
                let id = self.memory.allocate_value(alloc.clone(), MemoryKind::Stack)?;
            ConstValue::ByRef(id, alloc, offset) => {
                // We rely on mutability being set correctly in that allocation to prevent writes
                // where none should happen -- and for `static mut`, we copy on demand anyway.
                Ok(Operand::from_ptr(Pointer::new(id, offset), alloc.align))
            },
            ConstValue::ScalarPair(a, b) =>
@ -505,7 +522,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
        }
    }

    pub(super) fn global_to_op(&mut self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, Operand> {
    pub(super) fn global_to_op(&self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, Operand> {
        let cv = self.const_eval(gid)?;
        self.const_value_to_op(cv.val)
    }

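Because `ByRef` now carries the `AllocId` of the already-interned allocation, turning a by-ref constant into an operand no longer requires cloning bytes into local memory (which is also why the methods above can drop `&mut self`). A simplified model of that change, with all types invented for illustration:

#[derive(Clone)]
struct Allocation { bytes: Vec<u8> }

enum ConstValue {
    Scalar(u128),
    // (id, allocation, offset): the id names the interned allocation
    ByRef(u64, Allocation, usize),
}

#[derive(Debug, PartialEq)]
enum Operand {
    Immediate(u128),
    // a pointer (alloc id, offset) into existing memory -- no copy made
    Indirect(u64, usize),
}

fn const_value_to_op(val: &ConstValue) -> Operand {
    match val {
        ConstValue::Scalar(s) => Operand::Immediate(*s),
        // Before this change, the interpreter had to clone the allocation
        // into its local memory to obtain a fresh id; now it can reuse `id`.
        ConstValue::ByRef(id, _alloc, offset) => Operand::Indirect(*id, *offset),
    }
}

fn main() {
    let c = ConstValue::ByRef(42, Allocation { bytes: vec![1, 2, 3] }, 0);
    assert_eq!(const_value_to_op(&c), Operand::Indirect(42, 0));
}
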
@ -247,6 +247,8 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
        let pointee_type = val.layout.ty.builtin_deref(true).unwrap().ty;
        let layout = self.layout_of(pointee_type)?;
        let mplace = match self.tcx.struct_tail(pointee_type).sty {
            // Matching on the type is okay here, because we used `struct_tail` to get to
            // the "core" of what makes this unsized.
            ty::Dynamic(..) => {
                let (ptr, vtable) = val.to_scalar_dyn_trait()?;
                MemPlace {
@ -263,11 +265,14 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                    extra: PlaceExtra::Length(len),
                }
            }
            _ => MemPlace {
                ptr: val.to_scalar()?,
                align: layout.align,
                extra: PlaceExtra::None,
            },
            _ => {
                assert!(!layout.is_unsized(), "Unhandled unsized type {:?}", pointee_type);
                MemPlace {
                    ptr: val.to_scalar()?,
                    align: layout.align,
                    extra: PlaceExtra::None,
                }
            }
        };
        Ok(MPlaceTy { mplace, layout })
    }
@ -371,6 +376,8 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
        // Compute extra and new layout
        let inner_len = len - to - from;
        let (extra, ty) = match base.layout.ty.sty {
            // It is not nice to match on the type, but that seems to be the only way to
            // implement this.
            ty::Array(inner, _) =>
                (PlaceExtra::None, self.tcx.mk_array(inner, inner_len)),
            ty::Slice(..) =>
@ -526,7 +533,12 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
            instance,
            promoted: None
        };
        let alloc = Machine::init_static(self, cid)?;
        // Just create a lazy reference, so we can support recursive statics.
        // When the data here is ever actually used, memory will notice,
        // and it knows how to deal with alloc_ids that are present in the
        // global table but not in its local memory.
        let alloc = self.tcx.alloc_map.lock()
            .intern_static(cid.instance.def_id());
        MPlaceTy::from_aligned_ptr(alloc.into(), layout).into()
    }

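The "lazy" scheme in the comment above can be modeled as a global table whose entries are either real bytes or an unevaluated-static marker: referencing a static never evaluates it, and evaluation happens on first memory access. Handing out the same id for the same static every time is what breaks the cycle for recursive statics. A self-contained sketch (all names invented):

use std::collections::HashMap;

#[derive(Clone)]
enum AllocType {
    Memory(Vec<u8>),
    Static(&'static str), // stands in for the DefId of the static
}

struct GlobalTable {
    next_id: u64,
    map: HashMap<u64, AllocType>,
    ids_for_static: HashMap<&'static str, u64>,
}

impl GlobalTable {
    // Referencing a static does not evaluate it: it just hands out an id.
    fn intern_static(&mut self, def: &'static str) -> u64 {
        if let Some(&id) = self.ids_for_static.get(def) {
            return id; // same static, same id -- this is what breaks cycles
        }
        let id = self.next_id;
        self.next_id += 1;
        self.map.insert(id, AllocType::Static(def));
        self.ids_for_static.insert(def, id);
        id
    }

    // Only an actual memory access forces evaluation of the static.
    fn get(&mut self, id: u64, eval: impl Fn(&str) -> Vec<u8>) -> Vec<u8> {
        match self.map.get(&id).cloned() {
            Some(AllocType::Memory(bytes)) => bytes,
            Some(AllocType::Static(def)) => {
                let bytes = eval(def);
                self.map.insert(id, AllocType::Memory(bytes.clone()));
                bytes
            }
            None => panic!("dangling alloc id"),
        }
    }
}
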
@ -692,6 +704,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
        &mut self,
        OpTy { op, layout }: OpTy<'tcx>,
    ) -> EvalResult<'tcx, MPlaceTy<'tcx>> {
        trace!("allocate_op: {:?}", op);
        Ok(match op {
            Operand::Indirect(mplace) => MPlaceTy { mplace, layout },
            Operand::Immediate(value) => {

@ -76,8 +76,13 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
        self.loop_detector.observe_and_analyze(&self.machine, &self.stack, &self.memory)
    }

    pub fn run(&mut self) -> EvalResult<'tcx> {
        while self.step()? {}
        Ok(())
    }

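With `step` made private below, `run` becomes the single public driver shared by CTFE and miri. A toy version of that loop (names invented):

// Minimal model of the run/step split: `step` executes one statement and
// reports whether anything is left to do; `run` is the shared driver loop.
struct Interpreter { stack: Vec<&'static str> }

impl Interpreter {
    fn step(&mut self) -> Result<bool, String> {
        // returns false once the call stack is empty
        match self.stack.pop() {
            Some(stmt) => {
                println!("executing {}", stmt);
                Ok(true)
            }
            None => Ok(false),
        }
    }

    pub fn run(&mut self) -> Result<(), String> {
        while self.step()? {}
        Ok(())
    }
}

fn main() {
    let mut interp = Interpreter { stack: vec!["_0 = const 1", "return"] };
    interp.run().unwrap();
}
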
    /// Returns true as long as there are more things to do.
    pub fn step(&mut self) -> EvalResult<'tcx, bool> {
    fn step(&mut self) -> EvalResult<'tcx, bool> {
        if self.stack.is_empty() {
            return Ok(false);
        }
@ -147,10 +152,8 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                    M::validation_op(self, op, operand)?;
                }
            }
            EndRegion(ce) => {
                M::end_region(self, Some(ce))?;
            }

            EndRegion(..) => {}
            UserAssertTy(..) => {}

            // Defined to do nothing. These are added by optimization passes, to avoid changing the
@ -327,8 +330,13 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
        debug!("{:?}", terminator.kind);
        self.tcx.span = terminator.source_info.span;
        self.memory.tcx.span = terminator.source_info.span;

        let old_stack = self.cur_frame();
        let old_bb = self.frame().block;
        self.eval_terminator(terminator)?;
        if !self.stack.is_empty() {
            // This should change *something*
            debug_assert!(self.cur_frame() != old_stack || self.frame().block != old_bb);
            debug!("// {:?}", self.frame().block);
        }
        Ok(())

@ -57,8 +57,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {

        self.eval_fn_call(
            instance,
            Some((dest, target)),
            &[arg],
            Some(dest),
            Some(target),
            span,
            fn_sig,
        )

@ -15,16 +15,22 @@ use syntax::source_map::Span;
use rustc_target::spec::abi::Abi;

use rustc::mir::interpret::{EvalResult, Scalar};
use super::{EvalContext, Machine, Value, OpTy, PlaceTy, ValTy, Operand};
use super::{EvalContext, Machine, Value, OpTy, Place, PlaceTy, ValTy, Operand, StackPopCleanup};

use rustc_data_structures::indexed_vec::Idx;

mod drop;

impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
    pub fn goto_block(&mut self, target: mir::BasicBlock) {
        self.frame_mut().block = target;
        self.frame_mut().stmt = 0;
    #[inline]
    pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> EvalResult<'tcx> {
        if let Some(target) = target {
            self.frame_mut().block = target;
            self.frame_mut().stmt = 0;
            Ok(())
        } else {
            err!(Unreachable)
        }
    }

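A tiny model of the new `goto_block` signature, where `None` stands for "this call has no return target", so continuing there is an error (simplified types, invented names):

struct Frame { block: usize, stmt: usize }

struct Interp { frame: Frame }

impl Interp {
    fn goto_block(&mut self, target: Option<usize>) -> Result<(), String> {
        match target {
            Some(block) => {
                self.frame.block = block;
                self.frame.stmt = 0;
                Ok(())
            }
            // e.g. a diverging call that "returned" anyway
            None => Err("entered unreachable code".into()),
        }
    }
}

fn main() {
    let mut interp = Interp { frame: Frame { block: 0, stmt: 3 } };
    interp.goto_block(Some(1)).unwrap();
    assert_eq!((interp.frame.block, interp.frame.stmt), (1, 0));
    assert!(interp.goto_block(None).is_err());
}
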
    pub(super) fn eval_terminator(
@ -38,7 +44,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                self.pop_stack_frame()?
            }

            Goto { target } => self.goto_block(target),
            Goto { target } => self.goto_block(Some(target))?,

            SwitchInt {
                ref discr,
@ -69,7 +75,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                    }
                }

                self.goto_block(target_block);
                self.goto_block(Some(target_block))?;
            }

            Call {
@ -78,9 +84,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                ref destination,
                ..
            } => {
                let destination = match *destination {
                    Some((ref lv, target)) => Some((self.eval_place(lv)?, target)),
                    None => None,
                let (dest, ret) = match *destination {
                    Some((ref lv, target)) => (Some(self.eval_place(lv)?), Some(target)),
                    None => (None, None),
                };

                let func = self.eval_operand(func, None)?;
@ -124,8 +130,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                );
                self.eval_fn_call(
                    fn_def,
                    destination,
                    &args[..],
                    dest,
                    ret,
                    terminator.source_info.span,
                    sig,
                )?;
@ -161,7 +168,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                    .to_scalar()?
                    .to_bool()?;
                if expected == cond_val {
                    self.goto_block(target);
                    self.goto_block(Some(target))?;
                } else {
                    use rustc::mir::interpret::EvalErrorKind::*;
                    return match *msg {
@ -273,30 +280,51 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
    fn eval_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        destination: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        args: &[OpTy<'tcx>],
        dest: Option<PlaceTy<'tcx>>,
        ret: Option<mir::BasicBlock>,
        span: Span,
        sig: ty::FnSig<'tcx>,
    ) -> EvalResult<'tcx> {
        trace!("eval_fn_call: {:#?}", instance);
        if let Some((place, _)) = destination {
        if let Some(place) = dest {
            assert_eq!(place.layout.ty, sig.output());
        }
        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                let (ret, target) = match destination {
                // The intrinsic itself cannot diverge, so if we got here without a return
                // place... (can happen e.g. for transmute returning `!`)
                let dest = match dest {
                    Some(dest) => dest,
                    _ => return err!(Unreachable),
                    None => return err!(Unreachable)
                };
                M::call_intrinsic(self, instance, args, ret, target)?;
                self.dump_place(*ret);
                M::call_intrinsic(self, instance, args, dest)?;
                // No stack frame gets pushed, the main loop will just act as if the
                // call completed.
                self.goto_block(ret)?;
                self.dump_place(*dest);
                Ok(())
            }
            // FIXME: figure out why we can't just go through the shim
            ty::InstanceDef::ClosureOnceShim { .. } => {
                if M::eval_fn_call(self, instance, destination, args, span)? {
                    return Ok(());
                }
                let mir = match M::find_fn(self, instance, args, dest, ret)? {
                    Some(mir) => mir,
                    None => return Ok(()),
                };

                let return_place = match dest {
                    Some(place) => *place,
                    None => Place::null(&self),
                };
                self.push_stack_frame(
                    instance,
                    span,
                    mir,
                    return_place,
                    StackPopCleanup::Goto(ret),
                )?;

                // Pass the arguments
                let mut arg_locals = self.frame().mir.args_iter();
                match sig.abi {
                    // closure as closure once
@ -333,13 +361,22 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
            ty::InstanceDef::DropGlue(..) |
            ty::InstanceDef::CloneShim(..) |
            ty::InstanceDef::Item(_) => {
                // Push the stack frame, and potentially be entirely done if the call got hooked
                if M::eval_fn_call(self, instance, destination, args, span)? {
                    // FIXME: Can we make it return the frame to push, instead
                    // of the hook doing half of the work and us doing the argument
                    // initialization?
                    return Ok(());
                }
                let mir = match M::find_fn(self, instance, args, dest, ret)? {
                    Some(mir) => mir,
                    None => return Ok(()),
                };

                let return_place = match dest {
                    Some(place) => *place,
                    None => Place::null(&self),
                };
                self.push_stack_frame(
                    instance,
                    span,
                    mir,
                    return_place,
                    StackPopCleanup::Goto(ret),
                )?;

                // Pass the arguments
                let mut arg_locals = self.frame().mir.args_iter();
@ -418,7 +455,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
        args[0].op = Operand::Immediate(Value::Scalar(ptr.into())); // strip vtable
        trace!("Patched self operand to {:#?}", args[0]);
        // recurse with concrete function
        self.eval_fn_call(instance, destination, &args, span, sig)
        self.eval_fn_call(instance, &args, dest, ret, span, sig)
    }
}

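The new contract above: the machine's `find_fn` hook only locates a body, or returns `None` to say it handled the call entirely (e.g. an emulated shim), and pushing the stack frame is the engine's job. A simplified, self-contained model of that division of labor (all names invented):

// `Mir` here is a stand-in for a MIR body.
type Mir = Vec<&'static str>;

trait Machine {
    fn find_fn(&mut self, name: &str) -> Result<Option<Mir>, String>;
}

struct Engine<M: Machine> {
    machine: M,
    stack: Vec<Mir>,
}

impl<M: Machine> Engine<M> {
    fn eval_fn_call(&mut self, name: &str) -> Result<(), String> {
        let mir = match self.machine.find_fn(name)? {
            Some(mir) => mir,
            // machine took care of everything (e.g. produced a shim result)
            None => return Ok(()),
        };
        // the engine, not the machine, pushes the new stack frame
        self.stack.push(mir);
        Ok(())
    }
}

// A machine that emulates `malloc`-like shims and looks up everything else.
struct ToyMachine;

impl Machine for ToyMachine {
    fn find_fn(&mut self, name: &str) -> Result<Option<Mir>, String> {
        match name {
            "malloc" => Ok(None), // handled entirely by the machine
            "main" => Ok(Some(vec!["_0 = const 0", "return"])),
            _ => Err(format!("no MIR for {}", name)),
        }
    }
}

fn main() {
    let mut engine = Engine { machine: ToyMachine, stack: vec![] };
    engine.eval_fn_call("malloc").unwrap();
    assert!(engine.stack.is_empty());
    engine.eval_fn_call("main").unwrap();
    assert_eq!(engine.stack.len(), 1);
}
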
@ -1271,7 +1271,7 @@ fn collect_const<'a, 'tcx>(
        ConstValue::ScalarPair(Scalar::Ptr(ptr), _) |
        ConstValue::Scalar(Scalar::Ptr(ptr)) =>
            collect_miri(tcx, ptr.alloc_id, output),
        ConstValue::ByRef(alloc, _offset) => {
        ConstValue::ByRef(_id, alloc, _offset) => {
            for &id in alloc.relocations.values() {
                collect_miri(tcx, id, output);
            }