Reintroduce Undef and properly check constant value sizes

Oliver Schneider 2018-07-24 18:28:53 +02:00
parent 8c069ceba8
commit c6c06854c0
20 changed files with 539 additions and 501 deletions
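In outline, the change below does two things: `Scalar::Bits` now records the width of the value in bytes (`size`) instead of a count of defined bits (`defined`), and "maybe uninitialized" moves out of `Scalar` into a new `ScalarMaybeUndef` wrapper that `Value` uses for its by-value variants. A condensed sketch of the two types as they stand after this commit (derives and methods elided; the full definitions are in the diff for value.rs below):

    pub enum Scalar {
        /// The raw bytes of a simple value; only the low `size` bytes are meaningful.
        Bits { size: u8, bits: u128 },
        Ptr(Pointer),
    }

    /// A `Scalar` that may instead be entirely uninitialized.
    pub enum ScalarMaybeUndef {
        Scalar(Scalar),
        Undef,
    }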

View File

@@ -392,6 +392,11 @@ for ::mir::interpret::ConstValue<'gcx> {
     }
 }
+impl_stable_hash_for!(enum mir::interpret::ScalarMaybeUndef {
+    Scalar(v),
+    Undef
+});
 impl_stable_hash_for!(enum mir::interpret::Value {
     Scalar(v),
     ScalarPair(a, b),
@@ -466,9 +471,9 @@ for ::mir::interpret::Scalar {
         mem::discriminant(self).hash_stable(hcx, hasher);
         match *self {
-            Bits { bits, defined } => {
+            Bits { bits, size } => {
                 bits.hash_stable(hcx, hasher);
-                defined.hash_stable(hcx, hasher);
+                size.hash_stable(hcx, hasher);
             },
             Ptr(ptr) => ptr.hash_stable(hcx, hasher),
         }

View File

@@ -13,7 +13,7 @@ pub use self::error::{
     FrameInfo, ConstEvalResult,
 };
-pub use self::value::{Scalar, Value, ConstValue};
+pub use self::value::{Scalar, Value, ConstValue, ScalarMaybeUndef};
 use std::fmt;
 use mir;

View File

@@ -15,7 +15,7 @@ pub enum ConstValue<'tcx> {
     /// to allow HIR creation to happen for everything before needing to be able to run constant
     /// evaluation
     Unevaluated(DefId, &'tcx Substs<'tcx>),
-    /// Used only for types with layout::abi::Scalar ABI and ZSTs which use Scalar::undef()
+    /// Used only for types with layout::abi::Scalar ABI and ZSTs
     Scalar(Scalar),
     /// Used only for types with layout::abi::ScalarPair
     ScalarPair(Scalar, Scalar),
@@ -25,12 +25,12 @@ pub enum ConstValue<'tcx> {
 impl<'tcx> ConstValue<'tcx> {
     #[inline]
-    pub fn from_byval_value(val: Value) -> Self {
-        match val {
+    pub fn from_byval_value(val: Value) -> EvalResult<'static, Self> {
+        Ok(match val {
             Value::ByRef(..) => bug!(),
-            Value::ScalarPair(a, b) => ConstValue::ScalarPair(a, b),
-            Value::Scalar(val) => ConstValue::Scalar(val),
-        }
+            Value::ScalarPair(a, b) => ConstValue::ScalarPair(a.read()?, b.read()?),
+            Value::Scalar(val) => ConstValue::Scalar(val.read()?),
+        })
     }

     #[inline]
@@ -38,18 +38,13 @@ impl<'tcx> ConstValue<'tcx> {
         match *self {
             ConstValue::Unevaluated(..) |
             ConstValue::ByRef(..) => None,
-            ConstValue::ScalarPair(a, b) => Some(Value::ScalarPair(a, b)),
-            ConstValue::Scalar(val) => Some(Value::Scalar(val)),
+            ConstValue::ScalarPair(a, b) => Some(Value::ScalarPair(a.into(), b.into())),
+            ConstValue::Scalar(val) => Some(Value::Scalar(val.into())),
         }
     }

     #[inline]
-    pub fn from_scalar(val: Scalar) -> Self {
-        ConstValue::Scalar(val)
-    }
-
-    #[inline]
-    pub fn to_scalar(&self) -> Option<Scalar> {
+    pub fn try_to_scalar(&self) -> Option<Scalar> {
         match *self {
             ConstValue::Unevaluated(..) |
             ConstValue::ByRef(..) |
@@ -60,12 +55,12 @@ impl<'tcx> ConstValue<'tcx> {
     #[inline]
     pub fn to_bits(&self, size: Size) -> Option<u128> {
-        self.to_scalar()?.to_bits(size).ok()
+        self.try_to_scalar()?.to_bits(size).ok()
     }

     #[inline]
     pub fn to_ptr(&self) -> Option<Pointer> {
-        self.to_scalar()?.to_ptr().ok()
+        self.try_to_scalar()?.to_ptr().ok()
     }
 }
@@ -81,8 +76,8 @@ impl<'tcx> ConstValue<'tcx> {
 #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
 pub enum Value {
     ByRef(Scalar, Align),
-    Scalar(Scalar),
-    ScalarPair(Scalar, Scalar),
+    Scalar(ScalarMaybeUndef),
+    ScalarPair(ScalarMaybeUndef, ScalarMaybeUndef),
 }

 impl<'tcx> ty::TypeFoldable<'tcx> for Value {
@@ -98,23 +93,23 @@ impl<'tcx> Scalar {
     pub fn ptr_null<C: HasDataLayout>(cx: C) -> Self {
         Scalar::Bits {
             bits: 0,
-            defined: cx.data_layout().pointer_size.bits() as u8,
+            size: cx.data_layout().pointer_size.bytes() as u8,
         }
     }

+    pub fn to_value_with_len<C: HasDataLayout>(self, len: u64, cx: C) -> Value {
+        ScalarMaybeUndef::Scalar(self).to_value_with_len(len, cx)
+    }
+
     pub fn ptr_signed_offset<C: HasDataLayout>(self, i: i64, cx: C) -> EvalResult<'tcx, Self> {
         let layout = cx.data_layout();
         match self {
-            Scalar::Bits { bits, defined } => {
-                let pointer_size = layout.pointer_size.bits() as u8;
-                if defined < pointer_size {
-                    err!(ReadUndefBytes)
-                } else {
-                    Ok(Scalar::Bits {
-                        bits: layout.signed_offset(bits as u64, i)? as u128,
-                        defined: pointer_size,
-                    })
-                }
+            Scalar::Bits { bits, size } => {
+                assert_eq!(size as u64, layout.pointer_size.bytes());
+                Ok(Scalar::Bits {
+                    bits: layout.signed_offset(bits as u64, i)? as u128,
+                    size,
+                })
             }
             Scalar::Ptr(ptr) => ptr.signed_offset(i, layout).map(Scalar::Ptr),
         }
@@ -123,65 +118,43 @@ impl<'tcx> Scalar {
     pub fn ptr_offset<C: HasDataLayout>(self, i: Size, cx: C) -> EvalResult<'tcx, Self> {
         let layout = cx.data_layout();
         match self {
-            Scalar::Bits { bits, defined } => {
-                let pointer_size = layout.pointer_size.bits() as u8;
-                if defined < pointer_size {
-                    err!(ReadUndefBytes)
-                } else {
-                    Ok(Scalar::Bits {
-                        bits: layout.offset(bits as u64, i.bytes())? as u128,
-                        defined: pointer_size,
-                    })
-                }
+            Scalar::Bits { bits, size } => {
+                assert_eq!(size as u64, layout.pointer_size.bytes());
+                Ok(Scalar::Bits {
+                    bits: layout.offset(bits as u64, i.bytes())? as u128,
+                    size,
+                })
             }
             Scalar::Ptr(ptr) => ptr.offset(i, layout).map(Scalar::Ptr),
         }
     }

-    pub fn ptr_wrapping_signed_offset<C: HasDataLayout>(self, i: i64, cx: C) -> EvalResult<'tcx, Self> {
+    pub fn ptr_wrapping_signed_offset<C: HasDataLayout>(self, i: i64, cx: C) -> Self {
         let layout = cx.data_layout();
         match self {
-            Scalar::Bits { bits, defined } => {
-                let pointer_size = layout.pointer_size.bits() as u8;
-                if defined < pointer_size {
-                    err!(ReadUndefBytes)
-                } else {
-                    Ok(Scalar::Bits {
-                        bits: layout.wrapping_signed_offset(bits as u64, i) as u128,
-                        defined: pointer_size,
-                    })
-                }
-            }
-            Scalar::Ptr(ptr) => Ok(Scalar::Ptr(ptr.wrapping_signed_offset(i, layout))),
+            Scalar::Bits { bits, size } => {
+                assert_eq!(size as u64, layout.pointer_size.bytes());
+                Scalar::Bits {
+                    bits: layout.wrapping_signed_offset(bits as u64, i) as u128,
+                    size,
+                }
+            }
+            Scalar::Ptr(ptr) => Scalar::Ptr(ptr.wrapping_signed_offset(i, layout)),
         }
     }

-    pub fn is_null_ptr<C: HasDataLayout>(self, cx: C) -> EvalResult<'tcx, bool> {
+    pub fn is_null_ptr<C: HasDataLayout>(self, cx: C) -> bool {
         match self {
-            Scalar::Bits {
-                bits, defined,
-            } => if defined < cx.data_layout().pointer_size.bits() as u8 {
-                err!(ReadUndefBytes)
-            } else {
-                Ok(bits == 0)
+            Scalar::Bits { bits, size } => {
+                assert_eq!(size as u64, cx.data_layout().pointer_size.bytes());
+                bits == 0
             },
-            Scalar::Ptr(_) => Ok(false),
+            Scalar::Ptr(_) => false,
         }
     }

-    pub fn to_value_with_len<C: HasDataLayout>(self, len: u64, cx: C) -> Value {
-        Value::ScalarPair(self, Scalar::Bits {
-            bits: len as u128,
-            defined: cx.data_layout().pointer_size.bits() as u8,
-        })
-    }
-
-    pub fn to_value_with_vtable(self, vtable: Pointer) -> Value {
-        Value::ScalarPair(self, Scalar::Ptr(vtable))
-    }
-
     pub fn to_value(self) -> Value {
-        Value::Scalar(self)
+        Value::Scalar(ScalarMaybeUndef::Scalar(self))
     }
 }
@@ -199,8 +172,9 @@ impl From<Pointer> for Scalar {
 pub enum Scalar {
     /// The raw bytes of a simple value.
     Bits {
-        /// The first `defined` number of bits are valid
-        defined: u8,
+        /// The first `size` bytes are the value.
+        /// Do not try to read less or more bytes that that
+        size: u8,
         bits: u128,
     },
@@ -210,25 +184,81 @@ pub enum Scalar {
     Ptr(Pointer),
 }

-impl<'tcx> Scalar {
-    pub fn undef() -> Self {
-        Scalar::Bits { bits: 0, defined: 0 }
+#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
+pub enum ScalarMaybeUndef {
+    Scalar(Scalar),
+    Undef,
+}
+
+impl From<Scalar> for ScalarMaybeUndef {
+    fn from(s: Scalar) -> Self {
+        ScalarMaybeUndef::Scalar(s)
+    }
+}
+
+impl ScalarMaybeUndef {
+    pub fn read(self) -> EvalResult<'static, Scalar> {
+        match self {
+            ScalarMaybeUndef::Scalar(scalar) => Ok(scalar),
+            ScalarMaybeUndef::Undef => err!(ReadUndefBytes),
+        }
     }

+    pub fn to_value_with_len<C: HasDataLayout>(self, len: u64, cx: C) -> Value {
+        Value::ScalarPair(self.into(), Scalar::Bits {
+            bits: len as u128,
+            size: cx.data_layout().pointer_size.bytes() as u8,
+        }.into())
+    }
+
+    pub fn to_value_with_vtable(self, vtable: Pointer) -> Value {
+        Value::ScalarPair(self.into(), Scalar::Ptr(vtable).into())
+    }
+
+    pub fn ptr_signed_offset<C: HasDataLayout>(self, i: i64, cx: C) -> EvalResult<'tcx, Self> {
+        match self {
+            ScalarMaybeUndef::Scalar(scalar) => {
+                scalar.ptr_signed_offset(i, cx).map(ScalarMaybeUndef::Scalar)
+            },
+            ScalarMaybeUndef::Undef => Ok(ScalarMaybeUndef::Undef)
+        }
+    }
+
+    pub fn ptr_offset<C: HasDataLayout>(self, i: Size, cx: C) -> EvalResult<'tcx, Self> {
+        match self {
+            ScalarMaybeUndef::Scalar(scalar) => {
+                scalar.ptr_offset(i, cx).map(ScalarMaybeUndef::Scalar)
+            },
+            ScalarMaybeUndef::Undef => Ok(ScalarMaybeUndef::Undef)
+        }
+    }
+
+    pub fn ptr_wrapping_signed_offset<C: HasDataLayout>(self, i: i64, cx: C) -> Self {
+        match self {
+            ScalarMaybeUndef::Scalar(scalar) => {
+                ScalarMaybeUndef::Scalar(scalar.ptr_wrapping_signed_offset(i, cx))
+            },
+            ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef
+        }
+    }
+}
+
+impl<'tcx> Scalar {
     pub fn from_bool(b: bool) -> Self {
-        // FIXME: can we make defined `1`?
-        Scalar::Bits { bits: b as u128, defined: 8 }
+        Scalar::Bits { bits: b as u128, size: 1 }
     }

     pub fn from_char(c: char) -> Self {
-        Scalar::Bits { bits: c as u128, defined: 32 }
+        Scalar::Bits { bits: c as u128, size: 4 }
     }

-    pub fn to_bits(self, size: Size) -> EvalResult<'tcx, u128> {
+    pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
         match self {
-            Scalar::Bits { .. } if size.bits() == 0 => bug!("to_bits cannot be used with zsts"),
-            Scalar::Bits { bits, defined } if size.bits() <= defined as u64 => Ok(bits),
-            Scalar::Bits { .. } => err!(ReadUndefBytes),
+            Scalar::Bits { bits, size } => {
+                assert_eq!(target_size.bytes(), size as u64);
+                assert_ne!(size, 0, "to_bits cannot be used with zsts");
+                Ok(bits)
+            }
             Scalar::Ptr(_) => err!(ReadPointerAsBytes),
         }
     }
@@ -256,8 +286,8 @@ impl<'tcx> Scalar {
     pub fn to_bool(self) -> EvalResult<'tcx, bool> {
         match self {
-            Scalar::Bits { bits: 0, defined: 8 } => Ok(false),
-            Scalar::Bits { bits: 1, defined: 8 } => Ok(true),
+            Scalar::Bits { bits: 0, size: 1 } => Ok(false),
+            Scalar::Bits { bits: 1, size: 1 } => Ok(true),
             _ => err!(InvalidBool),
         }
     }
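The practical consequence for callers shows up throughout the rest of the diff: `Value::Scalar` and `Value::ScalarPair` now carry `ScalarMaybeUndef`, so code converts with `.into()` when building values and calls `.read()?` when it actually needs defined bits. A hypothetical helper (not part of the commit) showing both directions:

    fn assert_defined(val: Value) -> EvalResult<'static, Value> {
        match val {
            // `read()` turns `Undef` into the `ReadUndefBytes` error
            Value::Scalar(s) => Ok(Value::Scalar(s.read()?.into())),
            Value::ScalarPair(a, b) => Ok(Value::ScalarPair(a.read()?.into(), b.read()?.into())),
            Value::ByRef(..) => Ok(val),
        }
    }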

View File

@@ -17,7 +17,7 @@ use hir::def::CtorKind;
 use hir::def_id::DefId;
 use hir::{self, HirId, InlineAsm};
 use middle::region;
-use mir::interpret::{EvalErrorKind, Scalar, Value};
+use mir::interpret::{EvalErrorKind, Scalar, Value, ScalarMaybeUndef};
 use mir::visit::MirVisitable;
 use rustc_apfloat::ieee::{Double, Single};
 use rustc_apfloat::Float;
@@ -1465,10 +1465,10 @@ impl<'tcx> TerminatorKind<'tcx> {
     .map(|&u| {
         let mut s = String::new();
         print_miri_value(
-            Value::Scalar(Scalar::Bits {
+            Scalar::Bits {
                 bits: u,
-                defined: size.bits() as u8,
-            }),
+                size: size.bytes() as u8,
+            }.to_value(),
             switch_ty,
             &mut s,
         ).unwrap();
@@ -2225,45 +2225,58 @@ pub fn fmt_const_val<W: Write>(fmt: &mut W, const_val: &ty::Const) -> fmt::Result {
 pub fn print_miri_value<W: Write>(value: Value, ty: Ty, f: &mut W) -> fmt::Result {
     use ty::TypeVariants::*;
-    match (value, &ty.sty) {
-        (Value::Scalar(Scalar::Bits { bits: 0, .. }), &TyBool) => write!(f, "false"),
-        (Value::Scalar(Scalar::Bits { bits: 1, .. }), &TyBool) => write!(f, "true"),
-        (Value::Scalar(Scalar::Bits { bits, .. }), &TyFloat(ast::FloatTy::F32)) => {
-            write!(f, "{}f32", Single::from_bits(bits))
-        }
-        (Value::Scalar(Scalar::Bits { bits, .. }), &TyFloat(ast::FloatTy::F64)) => {
-            write!(f, "{}f64", Double::from_bits(bits))
-        }
-        (Value::Scalar(Scalar::Bits { bits, .. }), &TyUint(ui)) => write!(f, "{:?}{}", bits, ui),
-        (Value::Scalar(Scalar::Bits { bits, .. }), &TyInt(i)) => {
-            let bit_width = ty::tls::with(|tcx| {
-                let ty = tcx.lift_to_global(&ty).unwrap();
-                tcx.layout_of(ty::ParamEnv::empty().and(ty))
-                    .unwrap()
-                    .size
-                    .bits()
-            });
-            let shift = 128 - bit_width;
-            write!(f, "{:?}{}", ((bits as i128) << shift) >> shift, i)
-        }
-        (Value::Scalar(Scalar::Bits { bits, .. }), &TyChar) => {
-            write!(f, "{:?}", ::std::char::from_u32(bits as u32).unwrap())
-        }
-        (_, &TyFnDef(did, _)) => write!(f, "{}", item_path_str(did)),
-        (
-            Value::ScalarPair(Scalar::Ptr(ptr), Scalar::Bits { bits: len, .. }),
-            &TyRef(_, &ty::TyS { sty: TyStr, .. }, _),
-        ) => ty::tls::with(|tcx| match tcx.alloc_map.lock().get(ptr.alloc_id) {
-            Some(interpret::AllocType::Memory(alloc)) => {
-                assert_eq!(len as usize as u128, len);
-                let slice = &alloc.bytes[(ptr.offset.bytes() as usize)..][..(len as usize)];
-                let s = ::std::str::from_utf8(slice).expect("non utf8 str from miri");
-                write!(f, "{:?}", s)
-            }
-            _ => write!(f, "pointer to erroneous constant {:?}, {:?}", ptr, len),
-        }),
-        _ => write!(f, "{:?}:{}", value, ty),
-    }
+    // print some primitives
+    if let Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { bits, .. })) = value {
+        match ty.sty {
+            TyBool if bits == 0 => return write!(f, "false"),
+            TyBool if bits == 1 => return write!(f, "true"),
+            TyFloat(ast::FloatTy::F32) => return write!(f, "{}f32", Single::from_bits(bits)),
+            TyFloat(ast::FloatTy::F64) => return write!(f, "{}f64", Double::from_bits(bits)),
+            TyUint(ui) => return write!(f, "{:?}{}", bits, ui),
+            TyInt(i) => {
+                let bit_width = ty::tls::with(|tcx| {
+                    let ty = tcx.lift_to_global(&ty).unwrap();
+                    tcx.layout_of(ty::ParamEnv::empty().and(ty))
+                        .unwrap()
+                        .size
+                        .bits()
+                });
+                let shift = 128 - bit_width;
+                return write!(f, "{:?}{}", ((bits as i128) << shift) >> shift, i);
+            }
+            TyChar => return write!(f, "{:?}", ::std::char::from_u32(bits as u32).unwrap()),
+            _ => {},
+        }
+    }
+    // print function definitons
+    if let TyFnDef(did, _) = ty.sty {
+        return write!(f, "{}", item_path_str(did));
+    }
+    // print string literals
+    if let Value::ScalarPair(ptr, len) = value {
+        if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = ptr {
+            if let ScalarMaybeUndef::Scalar(Scalar::Bits { bits: len, .. }) = len {
+                if let TyRef(_, &ty::TyS { sty: TyStr, .. }, _) = ty.sty {
+                    return ty::tls::with(|tcx| {
+                        let alloc = tcx.alloc_map.lock().get(ptr.alloc_id);
+                        if let Some(interpret::AllocType::Memory(alloc)) = alloc {
+                            assert_eq!(len as usize as u128, len);
+                            let slice = &alloc
+                                .bytes
+                                [(ptr.offset.bytes() as usize)..]
+                                [..(len as usize)];
+                            let s = ::std::str::from_utf8(slice).expect("non utf8 str from miri");
+                            write!(f, "{:?}", s)
+                        } else {
+                            write!(f, "pointer to erroneous constant {:?}, {:?}", ptr, len)
+                        }
+                    });
+                }
+            }
+        }
+    }
+    // just raw dump everything else
+    write!(f, "{:?}:{}", value, ty)
 }

 fn item_path_str(def_id: DefId) -> String {

View File

@@ -1887,22 +1887,13 @@ impl<'tcx> Const<'tcx> {
         })
     }

-    #[inline]
-    pub fn from_byval_value(
-        tcx: TyCtxt<'_, '_, 'tcx>,
-        val: Value,
-        ty: Ty<'tcx>,
-    ) -> &'tcx Self {
-        Self::from_const_value(tcx, ConstValue::from_byval_value(val), ty)
-    }
-
     #[inline]
     pub fn from_scalar(
         tcx: TyCtxt<'_, '_, 'tcx>,
         val: Scalar,
         ty: Ty<'tcx>,
     ) -> &'tcx Self {
-        Self::from_const_value(tcx, ConstValue::from_scalar(val), ty)
+        Self::from_const_value(tcx, ConstValue::Scalar(val), ty)
     }

     #[inline]
@@ -1918,12 +1909,12 @@ impl<'tcx> Const<'tcx> {
         let shift = 128 - size.bits();
         let truncated = (bits << shift) >> shift;
         assert_eq!(truncated, bits, "from_bits called with untruncated value");
-        Self::from_scalar(tcx, Scalar::Bits { bits, defined: size.bits() as u8 }, ty.value)
+        Self::from_scalar(tcx, Scalar::Bits { bits, size: size.bytes() as u8 }, ty.value)
     }

     #[inline]
     pub fn zero_sized(tcx: TyCtxt<'_, '_, 'tcx>, ty: Ty<'tcx>) -> &'tcx Self {
-        Self::from_scalar(tcx, Scalar::undef(), ty)
+        Self::from_scalar(tcx, Scalar::Bits { bits: 0, size: 0 }, ty)
     }

     #[inline]
@@ -1960,11 +1951,6 @@ impl<'tcx> Const<'tcx> {
         self.val.to_byval_value()
     }

-    #[inline]
-    pub fn to_scalar(&self) -> Option<Scalar> {
-        self.val.to_scalar()
-    }
-
     #[inline]
     pub fn assert_bits(
         &self,

View File

@@ -39,10 +39,12 @@ pub fn scalar_to_llvm(
 ) -> &'ll Value {
     let bitsize = if layout.is_bool() { 1 } else { layout.value.size(cx).bits() };
     match cv {
-        Scalar::Bits { defined, .. } if (defined as u64) < bitsize || defined == 0 => {
-            C_undef(Type::ix(cx, bitsize))
+        Scalar::Bits { size: 0, .. } => {
+            assert_eq!(0, layout.value.size(cx).bytes());
+            C_undef(Type::ix(cx, 0))
         },
-        Scalar::Bits { bits, .. } => {
+        Scalar::Bits { bits, size } => {
+            assert_eq!(size as u64, layout.value.size(cx).bytes());
             let llval = C_uint_big(Type::ix(cx, bitsize), bits);
             if layout.value == layout::Pointer {
                 unsafe { llvm::LLVMConstIntToPtr(llval, llty) }
@@ -192,7 +194,7 @@ impl FunctionCx<'a, 'll, 'tcx> {
             mir::Field::new(field as usize),
             c,
         )?;
-        if let Some(prim) = field.to_scalar() {
+        if let Some(prim) = field.val.try_to_scalar() {
             let layout = bx.cx.layout_of(field_ty);
             let scalar = match layout.abi {
                 layout::Abi::Scalar(ref x) => x,

View File

@@ -151,14 +151,14 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> {
         let trunc = |n| {
             let param_ty = self.param_env.and(self.tcx.lift_to_global(&ty).unwrap());
-            let bit_width = self.tcx.layout_of(param_ty).unwrap().size.bits();
-            trace!("trunc {} with size {} and shift {}", n, bit_width, 128 - bit_width);
-            let shift = 128 - bit_width;
+            let width = self.tcx.layout_of(param_ty).unwrap().size;
+            trace!("trunc {} with size {} and shift {}", n, width.bits(), 128 - width.bits());
+            let shift = 128 - width.bits();
             let result = (n << shift) >> shift;
             trace!("trunc result: {}", result);
             ConstValue::Scalar(Scalar::Bits {
                 bits: result,
-                defined: bit_width as u8,
+                size: width.bytes() as u8,
             })
         };
@@ -168,7 +168,7 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> {
             let s = s.as_str();
             let id = self.tcx.allocate_bytes(s.as_bytes());
             let value = Scalar::Ptr(id.into()).to_value_with_len(s.len() as u64, self.tcx);
-            ConstValue::from_byval_value(value)
+            ConstValue::from_byval_value(value).unwrap()
         },
         LitKind::ByteStr(ref data) => {
             let id = self.tcx.allocate_bytes(data);
@@ -176,7 +176,7 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> {
         },
         LitKind::Byte(n) => ConstValue::Scalar(Scalar::Bits {
             bits: n as u128,
-            defined: 8,
+            size: 1,
         }),
         LitKind::Int(n, _) if neg => {
             let n = n as i128;
@@ -194,14 +194,8 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> {
             };
             parse_float(n, fty)
         }
-        LitKind::Bool(b) => ConstValue::Scalar(Scalar::Bits {
-            bits: b as u128,
-            defined: 8,
-        }),
-        LitKind::Char(c) => ConstValue::Scalar(Scalar::Bits {
-            bits: c as u128,
-            defined: 32,
-        }),
+        LitKind::Bool(b) => ConstValue::Scalar(Scalar::from_bool(b)),
+        LitKind::Char(c) => ConstValue::Scalar(Scalar::from_char(c)),
     };
     ty::Const::from_const_value(self.tcx, lit, ty)
 }
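The `(n << shift) >> shift` pattern used in the `trunc` closure above (and again in the discriminant and pretty-printing code elsewhere in this commit) relies on every scalar being carried in a `u128`/`i128`: shifting left by `128 - bit_width` and back either zero- or sign-extends, depending on the intermediate type. A standalone illustration in plain Rust, independent of the compiler types:

    fn truncate_to(bits: u128, bit_width: u32) -> u128 {
        let shift = 128 - bit_width;
        (bits << shift) >> shift            // logical shifts: keep only the low `bit_width` bits
    }

    fn sign_extend_from(bits: u128, bit_width: u32) -> i128 {
        let shift = 128 - bit_width;
        ((bits as i128) << shift) >> shift  // arithmetic right shift replicates the sign bit
    }

    // truncate_to(0x1_23, 8) == 0x23; sign_extend_from(0xFF, 8) == -1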

View File

@@ -19,7 +19,7 @@ pub(crate) use self::check_match::check_match;
 use interpret::{const_val_field, const_variant_index, self};
 use rustc::mir::{fmt_const_val, Field, BorrowKind, Mutability};
-use rustc::mir::interpret::{Scalar, GlobalId, ConstValue, Value};
+use rustc::mir::interpret::{Scalar, GlobalId, ConstValue};
 use rustc::ty::{self, TyCtxt, AdtDef, Ty, Region};
 use rustc::ty::subst::{Substs, Kind};
 use rustc::hir::{self, PatKind, RangeEnd};
@@ -1080,8 +1080,9 @@ pub fn compare_const_vals<'a, 'tcx>(
         l.partial_cmp(&r)
     },
     ty::TyInt(_) => {
-        let a = interpret::sign_extend(tcx, a, ty.value).expect("layout error for TyInt");
-        let b = interpret::sign_extend(tcx, b, ty.value).expect("layout error for TyInt");
+        let layout = tcx.layout_of(ty).ok()?;
+        let a = interpret::sign_extend(a, layout);
+        let b = interpret::sign_extend(b, layout);
         Some((a as i128).cmp(&(b as i128)))
     },
     _ => Some(a.cmp(&b)),
@@ -1090,16 +1091,16 @@ pub fn compare_const_vals<'a, 'tcx>(
     if let ty::TyRef(_, rty, _) = ty.value.sty {
         if let ty::TyStr = rty.sty {
-            match (a.to_byval_value(), b.to_byval_value()) {
+            match (a.val, b.val) {
                 (
-                    Some(Value::ScalarPair(
+                    ConstValue::ScalarPair(
                         Scalar::Ptr(ptr_a),
                         len_a,
-                    )),
-                    Some(Value::ScalarPair(
+                    ),
+                    ConstValue::ScalarPair(
                         Scalar::Ptr(ptr_b),
                         len_b,
-                    ))
+                    ),
                 ) if ptr_a.offset.bytes() == 0 && ptr_b.offset.bytes() == 0 => {
                     if let Ok(len_a) = len_a.to_bits(tcx.data_layout.pointer_size) {
                         if let Ok(len_b) = len_b.to_bits(tcx.data_layout.pointer_size) {
@@ -1142,7 +1143,7 @@ fn lit_to_const<'a, 'tcx>(lit: &'tcx ast::LitKind,
         let s = s.as_str();
         let id = tcx.allocate_bytes(s.as_bytes());
         let value = Scalar::Ptr(id.into()).to_value_with_len(s.len() as u64, tcx);
-        ConstValue::from_byval_value(value)
+        ConstValue::from_byval_value(value).unwrap()
     },
     LitKind::ByteStr(ref data) => {
         let id = tcx.allocate_bytes(data);
@@ -1150,7 +1151,7 @@ fn lit_to_const<'a, 'tcx>(lit: &'tcx ast::LitKind,
     },
     LitKind::Byte(n) => ConstValue::Scalar(Scalar::Bits {
         bits: n as u128,
-        defined: 8,
+        size: 1,
     }),
     LitKind::Int(n, _) => {
         enum Int {
@@ -1188,10 +1189,10 @@ fn lit_to_const<'a, 'tcx>(lit: &'tcx ast::LitKind,
             Int::Signed(IntTy::I128)| Int::Unsigned(UintTy::U128) => n,
             _ => bug!(),
         };
-        let defined = tcx.layout_of(ty::ParamEnv::empty().and(ty)).unwrap().size.bits() as u8;
+        let size = tcx.layout_of(ty::ParamEnv::empty().and(ty)).unwrap().size.bytes() as u8;
         ConstValue::Scalar(Scalar::Bits {
             bits: n,
-            defined,
+            size,
         })
     },
     LitKind::Float(n, fty) => {
@@ -1204,14 +1205,8 @@ fn lit_to_const<'a, 'tcx>(lit: &'tcx ast::LitKind,
         };
         parse_float(n, fty, neg).map_err(|_| LitToConstError::UnparseableFloat)?
     }
-    LitKind::Bool(b) => ConstValue::Scalar(Scalar::Bits {
-        bits: b as u128,
-        defined: 8,
-    }),
-    LitKind::Char(c) => ConstValue::Scalar(Scalar::Bits {
-        bits: c as u128,
-        defined: 32,
-    }),
+    LitKind::Bool(b) => ConstValue::Scalar(Scalar::from_bool(b)),
+    LitKind::Char(c) => ConstValue::Scalar(Scalar::from_char(c)),
     };
     Ok(ty::Const::from_const_value(tcx, lit, ty))
 }
@@ -1224,7 +1219,7 @@ pub fn parse_float<'tcx>(
     let num = num.as_str();
     use rustc_apfloat::ieee::{Single, Double};
     use rustc_apfloat::Float;
-    let (bits, defined) = match fty {
+    let (bits, size) = match fty {
         ast::FloatTy::F32 => {
             num.parse::<f32>().map_err(|_| ())?;
             let mut f = num.parse::<Single>().unwrap_or_else(|e| {
@@ -1233,7 +1228,7 @@ pub fn parse_float<'tcx>(
             if neg {
                 f = -f;
             }
-            (f.to_bits(), 32)
+            (f.to_bits(), 4)
         }
         ast::FloatTy::F64 => {
             num.parse::<f64>().map_err(|_| ())?;
@@ -1243,9 +1238,9 @@ pub fn parse_float<'tcx>(
             if neg {
                 f = -f;
             }
-            (f.to_bits(), 64)
+            (f.to_bits(), 8)
         }
     };
-    Ok(ConstValue::Scalar(Scalar::Bits { bits, defined }))
+    Ok(ConstValue::Scalar(Scalar::Bits { bits, size }))
 }

View File

@@ -1,5 +1,5 @@
 use rustc::ty::{self, Ty};
-use rustc::ty::layout::{self, LayoutOf};
+use rustc::ty::layout::{self, LayoutOf, TyLayout};
 use syntax::ast::{FloatTy, IntTy, UintTy};

 use rustc_apfloat::ieee::{Single, Double};
@@ -18,11 +18,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
         dest_ty: Ty<'tcx>,
         dest: Place,
     ) -> EvalResult<'tcx> {
+        let src_layout = self.layout_of(src.ty)?;
+        let dst_layout = self.layout_of(dest_ty)?;
         use rustc::mir::CastKind::*;
         match kind {
             Unsize => {
-                let src_layout = self.layout_of(src.ty)?;
-                let dst_layout = self.layout_of(dest_ty)?;
                 self.unsize_into(src.value, src_layout, dest, dst_layout)?;
             }
@@ -57,16 +57,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                 let discr_val = def
                     .discriminant_for_variant(*self.tcx, index)
                     .val;
-                let defined = self
-                    .layout_of(dest_ty)
-                    .unwrap()
-                    .size
-                    .bits() as u8;
                 return self.write_scalar(
                     dest,
                     Scalar::Bits {
                         bits: discr_val,
-                        defined,
+                        size: dst_layout.size.bytes() as u8,
                     },
                     dest_ty);
             }
@@ -76,9 +71,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                 }

                 let src_val = self.value_to_scalar(src)?;
-                let dest_val = self.cast_scalar(src_val, src.ty, dest_ty)?;
+                let dest_val = self.cast_scalar(src_val, src_layout, dst_layout)?;
                 let valty = ValTy {
-                    value: Value::Scalar(dest_val),
+                    value: Value::Scalar(dest_val.into()),
                     ty: dest_ty,
                 };
                 self.write_value(valty, dest)?;
@@ -100,7 +95,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                 ).ok_or_else(|| EvalErrorKind::TooGeneric.into());
                 let fn_ptr = self.memory.create_fn_alloc(instance?);
                 let valty = ValTy {
-                    value: Value::Scalar(fn_ptr.into()),
+                    value: Value::Scalar(Scalar::Ptr(fn_ptr.into()).into()),
                     ty: dest_ty,
                 };
                 self.write_value(valty, dest)?;
@@ -136,7 +131,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                 );
                 let fn_ptr = self.memory.create_fn_alloc(instance);
                 let valty = ValTy {
-                    value: Value::Scalar(fn_ptr.into()),
+                    value: Value::Scalar(Scalar::Ptr(fn_ptr.into()).into()),
                     ty: dest_ty,
                 };
                 self.write_value(valty, dest)?;
@@ -151,20 +146,19 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
     pub(super) fn cast_scalar(
         &self,
         val: Scalar,
-        src_ty: Ty<'tcx>,
-        dest_ty: Ty<'tcx>,
+        src_layout: TyLayout<'tcx>,
+        dest_layout: TyLayout<'tcx>,
     ) -> EvalResult<'tcx, Scalar> {
         use rustc::ty::TypeVariants::*;
-        trace!("Casting {:?}: {:?} to {:?}", val, src_ty, dest_ty);
+        trace!("Casting {:?}: {:?} to {:?}", val, src_layout.ty, dest_layout.ty);
         match val {
-            Scalar::Bits { defined: 0, .. } => Ok(val),
-            Scalar::Ptr(ptr) => self.cast_from_ptr(ptr, dest_ty),
-            Scalar::Bits { bits, .. } => {
-                // TODO(oli-obk): check defined bits here
-                match src_ty.sty {
-                    TyFloat(fty) => self.cast_from_float(bits, fty, dest_ty),
-                    _ => self.cast_from_int(bits, src_ty, dest_ty),
+            Scalar::Ptr(ptr) => self.cast_from_ptr(ptr, dest_layout.ty),
+            Scalar::Bits { bits, size } => {
+                assert_eq!(size as u64, src_layout.size.bytes());
+                match src_layout.ty.sty {
+                    TyFloat(fty) => self.cast_from_float(bits, fty, dest_layout.ty),
+                    _ => self.cast_from_int(bits, src_layout, dest_layout),
                 }
             }
         }
@@ -173,56 +167,58 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
     fn cast_from_int(
         &self,
         v: u128,
-        src_ty: Ty<'tcx>,
-        dest_ty: Ty<'tcx>,
+        src_layout: TyLayout<'tcx>,
+        dest_layout: TyLayout<'tcx>,
     ) -> EvalResult<'tcx, Scalar> {
-        let signed = self.layout_of(src_ty)?.abi.is_signed();
+        let signed = src_layout.abi.is_signed();
         let v = if signed {
-            self.sign_extend(v, src_ty)?
+            self.sign_extend(v, src_layout)
         } else {
             v
         };
-        trace!("cast_from_int: {}, {}, {}", v, src_ty, dest_ty);
+        trace!("cast_from_int: {}, {}, {}", v, src_layout.ty, dest_layout.ty);
         use rustc::ty::TypeVariants::*;
-        match dest_ty.sty {
+        match dest_layout.ty.sty {
             TyInt(_) | TyUint(_) => {
-                let v = self.truncate(v, dest_ty)?;
+                let v = self.truncate(v, dest_layout);
                 Ok(Scalar::Bits {
                     bits: v,
-                    defined: self.layout_of(dest_ty).unwrap().size.bits() as u8,
+                    size: dest_layout.size.bytes() as u8,
                 })
             }

             TyFloat(FloatTy::F32) if signed => Ok(Scalar::Bits {
                 bits: Single::from_i128(v as i128).value.to_bits(),
-                defined: 32,
+                size: 4,
             }),
             TyFloat(FloatTy::F64) if signed => Ok(Scalar::Bits {
                 bits: Double::from_i128(v as i128).value.to_bits(),
-                defined: 64,
+                size: 8,
             }),
             TyFloat(FloatTy::F32) => Ok(Scalar::Bits {
                 bits: Single::from_u128(v).value.to_bits(),
-                defined: 32,
+                size: 4,
             }),
             TyFloat(FloatTy::F64) => Ok(Scalar::Bits {
                 bits: Double::from_u128(v).value.to_bits(),
-                defined: 64,
+                size: 8,
            }),

-            TyChar if v as u8 as u128 == v => Ok(Scalar::Bits { bits: v, defined: 32 }),
-            TyChar => err!(InvalidChar(v)),
+            TyChar => {
+                assert_eq!(v as u8 as u128, v);
+                Ok(Scalar::Bits { bits: v, size: 4 })
+            },

             // No alignment check needed for raw pointers. But we have to truncate to target ptr size.
             TyRawPtr(_) => {
                 Ok(Scalar::Bits {
                     bits: self.memory.truncate_to_ptr(v).0 as u128,
-                    defined: self.memory.pointer_size().bits() as u8,
+                    size: self.memory.pointer_size().bytes() as u8,
                 })
             },

             // Casts to bool are not permitted by rustc, no need to handle them here.
-            _ => err!(Unimplemented(format!("int to {:?} cast", dest_ty))),
+            _ => err!(Unimplemented(format!("int to {:?} cast", dest_layout.ty))),
         }
     }
@@ -236,11 +232,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
         match fty {
             FloatTy::F32 => Ok(Scalar::Bits {
                 bits: Single::from_bits(bits).to_u128(width).value,
-                defined: width as u8,
+                size: (width / 8) as u8,
             }),
             FloatTy::F64 => Ok(Scalar::Bits {
                 bits: Double::from_bits(bits).to_u128(width).value,
-                defined: width as u8,
+                size: (width / 8) as u8,
             }),
         }
     },
@@ -250,11 +246,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
         match fty {
             FloatTy::F32 => Ok(Scalar::Bits {
                 bits: Single::from_bits(bits).to_i128(width).value as u128,
-                defined: width as u8,
+                size: (width / 8) as u8,
             }),
             FloatTy::F64 => Ok(Scalar::Bits {
                 bits: Double::from_bits(bits).to_i128(width).value as u128,
-                defined: width as u8,
+                size: (width / 8) as u8,
             }),
         }
     },
@@ -262,24 +258,24 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
     TyFloat(FloatTy::F32) if fty == FloatTy::F64 => {
         Ok(Scalar::Bits {
             bits: Single::to_bits(Double::from_bits(bits).convert(&mut false).value),
-            defined: 32,
+            size: 4,
         })
     },
     // f32 -> f64
     TyFloat(FloatTy::F64) if fty == FloatTy::F32 => {
         Ok(Scalar::Bits {
             bits: Double::to_bits(Single::from_bits(bits).convert(&mut false).value),
-            defined: 64,
+            size: 8,
         })
     },
     // identity cast
     TyFloat(FloatTy:: F64) => Ok(Scalar::Bits {
         bits,
-        defined: 64,
+        size: 8,
     }),
     TyFloat(FloatTy:: F32) => Ok(Scalar::Bits {
         bits,
-        defined: 32,
+        size: 4,
     }),
     _ => err!(Unimplemented(format!("float to {:?} cast", dest_ty))),
 }
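Note the recurring direction of the change in this file: everywhere a width used to be stored as a bit count (`defined: 32`, `pointer_size.bits()`), the new `size` field stores bytes (`size: 4`, `pointer_size.bytes()`), and mismatches are now hard assertion failures instead of `ReadUndefBytes` errors. A minimal sketch of the invariant those assertions enforce (hypothetical helper, not part of the commit):

    fn check_scalar_size(scalar: Scalar, layout_size_in_bytes: u64) {
        if let Scalar::Bits { size, .. } = scalar {
            // a Scalar::Bits must exactly match the layout size of its type;
            // partially defined scalars no longer exist at this level
            assert_eq!(size as u64, layout_size_in_bytes);
        }
    }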

View File

@@ -2,11 +2,12 @@ use std::fmt;
 use std::error::Error;

 use rustc::hir;
-use rustc::mir::interpret::{ConstEvalErr};
+use rustc::mir::interpret::{ConstEvalErr, ScalarMaybeUndef};
 use rustc::mir;
 use rustc::ty::{self, TyCtxt, Ty, Instance};
 use rustc::ty::layout::{self, LayoutOf, Primitive, TyLayout};
 use rustc::ty::subst::Subst;
+use rustc_data_structures::indexed_vec::IndexVec;

 use syntax::ast::Mutability;
 use syntax::codemap::Span;
@@ -28,13 +29,16 @@ pub fn mk_borrowck_eval_cx<'a, 'mir, 'tcx>(
     let param_env = tcx.param_env(instance.def_id());
     let mut ecx = EvalContext::new(tcx.at(span), param_env, CompileTimeEvaluator, ());
     // insert a stack frame so any queries have the correct substs
-    ecx.push_stack_frame(
+    ecx.stack.push(super::eval_context::Frame {
+        block: mir::START_BLOCK,
+        locals: IndexVec::new(),
         instance,
         span,
         mir,
-        Place::undef(),
-        StackPopCleanup::None,
-    )?;
+        return_place: Place::undef(),
+        return_to_block: StackPopCleanup::None,
+        stmt: 0,
+    });
     Ok(ecx)
 }
@@ -76,7 +80,7 @@ pub fn value_to_const_value<'tcx>(
 ) -> &'tcx ty::Const<'tcx> {
     let layout = ecx.layout_of(ty).unwrap();
     match (val, &layout.abi) {
-        (Value::Scalar(Scalar::Bits { defined: 0, ..}), _) if layout.is_zst() => {},
+        (Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { size: 0, ..})), _) if layout.is_zst() => {},
         (Value::ByRef(..), _) |
         (Value::Scalar(_), &layout::Abi::Scalar(_)) |
         (Value::ScalarPair(..), &layout::Abi::ScalarPair(..)) => {},
@@ -84,8 +88,8 @@ pub fn value_to_const_value<'tcx>(
     }
     let val = (|| {
         match val {
-            Value::Scalar(val) => Ok(ConstValue::Scalar(val)),
-            Value::ScalarPair(a, b) => Ok(ConstValue::ScalarPair(a, b)),
+            Value::Scalar(val) => Ok(ConstValue::Scalar(val.read()?)),
+            Value::ScalarPair(a, b) => Ok(ConstValue::ScalarPair(a.read()?, b.read()?)),
             Value::ByRef(ptr, align) => {
                 let ptr = ptr.to_ptr().unwrap();
                 let alloc = ecx.memory.get(ptr.alloc_id)?;
@@ -307,7 +311,7 @@ impl<'mir, 'tcx> super::Machine<'mir, 'tcx> for CompileTimeEvaluator {
                 let elem_align = ecx.layout_of(elem_ty)?.align.abi();
                 let align_val = Scalar::Bits {
                     bits: elem_align as u128,
-                    defined: dest_layout.size.bits() as u8,
+                    size: dest_layout.size.bytes() as u8,
                 };
                 ecx.write_scalar(dest, align_val, dest_layout.ty)?;
             }
@@ -317,7 +321,7 @@ impl<'mir, 'tcx> super::Machine<'mir, 'tcx> for CompileTimeEvaluator {
                 let size = ecx.layout_of(ty)?.size.bytes() as u128;
                 let size_val = Scalar::Bits {
                     bits: size,
-                    defined: dest_layout.size.bits() as u8,
+                    size: dest_layout.size.bytes() as u8,
                 };
                 ecx.write_scalar(dest, size_val, dest_layout.ty)?;
             }
@@ -327,7 +331,7 @@ impl<'mir, 'tcx> super::Machine<'mir, 'tcx> for CompileTimeEvaluator {
                 let type_id = ecx.tcx.type_id_hash(ty) as u128;
                 let id_val = Scalar::Bits {
                     bits: type_id,
-                    defined: dest_layout.size.bits() as u8,
+                    size: dest_layout.size.bytes() as u8,
                 };
                 ecx.write_scalar(dest, id_val, dest_layout.ty)?;
             }
@@ -437,7 +441,7 @@ pub fn const_val_field<'a, 'tcx>(
         let place = ecx.allocate_place_for_value(value, layout, variant)?;
         let (place, layout) = ecx.place_field(place, field, layout)?;
         let (ptr, align) = place.to_ptr_align();
-        let mut new_value = Value::ByRef(ptr, align);
+        let mut new_value = Value::ByRef(ptr.read()?, align);
         new_value = ecx.try_read_by_ref(new_value, layout.ty)?;
         use rustc_data_structures::indexed_vec::Idx;
         match (value, new_value) {
@@ -562,6 +566,9 @@ pub fn const_eval_provider<'a, 'tcx>(
         };
         if tcx.is_static(def_id).is_some() {
             err.report_as_error(ecx.tcx, "could not evaluate static initializer");
+            if tcx.sess.err_count() == 0 {
+                span_bug!(span, "static eval failure didn't emit an error: {:#?}", err);
+            }
         }
         err.into()
     })
@@ -572,11 +579,11 @@ fn numeric_intrinsic<'tcx>(
     bits: u128,
     kind: Primitive,
 ) -> EvalResult<'tcx, Scalar> {
-    let defined = match kind {
-        Primitive::Int(integer, _) => integer.size().bits() as u8,
+    let size = match kind {
+        Primitive::Int(integer, _) => integer.size(),
         _ => bug!("invalid `{}` argument: {:?}", name, bits),
     };
-    let extra = 128 - defined as u128;
+    let extra = 128 - size.bits() as u128;
     let bits_out = match name {
         "ctpop" => bits.count_ones() as u128,
         "ctlz" => bits.leading_zeros() as u128 - extra,
@@ -584,5 +591,5 @@ fn numeric_intrinsic<'tcx>(
         "bswap" => (bits << extra).swap_bytes(),
         _ => bug!("not a numeric intrinsic: {}", name),
     };
-    Ok(Scalar::Bits { bits: bits_out, defined })
+    Ok(Scalar::Bits { bits: bits_out, size: size.bytes() as u8 })
 }
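`numeric_intrinsic` keeps every operand widened to a `u128`, so the width-sensitive intrinsics compensate for the unused high bits via `extra = 128 - size.bits()`. A small worked example of that adjustment in plain Rust (hypothetical values, not from the commit):

    fn demo_ctlz_bswap_adjustment() {
        let bits: u128 = 0x10;      // a u8 value, stored widened to u128
        let extra = 128 - 8;        // 128 - size.bits() for a u8
        assert_eq!(bits.leading_zeros() as u128 - extra, 3);  // ctlz of 0x10u8 is 3
        assert_eq!((bits << extra).swap_bytes(), 0x10);        // bswap of a single byte is itself
    }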

View File

@@ -15,6 +15,7 @@ use rustc_data_structures::indexed_vec::{IndexVec, Idx};
 use rustc::mir::interpret::{
     GlobalId, Value, Scalar, FrameInfo, AllocType,
     EvalResult, EvalErrorKind, Pointer, ConstValue,
+    ScalarMaybeUndef,
 };

 use syntax::codemap::{self, Span};
@@ -105,9 +106,7 @@ pub struct Frame<'mir, 'tcx: 'mir> {
     /// `[return_ptr, arguments..., variables..., temporaries...]`. The locals are stored as `Option<Value>`s.
     /// `None` represents a local that is currently dead, while a live local
     /// can either directly contain `Scalar` or refer to some part of an `Allocation`.
-    ///
-    /// Before being initialized, arguments are `Value::Scalar(Scalar::undef())` and other locals are `None`.
-    pub locals: IndexVec<mir::Local, Option<Value>>,
+    pub locals: IndexVec<mir::Local, LocalValue>,

     ////////////////////////////////////////////////////////////////////////////////
     // Current position within the function
@@ -120,6 +119,21 @@ pub struct Frame<'mir, 'tcx: 'mir> {
     pub stmt: usize,
 }

+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+pub enum LocalValue {
+    Dead,
+    Live(Value),
+}
+
+impl LocalValue {
+    pub fn access(self) -> EvalResult<'static, Value> {
+        match self {
+            LocalValue::Dead => err!(DeadLocal),
+            LocalValue::Live(val) => Ok(val),
+        }
+    }
+}
+
 impl<'mir, 'tcx: 'mir> Eq for Frame<'mir, 'tcx> {}

 impl<'mir, 'tcx: 'mir> PartialEq for Frame<'mir, 'tcx> {
@@ -395,8 +409,8 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                 let id = self.memory.allocate_value(alloc.clone(), MemoryKind::Stack)?;
                 Ok(Value::ByRef(Pointer::new(id, offset).into(), alloc.align))
             },
-            ConstValue::ScalarPair(a, b) => Ok(Value::ScalarPair(a, b)),
-            ConstValue::Scalar(val) => Ok(Value::Scalar(val)),
+            ConstValue::ScalarPair(a, b) => Ok(Value::ScalarPair(a.into(), b.into())),
+            ConstValue::Scalar(val) => Ok(Value::Scalar(val.into())),
         }
     }
@@ -538,8 +552,26 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
     ) -> EvalResult<'tcx> {
         ::log_settings::settings().indentation += 1;

-        let locals = if mir.local_decls.len() > 1 {
-            let mut locals = IndexVec::from_elem(Some(Value::Scalar(Scalar::undef())), &mir.local_decls);
+        // first push a stack frame so we have access to the local substs
+        self.stack.push(Frame {
+            mir,
+            block: mir::START_BLOCK,
+            return_to_block,
+            return_place,
+            // empty local array, we fill it in below, after we are inside the stack frame and
+            // all methods actually know about the frame
+            locals: IndexVec::new(),
+            span,
+            instance,
+            stmt: 0,
+        });
+
+        // don't allocate at all for trivial constants
+        if mir.local_decls.len() > 1 {
+            let mut locals = IndexVec::from_elem(LocalValue::Dead, &mir.local_decls);
+            for (local, decl) in locals.iter_mut().zip(mir.local_decls.iter()) {
+                *local = LocalValue::Live(self.init_value(decl.ty)?);
+            }
             match self.tcx.describe_def(instance.def_id()) {
                 // statics and constants don't have `Storage*` statements, no need to look for them
                 Some(Def::Static(..)) | Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => {},
@@ -550,29 +582,15 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                         use rustc::mir::StatementKind::{StorageDead, StorageLive};
                         match stmt.kind {
                             StorageLive(local) |
-                            StorageDead(local) => locals[local] = None,
+                            StorageDead(local) => locals[local] = LocalValue::Dead,
                             _ => {}
                         }
                     }
                 }
                },
            }
-            locals
-        } else {
-            // don't allocate at all for trivial constants
-            IndexVec::new()
-        };
-
-        self.stack.push(Frame {
-            mir,
-            block: mir::START_BLOCK,
-            return_to_block,
-            return_place,
-            locals,
-            span,
-            instance,
-            stmt: 0,
-        });
+            self.frame_mut().locals = locals;
+        }

         self.memory.cur_frame = self.cur_frame();
@@ -598,7 +616,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
             if let Place::Ptr { ptr, .. } = frame.return_place {
                 // FIXME: to_ptr()? might be too extreme here, static zsts might reach this under certain conditions
                 self.memory.mark_static_initialized(
-                    ptr.to_ptr()?.alloc_id,
+                    ptr.read()?.to_ptr()?.alloc_id,
                     mutable,
                 )?
             } else {
@@ -616,8 +634,9 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
         Ok(())
     }

-    pub fn deallocate_local(&mut self, local: Option<Value>) -> EvalResult<'tcx> {
-        if let Some(Value::ByRef(ptr, _align)) = local {
+    pub fn deallocate_local(&mut self, local: LocalValue) -> EvalResult<'tcx> {
+        // FIXME: should we tell the user that there was a local which was never written to?
+        if let LocalValue::Live(Value::ByRef(ptr, _align)) = local {
             trace!("deallocating local");
             let ptr = ptr.to_ptr()?;
             self.memory.dump_alloc(ptr.alloc_id);
@@ -637,6 +656,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
     ) -> EvalResult<'tcx> {
         let dest = self.eval_place(place)?;
         let dest_ty = self.place_ty(place);
+        let dest_layout = self.layout_of(dest_ty)?;

         use rustc::mir::Rvalue::*;
         match *rvalue {
@@ -675,7 +695,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
             UnaryOp(un_op, ref operand) => {
                 let val = self.eval_operand_to_scalar(operand)?;
-                let val = self.unary_op(un_op, val, dest_ty)?;
+                let val = self.unary_op(un_op, val, dest_layout)?;
                 self.write_scalar(
                     dest,
                     val,
@@ -724,6 +744,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                 let (dest, dest_align) = self.force_allocation(dest)?.to_ptr_align();

                 if length > 0 {
+                    let dest = dest.read()?;
                     //write the first value
                     self.write_value_to_ptr(value, dest, dest_align, elem_ty)?;
@@ -739,12 +760,12 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                 let src = self.eval_place(place)?;
                 let ty = self.place_ty(place);
                 let (_, len) = src.elem_ty_and_len(ty, self.tcx.tcx);
-                let defined = self.memory.pointer_size().bits() as u8;
+                let size = self.memory.pointer_size().bytes() as u8;
                 self.write_scalar(
                     dest,
                     Scalar::Bits {
                         bits: len as u128,
-                        defined,
+                        size,
                     },
                     dest_ty,
                 )?;
@@ -757,7 +778,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                 let (ptr, _align, extra) = self.force_allocation(src)?.to_ptr_align_extra();
                 let val = match extra {
-                    PlaceExtra::None => ptr.to_value(),
+                    PlaceExtra::None => Value::Scalar(ptr),
                     PlaceExtra::Length(len) => ptr.to_value_with_len(len, self.tcx.tcx),
                     PlaceExtra::Vtable(vtable) => ptr.to_value_with_vtable(vtable),
                     PlaceExtra::DowncastVariant(..) => {
@@ -781,12 +802,12 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                 let layout = self.layout_of(ty)?;
                 assert!(!layout.is_unsized(),
                     "SizeOf nullary MIR operator called for unsized type");
-                let defined = self.memory.pointer_size().bits() as u8;
+                let size = self.memory.pointer_size().bytes() as u8;
                 self.write_scalar(
                     dest,
                     Scalar::Bits {
                         bits: layout.size.bytes() as u128,
-                        defined,
+                        size,
                     },
                     dest_ty,
                 )?;
@@ -803,10 +824,10 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                 let layout = self.layout_of(ty)?;
                 let place = self.eval_place(place)?;
                 let discr_val = self.read_discriminant_value(place, layout)?;
-                let defined = self.layout_of(dest_ty).unwrap().size.bits() as u8;
+                let size = self.layout_of(dest_ty).unwrap().size.bytes() as u8;
                 self.write_scalar(dest, Scalar::Bits {
                     bits: discr_val,
-                    defined,
+                    size,
                 }, dest_ty)?;
             }
         }
@@ -957,10 +978,8 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                     assert!(variants_start == variants_end);
                     dataful_variant as u128
                 },
-                Scalar::Bits { bits: raw_discr, defined } => {
-                    if defined < discr.size.bits() as u8 {
-                        return err!(ReadUndefBytes);
-                    }
+                Scalar::Bits { bits: raw_discr, size } => {
+                    assert_eq!(size as u64, discr.size.bytes());
                     let discr = raw_discr.wrapping_sub(niche_start)
                         .wrapping_add(variants_start);
                     if variants_start <= discr && discr <= variants_end {
@@ -1002,14 +1021,14 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                 // raw discriminants for enums are isize or bigger during
                 // their computation, but the in-memory tag is the smallest possible
                 // representation
-                let size = tag.value.size(self.tcx.tcx).bits();
-                let shift = 128 - size;
+                let size = tag.value.size(self.tcx.tcx);
+                let shift = 128 - size.bits();
                 let discr_val = (discr_val << shift) >> shift;

                 let (discr_dest, tag) = self.place_field(dest, mir::Field::new(0), layout)?;
                 self.write_scalar(discr_dest, Scalar::Bits {
                     bits: discr_val,
-                    defined: size as u8,
+                    size: size.bytes() as u8,
                 }, tag.ty)?;
             }
             layout::Variants::NicheFilling {
@@ -1025,7 +1044,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                         .wrapping_add(niche_start);
                     self.write_scalar(niche_dest, Scalar::Bits {
                         bits: niche_value,
-                        defined: niche.size.bits() as u8,
+                        size: niche.size.bytes() as u8,
                     }, niche.ty)?;
                 }
             }
@@ -1072,22 +1091,22 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
     pub fn force_allocation(&mut self, place: Place) -> EvalResult<'tcx, Place> {
         let new_place = match place {
             Place::Local { frame, local } => {
-                match self.stack[frame].locals[local] {
-                    None => return err!(DeadLocal),
-                    Some(Value::ByRef(ptr, align)) => {
+                match self.stack[frame].locals[local].access()? {
+                    Value::ByRef(ptr, align) => {
                         Place::Ptr {
-                            ptr,
+                            ptr: ptr.into(),
                             align,
                             extra: PlaceExtra::None,
                         }
                     }
-                    Some(val) => {
+                    val => {
                         let ty = self.stack[frame].mir.local_decls[local].ty;
                         let ty = self.monomorphize(ty, self.stack[frame].instance.substs);
                         let layout = self.layout_of(ty)?;
                         let ptr = self.alloc_ptr(layout)?;
                         self.stack[frame].locals[local] =
-                            Some(Value::ByRef(ptr.into(), layout.align)); // it stays live
+                            LocalValue::Live(Value::ByRef(ptr.into(), layout.align)); // it stays live
                         let place = Place::from_ptr(ptr, layout.align);
                         self.write_value(ValTy { value: val, ty }, place)?;
                         place
@@ -1137,11 +1156,11 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
     pub fn write_scalar(
         &mut self,
         dest: Place,
-        val: Scalar,
+        val: impl Into<ScalarMaybeUndef>,
         dest_ty: Ty<'tcx>,
     ) -> EvalResult<'tcx> {
         let valty = ValTy {
-            value: Value::Scalar(val),
+            value: Value::Scalar(val.into()),
             ty: dest_ty,
         };
         self.write_value(valty, dest)
@@ -1160,15 +1179,15 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
         match dest {
             Place::Ptr { ptr, align, extra } => {
                 assert_eq!(extra, PlaceExtra::None);
-                self.write_value_to_ptr(src_val, ptr, align, dest_ty)
+                self.write_value_to_ptr(src_val, ptr.read()?, align, dest_ty)
             }

             Place::Local { frame, local } => {
-                let dest = self.stack[frame].get_local(local)?;
+                let old_val = self.stack[frame].locals[local].access()?;
                 self.write_value_possibly_by_val(
                     src_val,
                     |this, val| this.stack[frame].set_local(local, val),
-                    dest,
+                    old_val,
                     dest_ty,
                 )
             }
@@ -1183,6 +1202,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
         old_dest_val: Value,
         dest_ty: Ty<'tcx>,
     ) -> EvalResult<'tcx> {
+        // FIXME: this should be a layout check, not underlying value
        if let Value::ByRef(dest_ptr, align) = old_dest_val {
            // If the value is already `ByRef` (that is, backed by an `Allocation`),
            // then we must write the new value into this allocation, because there may be
@@ -1239,10 +1259,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                 layout::Primitive::Int(_, signed) => signed,
                 _ => false,
             },
-            _ => match scalar {
-                Scalar::Bits { defined: 0, .. } => false,
-                _ => bug!("write_value_to_ptr: invalid ByVal layout: {:#?}", layout),
-            }
+            _ => false,
         };
         self.memory.write_scalar(dest, dest_align, scalar, layout.size, signed)
     }
@@ -1278,20 +1295,22 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
         pointee_ty: Ty<'tcx>,
     ) -> EvalResult<'tcx, Value> {
         let ptr_size = self.memory.pointer_size();
-        let p: Scalar = self.memory.read_ptr_sized(ptr, ptr_align)?.into();
+        let p: ScalarMaybeUndef = self.memory.read_ptr_sized(ptr, ptr_align)?;
         if self.type_is_sized(pointee_ty) {
-            Ok(p.to_value())
+            Ok(Value::Scalar(p))
         } else {
             trace!("reading fat pointer extra of type {}", pointee_ty);
             let extra = ptr.offset(ptr_size, self)?;
             match self.tcx.struct_tail(pointee_ty).sty {
-                ty::TyDynamic(..) => Ok(p.to_value_with_vtable(
-                    self.memory.read_ptr_sized(extra, ptr_align)?.to_ptr()?,
+                ty::TyDynamic(..) => Ok(Value::ScalarPair(
+                    p,
+                    self.memory.read_ptr_sized(extra, ptr_align)?,
                 )),
                 ty::TySlice(..) | ty::TyStr => {
                     let len = self
                         .memory
                         .read_ptr_sized(extra, ptr_align)?
+                        .read()?
                         .to_bits(ptr_size)?;
                     Ok(p.to_value_with_len(len as u64, self.tcx.tcx))
                 },
@@ -1347,8 +1366,10 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
         match ty.sty {
             ty::TyChar => {
                 assert_eq!(size.bytes(), 4);
-                if ::std::char::from_u32(bits as u32).is_none() {
+                let c = self.memory.read_scalar(ptr, ptr_align, Size::from_bytes(4))?.read()?.to_bits(Size::from_bytes(4))? as u32;
return err!(InvalidChar(bits)); match ::std::char::from_u32(c) {
Some(..) => (),
None => return err!(InvalidChar(c as u128)),
} }
} }
_ => {}, _ => {},
@ -1534,7 +1555,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
self.memory.check_align(ptr, ptr_align)?; self.memory.check_align(ptr, ptr_align)?;
if layout.size.bytes() == 0 { if layout.size.bytes() == 0 {
return Ok(Some(Value::Scalar(Scalar::undef()))); return Ok(Some(Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { bits: 0, size: 0 }))));
} }
let ptr = ptr.to_ptr()?; let ptr = ptr.to_ptr()?;
@ -1670,7 +1691,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
} }
let (src_f_value, src_field) = match src { let (src_f_value, src_field) = match src {
Value::ByRef(ptr, align) => { Value::ByRef(ptr, align) => {
let src_place = Place::from_scalar_ptr(ptr, align); let src_place = Place::from_scalar_ptr(ptr.into(), align);
let (src_f_place, src_field) = let (src_f_place, src_field) =
self.place_field(src_place, mir::Field::new(i), src_layout)?; self.place_field(src_place, mir::Field::new(i), src_layout)?;
(self.read_place(src_f_place)?, src_field) (self.read_place(src_f_place)?, src_field)
@ -1717,7 +1738,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
} }
write!(msg, ":").unwrap(); write!(msg, ":").unwrap();
match self.stack[frame].get_local(local) { match self.stack[frame].locals[local].access() {
Err(err) => { Err(err) => {
if let EvalErrorKind::DeadLocal = err.kind { if let EvalErrorKind::DeadLocal = err.kind {
write!(msg, " is dead").unwrap(); write!(msg, " is dead").unwrap();
@ -1736,16 +1757,16 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
} }
Ok(Value::Scalar(val)) => { Ok(Value::Scalar(val)) => {
write!(msg, " {:?}", val).unwrap(); write!(msg, " {:?}", val).unwrap();
if let Scalar::Ptr(ptr) = val { if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val {
allocs.push(ptr.alloc_id); allocs.push(ptr.alloc_id);
} }
} }
Ok(Value::ScalarPair(val1, val2)) => { Ok(Value::ScalarPair(val1, val2)) => {
write!(msg, " ({:?}, {:?})", val1, val2).unwrap(); write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
if let Scalar::Ptr(ptr) = val1 { if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val1 {
allocs.push(ptr.alloc_id); allocs.push(ptr.alloc_id);
} }
if let Scalar::Ptr(ptr) = val2 { if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val2 {
allocs.push(ptr.alloc_id); allocs.push(ptr.alloc_id);
} }
} }
@ -1756,7 +1777,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
} }
Place::Ptr { ptr, align, .. } => { Place::Ptr { ptr, align, .. } => {
match ptr { match ptr {
Scalar::Ptr(ptr) => { ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) => {
trace!("by align({}) ref:", align.abi()); trace!("by align({}) ref:", align.abi());
self.memory.dump_alloc(ptr.alloc_id); self.memory.dump_alloc(ptr.alloc_id);
} }
@ -1766,21 +1787,6 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
} }
} }
/// Convenience function to ensure correct usage of locals
pub fn modify_local<F>(&mut self, frame: usize, local: mir::Local, f: F) -> EvalResult<'tcx>
where
F: FnOnce(&mut Self, Value) -> EvalResult<'tcx, Value>,
{
let val = self.stack[frame].get_local(local)?;
let new_val = f(self, val)?;
self.stack[frame].set_local(local, new_val)?;
// FIXME(solson): Run this when setting to Undef? (See previous version of this code.)
// if let Value::ByRef(ptr) = self.stack[frame].get_local(local) {
// self.memory.deallocate(ptr)?;
// }
Ok(())
}
pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> (Vec<FrameInfo>, Span) { pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> (Vec<FrameInfo>, Span) {
let mut last_span = None; let mut last_span = None;
let mut frames = Vec::new(); let mut frames = Vec::new();
@ -1819,12 +1825,12 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
(frames, self.tcx.span) (frames, self.tcx.span)
} }
pub fn sign_extend(&self, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> { pub fn sign_extend(&self, value: u128, ty: TyLayout<'_>) -> u128 {
super::sign_extend(self.tcx.tcx, value, ty) super::sign_extend(value, ty)
} }
pub fn truncate(&self, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> { pub fn truncate(&self, value: u128, ty: TyLayout<'_>) -> u128 {
super::truncate(self.tcx.tcx, value, ty) super::truncate(value, ty)
} }
fn write_field_name(&self, s: &mut String, ty: Ty<'tcx>, i: usize, variant: usize) -> ::std::fmt::Result { fn write_field_name(&self, s: &mut String, ty: Ty<'tcx>, i: usize, variant: usize) -> ::std::fmt::Result {
@ -1893,34 +1899,45 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
} }
} }
} }
pub fn storage_live(&mut self, local: mir::Local) -> EvalResult<'tcx, LocalValue> {
trace!("{:?} is now live", local);
let ty = self.frame().mir.local_decls[local].ty;
let init = self.init_value(ty)?;
// StorageLive *always* kills the value that's currently stored
Ok(mem::replace(&mut self.frame_mut().locals[local], LocalValue::Live(init)))
}
fn init_value(&mut self, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
let ty = self.monomorphize(ty, self.substs());
let layout = self.layout_of(ty)?;
Ok(match layout.abi {
layout::Abi::Scalar(..) => Value::Scalar(ScalarMaybeUndef::Undef),
layout::Abi::ScalarPair(..) => Value::ScalarPair(
ScalarMaybeUndef::Undef,
ScalarMaybeUndef::Undef,
),
_ => Value::ByRef(self.alloc_ptr(ty)?.into(), layout.align),
})
}
} }
impl<'mir, 'tcx> Frame<'mir, 'tcx> { impl<'mir, 'tcx> Frame<'mir, 'tcx> {
pub fn get_local(&self, local: mir::Local) -> EvalResult<'tcx, Value> {
self.locals[local].ok_or_else(|| EvalErrorKind::DeadLocal.into())
}
fn set_local(&mut self, local: mir::Local, value: Value) -> EvalResult<'tcx> { fn set_local(&mut self, local: mir::Local, value: Value) -> EvalResult<'tcx> {
match self.locals[local] { match self.locals[local] {
None => err!(DeadLocal), LocalValue::Dead => err!(DeadLocal),
Some(ref mut local) => { LocalValue::Live(ref mut local) => {
*local = value; *local = value;
Ok(()) Ok(())
} }
} }
} }
pub fn storage_live(&mut self, local: mir::Local) -> Option<Value> {
trace!("{:?} is now live", local);
// StorageLive *always* kills the value that's currently stored
mem::replace(&mut self.locals[local], Some(Value::Scalar(Scalar::undef())))
}
/// Returns the old value of the local /// Returns the old value of the local
pub fn storage_dead(&mut self, local: mir::Local) -> Option<Value> { pub fn storage_dead(&mut self, local: mir::Local) -> LocalValue {
trace!("{:?} is now dead", local); trace!("{:?} is now dead", local);
self.locals[local].take() mem::replace(&mut self.locals[local], LocalValue::Dead)
} }
} }
@ -7,7 +7,7 @@ use rustc::ty::Instance;
use rustc::ty::ParamEnv; use rustc::ty::ParamEnv;
use rustc::ty::query::TyCtxtAt; use rustc::ty::query::TyCtxtAt;
use rustc::ty::layout::{self, Align, TargetDataLayout, Size}; use rustc::ty::layout::{self, Align, TargetDataLayout, Size};
use rustc::mir::interpret::{Pointer, AllocId, Allocation, AccessKind, Value, use rustc::mir::interpret::{Pointer, AllocId, Allocation, AccessKind, Value, ScalarMaybeUndef,
EvalResult, Scalar, EvalErrorKind, GlobalId, AllocType}; EvalResult, Scalar, EvalErrorKind, GlobalId, AllocType};
pub use rustc::mir::interpret::{write_target_uint, write_target_int, read_target_uint}; pub use rustc::mir::interpret::{write_target_uint, write_target_int, read_target_uint};
use rustc_data_structures::fx::{FxHashSet, FxHashMap, FxHasher}; use rustc_data_structures::fx::{FxHashSet, FxHashMap, FxHasher};
@ -272,10 +272,8 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
let alloc = self.get(ptr.alloc_id)?; let alloc = self.get(ptr.alloc_id)?;
(ptr.offset.bytes(), alloc.align) (ptr.offset.bytes(), alloc.align)
} }
Scalar::Bits { bits, defined } => { Scalar::Bits { bits, size } => {
if (defined as u64) < self.pointer_size().bits() { assert_eq!(size as u64, self.pointer_size().bytes());
return err!(ReadUndefBytes);
}
// FIXME: what on earth does this line do? docs or fix needed! // FIXME: what on earth does this line do? docs or fix needed!
let v = ((bits as u128) % (1 << self.pointer_size().bytes())) as u64; let v = ((bits as u128) % (1 << self.pointer_size().bytes())) as u64;
if v == 0 { if v == 0 {
@ -756,7 +754,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
Ok(()) Ok(())
} }
pub fn read_scalar(&self, ptr: Pointer, ptr_align: Align, size: Size) -> EvalResult<'tcx, Scalar> { pub fn read_scalar(&self, ptr: Pointer, ptr_align: Align, size: Size) -> EvalResult<'tcx, ScalarMaybeUndef> {
self.check_relocation_edges(ptr, size)?; // Make sure we don't read part of a pointer as a pointer self.check_relocation_edges(ptr, size)?; // Make sure we don't read part of a pointer as a pointer
let endianness = self.endianness(); let endianness = self.endianness();
let bytes = self.get_bytes_unchecked(ptr, size, ptr_align.min(self.int_align(size)))?; let bytes = self.get_bytes_unchecked(ptr, size, ptr_align.min(self.int_align(size)))?;
@ -764,7 +762,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
// We must not return Ok() for unaligned pointers! // We must not return Ok() for unaligned pointers!
if self.check_defined(ptr, size).is_err() { if self.check_defined(ptr, size).is_err() {
// this inflates undefined bytes to the entire scalar, even if only a few bytes are undefined // this inflates undefined bytes to the entire scalar, even if only a few bytes are undefined
return Ok(Scalar::undef().into()); return Ok(ScalarMaybeUndef::Undef);
} }
// Now we do the actual reading // Now we do the actual reading
let bits = read_target_uint(endianness, bytes).unwrap(); let bits = read_target_uint(endianness, bytes).unwrap();
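As the comment says, a single undefined byte makes the whole read come back as `ScalarMaybeUndef::Undef`; definedness is not tracked per byte at the scalar level. A rough model of that check, with a plain bool slice standing in for the allocation's undef mask (illustrative only):

// One undefined byte in the range degrades the entire scalar read to Undef.
fn range_is_defined(defined_bytes: &[bool], offset: usize, size: usize) -> bool {
    defined_bytes[offset..offset + size].iter().all(|&d| d)
}

fn main() {
    let mask = [true, true, false, true]; // byte 2 was never written
    assert!(range_is_defined(&mask, 0, 2));   // 2-byte read at offset 0 is fine
    assert!(!range_is_defined(&mask, 0, 4));  // 4-byte read inflates to Undef
}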
@ -776,44 +774,52 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
} else { } else {
let alloc = self.get(ptr.alloc_id)?; let alloc = self.get(ptr.alloc_id)?;
match alloc.relocations.get(&ptr.offset) { match alloc.relocations.get(&ptr.offset) {
Some(&alloc_id) => return Ok(Pointer::new(alloc_id, Size::from_bytes(bits as u64)).into()), Some(&alloc_id) => return Ok(ScalarMaybeUndef::Scalar(Pointer::new(alloc_id, Size::from_bytes(bits as u64)).into())),
None => {}, None => {},
} }
} }
// We don't. Just return the bits. // We don't. Just return the bits.
Ok(Scalar::Bits { Ok(ScalarMaybeUndef::Scalar(Scalar::Bits {
bits, bits,
defined: size.bits() as u8, size: size.bytes() as u8,
}) }))
} }
pub fn read_ptr_sized(&self, ptr: Pointer, ptr_align: Align) -> EvalResult<'tcx, Scalar> { pub fn read_ptr_sized(&self, ptr: Pointer, ptr_align: Align) -> EvalResult<'tcx, ScalarMaybeUndef> {
self.read_scalar(ptr, ptr_align, self.pointer_size()) self.read_scalar(ptr, ptr_align, self.pointer_size())
} }
pub fn write_scalar(&mut self, ptr: Scalar, ptr_align: Align, val: Scalar, size: Size, signed: bool) -> EvalResult<'tcx> { pub fn write_scalar(&mut self, ptr: Scalar, ptr_align: Align, val: ScalarMaybeUndef, type_size: Size, signed: bool) -> EvalResult<'tcx> {
let endianness = self.endianness(); let endianness = self.endianness();
let val = match val {
ScalarMaybeUndef::Scalar(scalar) => scalar,
ScalarMaybeUndef::Undef => return self.mark_definedness(ptr, type_size, false),
};
let bytes = match val { let bytes = match val {
Scalar::Ptr(val) => { Scalar::Ptr(val) => {
assert_eq!(size, self.pointer_size()); assert_eq!(type_size, self.pointer_size());
val.offset.bytes() as u128 val.offset.bytes() as u128
} }
Scalar::Bits { bits, defined } if defined as u64 >= size.bits() && size.bits() != 0 => bits, Scalar::Bits { size: 0, .. } => {
// nothing to do for ZSTs
Scalar::Bits { .. } => { assert_eq!(type_size.bytes(), 0);
self.check_align(ptr.into(), ptr_align)?;
self.mark_definedness(ptr, size, false)?;
return Ok(()); return Ok(());
} }
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, type_size.bytes());
bits
},
}; };
let ptr = ptr.to_ptr()?; let ptr = ptr.to_ptr()?;
{ {
let align = self.int_align(size); let align = self.int_align(type_size);
let dst = self.get_bytes_mut(ptr, size, ptr_align.min(align))?; let dst = self.get_bytes_mut(ptr, type_size, ptr_align.min(align))?;
if signed { if signed {
write_target_int(endianness, dst, bytes as i128).unwrap(); write_target_int(endianness, dst, bytes as i128).unwrap();
} else { } else {
@ -835,7 +841,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
Ok(()) Ok(())
} }
pub fn write_ptr_sized_unsigned(&mut self, ptr: Pointer, ptr_align: Align, val: Scalar) -> EvalResult<'tcx> { pub fn write_ptr_sized_unsigned(&mut self, ptr: Pointer, ptr_align: Align, val: ScalarMaybeUndef) -> EvalResult<'tcx> {
let ptr_size = self.pointer_size(); let ptr_size = self.pointer_size();
self.write_scalar(ptr.into(), ptr_align, val, ptr_size, false) self.write_scalar(ptr.into(), ptr_align, val, ptr_size, false)
} }
@ -984,7 +990,7 @@ pub trait HasMemory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
fn into_ptr( fn into_ptr(
&self, &self,
value: Value, value: Value,
) -> EvalResult<'tcx, Scalar> { ) -> EvalResult<'tcx, ScalarMaybeUndef> {
Ok(match value { Ok(match value {
Value::ByRef(ptr, align) => { Value::ByRef(ptr, align) => {
self.memory().read_ptr_sized(ptr.to_ptr()?, align)? self.memory().read_ptr_sized(ptr.to_ptr()?, align)?
@ -997,7 +1003,7 @@ pub trait HasMemory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
fn into_ptr_vtable_pair( fn into_ptr_vtable_pair(
&self, &self,
value: Value, value: Value,
) -> EvalResult<'tcx, (Scalar, Pointer)> { ) -> EvalResult<'tcx, (ScalarMaybeUndef, Pointer)> {
match value { match value {
Value::ByRef(ref_ptr, align) => { Value::ByRef(ref_ptr, align) => {
let mem = self.memory(); let mem = self.memory();
@ -1005,11 +1011,11 @@ pub trait HasMemory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
let vtable = mem.read_ptr_sized( let vtable = mem.read_ptr_sized(
ref_ptr.ptr_offset(mem.pointer_size(), &mem.tcx.data_layout)?.to_ptr()?, ref_ptr.ptr_offset(mem.pointer_size(), &mem.tcx.data_layout)?.to_ptr()?,
align align
)?.to_ptr()?; )?.read()?.to_ptr()?;
Ok((ptr, vtable)) Ok((ptr, vtable))
} }
Value::ScalarPair(ptr, vtable) => Ok((ptr.into(), vtable.to_ptr()?)), Value::ScalarPair(ptr, vtable) => Ok((ptr, vtable.read()?.to_ptr()?)),
_ => bug!("expected ptr and vtable, got {:?}", value), _ => bug!("expected ptr and vtable, got {:?}", value),
} }
} }
@ -1017,7 +1023,7 @@ pub trait HasMemory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
fn into_slice( fn into_slice(
&self, &self,
value: Value, value: Value,
) -> EvalResult<'tcx, (Scalar, u64)> { ) -> EvalResult<'tcx, (ScalarMaybeUndef, u64)> {
match value { match value {
Value::ByRef(ref_ptr, align) => { Value::ByRef(ref_ptr, align) => {
let mem = self.memory(); let mem = self.memory();
@ -1025,12 +1031,12 @@ pub trait HasMemory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
let len = mem.read_ptr_sized( let len = mem.read_ptr_sized(
ref_ptr.ptr_offset(mem.pointer_size(), &mem.tcx.data_layout)?.to_ptr()?, ref_ptr.ptr_offset(mem.pointer_size(), &mem.tcx.data_layout)?.to_ptr()?,
align align
)?.to_bits(mem.pointer_size())? as u64; )?.read()?.to_bits(mem.pointer_size())? as u64;
Ok((ptr, len)) Ok((ptr, len))
} }
Value::ScalarPair(ptr, val) => { Value::ScalarPair(ptr, val) => {
let len = val.to_bits(self.memory().pointer_size())?; let len = val.read()?.to_bits(self.memory().pointer_size())?;
Ok((ptr.into(), len as u64)) Ok((ptr, len as u64))
} }
Value::Scalar(_) => bug!("expected ptr and length, got {:?}", value), Value::Scalar(_) => bug!("expected ptr and length, got {:?}", value),
} }
@ -11,8 +11,10 @@ mod step;
mod terminator; mod terminator;
mod traits; mod traits;
pub use self::eval_context::{EvalContext, Frame, StackPopCleanup, pub use self::eval_context::{
TyAndPacked, ValTy}; EvalContext, Frame, StackPopCleanup,
TyAndPacked, ValTy,
};
pub use self::place::{Place, PlaceExtra}; pub use self::place::{Place, PlaceExtra};
@ -34,26 +36,21 @@ pub use self::machine::Machine;
pub use self::memory::{write_target_uint, write_target_int, read_target_uint}; pub use self::memory::{write_target_uint, write_target_int, read_target_uint};
use rustc::mir::interpret::{EvalResult, EvalErrorKind}; use rustc::ty::layout::TyLayout;
use rustc::ty::{Ty, TyCtxt, ParamEnv};
pub fn sign_extend<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> { pub fn sign_extend(value: u128, layout: TyLayout<'_>) -> u128 {
let param_env = ParamEnv::empty();
let layout = tcx.layout_of(param_env.and(ty)).map_err(|layout| EvalErrorKind::Layout(layout))?;
let size = layout.size.bits(); let size = layout.size.bits();
assert!(layout.abi.is_signed()); assert!(layout.abi.is_signed());
// sign extend // sign extend
let shift = 128 - size; let shift = 128 - size;
// shift the unsigned value to the left // shift the unsigned value to the left
// and back to the right as signed (essentially fills with FF on the left) // and back to the right as signed (essentially fills with FF on the left)
Ok((((value << shift) as i128) >> shift) as u128) (((value << shift) as i128) >> shift) as u128
} }
pub fn truncate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> { pub fn truncate(value: u128, layout: TyLayout<'_>) -> u128 {
let param_env = ParamEnv::empty();
let layout = tcx.layout_of(param_env.and(ty)).map_err(|layout| EvalErrorKind::Layout(layout))?;
let size = layout.size.bits(); let size = layout.size.bits();
let shift = 128 - size; let shift = 128 - size;
// truncate (shift left to drop out leftover values, shift right to fill with zeroes) // truncate (shift left to drop out leftover values, shift right to fill with zeroes)
Ok((value << shift) >> shift) (value << shift) >> shift
} }
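`sign_extend` and `truncate` no longer need a `TyCtxt` or an `EvalResult`; they are pure bit twiddling on a value plus the layout's size. The same operations written against a raw bit width, with a couple of worked cases (illustrative signatures, not the interpreter's):

// Pure helpers over a bit width (1..=128) instead of a TyLayout.
fn sign_extend(value: u128, size_bits: u32) -> u128 {
    let shift = 128 - size_bits;
    // shift left, then arithmetic shift right: the sign bit fills the high bits
    (((value << shift) as i128) >> shift) as u128
}

fn truncate(value: u128, size_bits: u32) -> u128 {
    let shift = 128 - size_bits;
    // shift left to drop the high bits, shift right to refill with zeroes
    (value << shift) >> shift
}

fn main() {
    // 0xFF as an i8 is -1; sign-extending to 128 bits gives all ones.
    assert_eq!(sign_extend(0xFF, 8), u128::max_value());
    // Truncating 0x100 to 8 bits drops the ninth bit.
    assert_eq!(truncate(0x100, 8), 0);
}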
@ -1,7 +1,7 @@
use rustc::mir; use rustc::mir;
use rustc::ty::{self, Ty, layout}; use rustc::ty::{self, Ty, layout};
use syntax::ast::FloatTy; use syntax::ast::FloatTy;
use rustc::ty::layout::LayoutOf; use rustc::ty::layout::{LayoutOf, TyLayout};
use rustc_apfloat::ieee::{Double, Single}; use rustc_apfloat::ieee::{Double, Single};
use rustc_apfloat::Float; use rustc_apfloat::Float;
@ -32,7 +32,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
dest_ty: Ty<'tcx>, dest_ty: Ty<'tcx>,
) -> EvalResult<'tcx> { ) -> EvalResult<'tcx> {
let (val, overflowed) = self.binop_with_overflow(op, left, right)?; let (val, overflowed) = self.binop_with_overflow(op, left, right)?;
let val = Value::ScalarPair(val, Scalar::from_bool(overflowed)); let val = Value::ScalarPair(val.into(), Scalar::from_bool(overflowed).into());
let valty = ValTy { let valty = ValTy {
value: val, value: val,
ty: dest_ty, ty: dest_ty,
@ -97,13 +97,13 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
let signed = left_layout.abi.is_signed(); let signed = left_layout.abi.is_signed();
let mut oflo = (r as u32 as u128) != r; let mut oflo = (r as u32 as u128) != r;
let mut r = r as u32; let mut r = r as u32;
let size = left_layout.size.bits() as u32; let size = left_layout.size;
oflo |= r >= size; oflo |= r >= size.bits() as u32;
if oflo { if oflo {
r %= size; r %= size.bits() as u32;
} }
let result = if signed { let result = if signed {
let l = self.sign_extend(l, left_ty)? as i128; let l = self.sign_extend(l, left_layout) as i128;
let result = match bin_op { let result = match bin_op {
Shl => l << r, Shl => l << r,
Shr => l >> r, Shr => l >> r,
@ -117,10 +117,10 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
_ => bug!("it has already been checked that this is a shift op"), _ => bug!("it has already been checked that this is a shift op"),
} }
}; };
let truncated = self.truncate(result, left_ty)?; let truncated = self.truncate(result, left_layout);
return Ok((Scalar::Bits { return Ok((Scalar::Bits {
bits: truncated, bits: truncated,
defined: size as u8, size: size.bytes() as u8,
}, oflo)); }, oflo));
} }
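In the shift hunk above, a shift amount of at least the left operand's width is reported as overflow and then reduced modulo the width before the shift is performed. A tiny standalone rendition of that rule (not the interpreter API):

// Overflowing shift-left over an arbitrary bit width: flag overflow when the
// amount reaches the width, shift by `r % width`, then truncate the result.
fn overflowing_shl_in(width_bits: u32, l: u128, r: u32) -> (u128, bool) {
    let oflo = r >= width_bits;
    let r = r % width_bits;
    let shift_out = 128 - width_bits;
    let result = ((l << r) << shift_out) >> shift_out;
    (result, oflo)
}

fn main() {
    // Shifting an 8-bit value by 9 overflows; the shift actually done is by 1.
    assert_eq!(overflowing_shl_in(8, 0b0100_0001, 9), (0b1000_0010, true));
}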
@ -145,8 +145,8 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
_ => None, _ => None,
}; };
if let Some(op) = op { if let Some(op) = op {
let l = self.sign_extend(l, left_ty)? as i128; let l = self.sign_extend(l, left_layout) as i128;
let r = self.sign_extend(r, right_ty)? as i128; let r = self.sign_extend(r, right_layout) as i128;
return Ok((Scalar::from_bool(op(&l, &r)), false)); return Ok((Scalar::from_bool(op(&l, &r)), false));
} }
let op: Option<fn(i128, i128) -> (i128, bool)> = match bin_op { let op: Option<fn(i128, i128) -> (i128, bool)> = match bin_op {
@ -160,14 +160,14 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
_ => None, _ => None,
}; };
if let Some(op) = op { if let Some(op) = op {
let l128 = self.sign_extend(l, left_ty)? as i128; let l128 = self.sign_extend(l, left_layout) as i128;
let r = self.sign_extend(r, right_ty)? as i128; let r = self.sign_extend(r, right_layout) as i128;
let size = left_layout.size.bits(); let size = left_layout.size;
match bin_op { match bin_op {
Rem | Div => { Rem | Div => {
// int_min / -1 // int_min / -1
if r == -1 && l == (1 << (size - 1)) { if r == -1 && l == (1 << (size.bits() - 1)) {
return Ok((Scalar::Bits { bits: l, defined: size as u8 }, true)); return Ok((Scalar::Bits { bits: l, size: size.bytes() as u8 }, true));
} }
}, },
_ => {}, _ => {},
@ -175,27 +175,27 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
trace!("{}, {}, {}", l, l128, r); trace!("{}, {}, {}", l, l128, r);
let (result, mut oflo) = op(l128, r); let (result, mut oflo) = op(l128, r);
trace!("{}, {}", result, oflo); trace!("{}, {}", result, oflo);
if !oflo && size != 128 { if !oflo && size.bits() != 128 {
let max = 1 << (size - 1); let max = 1 << (size.bits() - 1);
oflo = result >= max || result < -max; oflo = result >= max || result < -max;
} }
let result = result as u128; let result = result as u128;
let truncated = self.truncate(result, left_ty)?; let truncated = self.truncate(result, left_layout);
return Ok((Scalar::Bits { return Ok((Scalar::Bits {
bits: truncated, bits: truncated,
defined: size as u8, size: size.bytes() as u8,
}, oflo)); }, oflo));
} }
} }
if let ty::TyFloat(fty) = left_ty.sty { if let ty::TyFloat(fty) = left_ty.sty {
macro_rules! float_math { macro_rules! float_math {
($ty:path, $bitsize:expr) => {{ ($ty:path, $size:expr) => {{
let l = <$ty>::from_bits(l); let l = <$ty>::from_bits(l);
let r = <$ty>::from_bits(r); let r = <$ty>::from_bits(r);
let bitify = |res: ::rustc_apfloat::StatusAnd<$ty>| Scalar::Bits { let bitify = |res: ::rustc_apfloat::StatusAnd<$ty>| Scalar::Bits {
bits: res.value.to_bits(), bits: res.value.to_bits(),
defined: $bitsize, size: $size,
}; };
let val = match bin_op { let val = match bin_op {
Eq => Scalar::from_bool(l == r), Eq => Scalar::from_bool(l == r),
@ -215,12 +215,12 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
}}; }};
} }
match fty { match fty {
FloatTy::F32 => float_math!(Single, 32), FloatTy::F32 => float_math!(Single, 4),
FloatTy::F64 => float_math!(Double, 64), FloatTy::F64 => float_math!(Double, 8),
} }
} }
let bit_width = self.layout_of(left_ty).unwrap().size.bits() as u8; let size = self.layout_of(left_ty).unwrap().size.bytes() as u8;
// only ints left // only ints left
let val = match bin_op { let val = match bin_op {
@ -232,9 +232,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
Gt => Scalar::from_bool(l > r), Gt => Scalar::from_bool(l > r),
Ge => Scalar::from_bool(l >= r), Ge => Scalar::from_bool(l >= r),
BitOr => Scalar::Bits { bits: l | r, defined: bit_width }, BitOr => Scalar::Bits { bits: l | r, size },
BitAnd => Scalar::Bits { bits: l & r, defined: bit_width }, BitAnd => Scalar::Bits { bits: l & r, size },
BitXor => Scalar::Bits { bits: l ^ r, defined: bit_width }, BitXor => Scalar::Bits { bits: l ^ r, size },
Add | Sub | Mul | Rem | Div => { Add | Sub | Mul | Rem | Div => {
let op: fn(u128, u128) -> (u128, bool) = match bin_op { let op: fn(u128, u128) -> (u128, bool) = match bin_op {
@ -248,10 +248,10 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
_ => bug!(), _ => bug!(),
}; };
let (result, oflo) = op(l, r); let (result, oflo) = op(l, r);
let truncated = self.truncate(result, left_ty)?; let truncated = self.truncate(result, left_layout);
return Ok((Scalar::Bits { return Ok((Scalar::Bits {
bits: truncated, bits: truncated,
defined: bit_width, size,
}, oflo || truncated != result)); }, oflo || truncated != result));
} }
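For the remaining unsigned integer ops, overflow is detected by doing the arithmetic in 128 bits, truncating to the operands' width, and flagging `truncated != result` on top of any overflow of the 128-bit operation itself. A short worked illustration (standalone, hypothetical helper):

// Overflow by width comparison: compute in u128, cut down to the operand
// width, and compare against the untruncated result.
fn overflowing_add_in(width_bits: u32, l: u128, r: u128) -> (u128, bool) {
    let (result, oflo128) = l.overflowing_add(r);
    let shift = 128 - width_bits;
    let truncated = (result << shift) >> shift;
    (truncated, oflo128 || truncated != result)
}

fn main() {
    // 200 + 100 does not fit in a u8 ...
    assert_eq!(overflowing_add_in(8, 200, 100), (44, true));
    // ... but fits comfortably in a u16.
    assert_eq!(overflowing_add_in(16, 200, 100), (300, false));
}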
@ -275,17 +275,16 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
&self, &self,
un_op: mir::UnOp, un_op: mir::UnOp,
val: Scalar, val: Scalar,
ty: Ty<'tcx>, layout: TyLayout<'tcx>,
) -> EvalResult<'tcx, Scalar> { ) -> EvalResult<'tcx, Scalar> {
use rustc::mir::UnOp::*; use rustc::mir::UnOp::*;
use rustc_apfloat::ieee::{Single, Double}; use rustc_apfloat::ieee::{Single, Double};
use rustc_apfloat::Float; use rustc_apfloat::Float;
let size = self.layout_of(ty)?.size; let size = layout.size;
let bytes = val.to_bits(size)?; let bytes = val.to_bits(size)?;
let size = size.bits();
let result_bytes = match (un_op, &ty.sty) { let result_bytes = match (un_op, &layout.ty.sty) {
(Not, ty::TyBool) => !val.to_bool()? as u128, (Not, ty::TyBool) => !val.to_bool()? as u128,
@ -294,13 +293,13 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
(Neg, ty::TyFloat(FloatTy::F32)) => Single::to_bits(-Single::from_bits(bytes)), (Neg, ty::TyFloat(FloatTy::F32)) => Single::to_bits(-Single::from_bits(bytes)),
(Neg, ty::TyFloat(FloatTy::F64)) => Double::to_bits(-Double::from_bits(bytes)), (Neg, ty::TyFloat(FloatTy::F64)) => Double::to_bits(-Double::from_bits(bytes)),
(Neg, _) if bytes == (1 << (size - 1)) => return err!(OverflowNeg), (Neg, _) if bytes == (1 << (size.bits() - 1)) => return err!(OverflowNeg),
(Neg, _) => (-(bytes as i128)) as u128, (Neg, _) => (-(bytes as i128)) as u128,
}; };
Ok(Scalar::Bits { Ok(Scalar::Bits {
bits: self.truncate(result_bytes, ty)?, bits: self.truncate(result_bytes, layout),
defined: size as u8, size: size.bytes() as u8,
}) })
} }
} }
@ -3,7 +3,7 @@ use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::layout::{self, Align, LayoutOf, TyLayout}; use rustc::ty::layout::{self, Align, LayoutOf, TyLayout};
use rustc_data_structures::indexed_vec::Idx; use rustc_data_structures::indexed_vec::Idx;
use rustc::mir::interpret::{GlobalId, Value, Scalar, EvalResult, Pointer}; use rustc::mir::interpret::{GlobalId, Value, Scalar, EvalResult, Pointer, ScalarMaybeUndef};
use super::{EvalContext, Machine, ValTy}; use super::{EvalContext, Machine, ValTy};
use interpret::memory::HasMemory; use interpret::memory::HasMemory;
@ -14,7 +14,7 @@ pub enum Place {
/// A place may have an invalid (integral or undef) pointer, /// A place may have an invalid (integral or undef) pointer,
/// since it might be turned back into a reference /// since it might be turned back into a reference
/// before ever being dereferenced. /// before ever being dereferenced.
ptr: Scalar, ptr: ScalarMaybeUndef,
align: Align, align: Align,
extra: PlaceExtra, extra: PlaceExtra,
}, },
@ -35,10 +35,10 @@ pub enum PlaceExtra {
impl<'tcx> Place { impl<'tcx> Place {
/// Produces a Place that will error if attempted to be read from /// Produces a Place that will error if attempted to be read from
pub fn undef() -> Self { pub fn undef() -> Self {
Self::from_scalar_ptr(Scalar::undef().into(), Align::from_bytes(1, 1).unwrap()) Self::from_scalar_ptr(ScalarMaybeUndef::Undef, Align::from_bytes(1, 1).unwrap())
} }
pub fn from_scalar_ptr(ptr: Scalar, align: Align) -> Self { pub fn from_scalar_ptr(ptr: ScalarMaybeUndef, align: Align) -> Self {
Place::Ptr { Place::Ptr {
ptr, ptr,
align, align,
@ -47,10 +47,10 @@ impl<'tcx> Place {
} }
pub fn from_ptr(ptr: Pointer, align: Align) -> Self { pub fn from_ptr(ptr: Pointer, align: Align) -> Self {
Self::from_scalar_ptr(ptr.into(), align) Self::from_scalar_ptr(ScalarMaybeUndef::Scalar(ptr.into()), align)
} }
pub fn to_ptr_align_extra(self) -> (Scalar, Align, PlaceExtra) { pub fn to_ptr_align_extra(self) -> (ScalarMaybeUndef, Align, PlaceExtra) {
match self { match self {
Place::Ptr { ptr, align, extra } => (ptr, align, extra), Place::Ptr { ptr, align, extra } => (ptr, align, extra),
_ => bug!("to_ptr_and_extra: expected Place::Ptr, got {:?}", self), _ => bug!("to_ptr_and_extra: expected Place::Ptr, got {:?}", self),
@ -58,17 +58,17 @@ impl<'tcx> Place {
} }
} }
pub fn to_ptr_align(self) -> (Scalar, Align) { pub fn to_ptr_align(self) -> (ScalarMaybeUndef, Align) {
let (ptr, align, _extra) = self.to_ptr_align_extra(); let (ptr, align, _extra) = self.to_ptr_align_extra();
(ptr, align) (ptr, align)
} }
/*
pub fn to_ptr(self) -> EvalResult<'tcx, Pointer> { pub fn to_ptr(self) -> EvalResult<'tcx, Pointer> {
// At this point, we forget about the alignment information -- the place has been turned into a reference, // At this point, we forget about the alignment information -- the place has been turned into a reference,
// and no matter where it came from, it now must be aligned. // and no matter where it came from, it now must be aligned.
self.to_ptr_align().0.to_ptr() self.to_ptr_align().0.to_ptr()
} }
*/
pub(super) fn elem_ty_and_len( pub(super) fn elem_ty_and_len(
self, self,
ty: Ty<'tcx>, ty: Ty<'tcx>,
@ -106,7 +106,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
// Might allow this in the future, right now there's no way to do this from Rust code anyway // Might allow this in the future, right now there's no way to do this from Rust code anyway
Local(mir::RETURN_PLACE) => err!(ReadFromReturnPointer), Local(mir::RETURN_PLACE) => err!(ReadFromReturnPointer),
// Directly reading a local will always succeed // Directly reading a local will always succeed
Local(local) => self.frame().get_local(local).map(Some), Local(local) => self.frame().locals[local].access().map(Some),
// No fast path for statics. Reading from statics is rare and would require another // No fast path for statics. Reading from statics is rare and would require another
// Machine function to handle differently in miri. // Machine function to handle differently in miri.
Promoted(_) | Promoted(_) |
@ -129,7 +129,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
let field = base_layout.field(self, field_index)?; let field = base_layout.field(self, field_index)?;
if field.size.bytes() == 0 { if field.size.bytes() == 0 {
return Ok(( return Ok((
Value::Scalar(Scalar::undef()), Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { bits: 0, size: 0 })),
field, field,
)); ));
} }
@ -197,9 +197,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
match place { match place {
Place::Ptr { ptr, align, extra } => { Place::Ptr { ptr, align, extra } => {
assert_eq!(extra, PlaceExtra::None); assert_eq!(extra, PlaceExtra::None);
Ok(Value::ByRef(ptr, align)) Ok(Value::ByRef(ptr.read()?, align))
} }
Place::Local { frame, local } => self.stack[frame].get_local(local), Place::Local { frame, local } => self.stack[frame].locals[local].access(),
} }
} }
@ -220,7 +220,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
})?; })?;
if let Value::ByRef(ptr, align) = val { if let Value::ByRef(ptr, align) = val {
Place::Ptr { Place::Ptr {
ptr, ptr: ptr.into(),
align, align,
extra: PlaceExtra::None, extra: PlaceExtra::None,
} }
@ -238,7 +238,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
}; };
let alloc = Machine::init_static(self, cid)?; let alloc = Machine::init_static(self, cid)?;
Place::Ptr { Place::Ptr {
ptr: Scalar::Ptr(alloc.into()), ptr: ScalarMaybeUndef::Scalar(Scalar::Ptr(alloc.into())),
align: layout.align, align: layout.align,
extra: PlaceExtra::None, extra: PlaceExtra::None,
} }
@ -276,14 +276,13 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
let (base_ptr, base_align, base_extra) = match base { let (base_ptr, base_align, base_extra) = match base {
Place::Ptr { ptr, align, extra } => (ptr, align, extra), Place::Ptr { ptr, align, extra } => (ptr, align, extra),
Place::Local { frame, local } => { Place::Local { frame, local } => {
match (&self.stack[frame].get_local(local)?, &base_layout.abi) { match (self.stack[frame].locals[local].access()?, &base_layout.abi) {
// in case the field covers the entire type, just return the value // in case the field covers the entire type, just return the value
(&Value::Scalar(_), &layout::Abi::Scalar(_)) | (Value::Scalar(_), &layout::Abi::Scalar(_)) |
(&Value::ScalarPair(..), &layout::Abi::ScalarPair(..)) (Value::ScalarPair(..), &layout::Abi::ScalarPair(..))
if offset.bytes() == 0 && field.size == base_layout.size => if offset.bytes() == 0 && field.size == base_layout.size => {
{ return Ok((base, field))
return Ok((base, field)); },
}
_ => self.force_allocation(base)?.to_ptr_align_extra(), _ => self.force_allocation(base)?.to_ptr_align_extra(),
} }
} }
@ -413,7 +412,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
} }
Index(local) => { Index(local) => {
let value = self.frame().get_local(local)?; let value = self.frame().locals[local].access()?;
let ty = self.tcx.types.usize; let ty = self.tcx.types.usize;
let n = self let n = self
.value_to_scalar(ValTy { value, ty })? .value_to_scalar(ValTy { value, ty })?
@ -90,7 +90,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
// Mark locals as alive // Mark locals as alive
StorageLive(local) => { StorageLive(local) => {
let old_val = self.frame_mut().storage_live(local); let old_val = self.storage_live(local)?;
self.deallocate_local(old_val)?; self.deallocate_local(old_val)?;
} }
@ -2,7 +2,7 @@ use rustc::mir::BasicBlock;
use rustc::ty::{self, Ty}; use rustc::ty::{self, Ty};
use syntax::codemap::Span; use syntax::codemap::Span;
use rustc::mir::interpret::{EvalResult, Scalar, Value}; use rustc::mir::interpret::{EvalResult, Value};
use interpret::{Machine, ValTy, EvalContext, Place, PlaceExtra}; use interpret::{Machine, ValTy, EvalContext, Place, PlaceExtra};
impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> { impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
@ -33,7 +33,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
ptr, ptr,
align: _, align: _,
extra: PlaceExtra::None, extra: PlaceExtra::None,
} => ptr.to_value(), } => Value::Scalar(ptr),
_ => bug!("force_allocation broken"), _ => bug!("force_allocation broken"),
}; };
self.drop(val, instance, ty, span, target) self.drop(val, instance, ty, span, target)
@ -51,17 +51,10 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
let instance = match ty.sty { let instance = match ty.sty {
ty::TyDynamic(..) => { ty::TyDynamic(..) => {
let vtable = match arg { if let Value::ScalarPair(_, vtable) = arg {
Value::ScalarPair(_, Scalar::Ptr(vtable)) => vtable, self.read_drop_type_from_vtable(vtable.read()?.to_ptr()?)?
_ => bug!("expected fat ptr, got {:?}", arg), } else {
}; bug!("expected fat ptr, got {:?}", arg);
match self.read_drop_type_from_vtable(vtable)? {
Some(func) => func,
// no drop fn -> bail out
None => {
self.goto_block(target);
return Ok(())
},
} }
} }
_ => instance, _ => instance,
@ -4,7 +4,7 @@ use rustc::ty::layout::{LayoutOf, Size};
use syntax::codemap::Span; use syntax::codemap::Span;
use rustc_target::spec::abi::Abi; use rustc_target::spec::abi::Abi;
use rustc::mir::interpret::{EvalResult, Scalar}; use rustc::mir::interpret::{EvalResult, Scalar, Value};
use super::{EvalContext, Place, Machine, ValTy}; use super::{EvalContext, Place, Machine, ValTy};
use rustc_data_structures::indexed_vec::Idx; use rustc_data_structures::indexed_vec::Idx;
@ -47,7 +47,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
for (index, &const_int) in values.iter().enumerate() { for (index, &const_int) in values.iter().enumerate() {
// Compare using binary_op // Compare using binary_op
let const_int = Scalar::Bits { bits: const_int, defined: 128 }; let const_int = Scalar::Bits { bits: const_int, size: discr_layout.size.bytes() as u8 };
let res = self.binary_op(mir::BinOp::Eq, let res = self.binary_op(mir::BinOp::Eq,
discr_prim, discr_val.ty, discr_prim, discr_val.ty,
const_int, discr_val.ty const_int, discr_val.ty
@ -392,12 +392,12 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
let fn_ptr = self.memory.read_ptr_sized( let fn_ptr = self.memory.read_ptr_sized(
vtable.offset(ptr_size * (idx as u64 + 3), &self)?, vtable.offset(ptr_size * (idx as u64 + 3), &self)?,
ptr_align ptr_align
)?.to_ptr()?; )?.read()?.to_ptr()?;
let instance = self.memory.get_fn(fn_ptr)?; let instance = self.memory.get_fn(fn_ptr)?;
let mut args = args.to_vec(); let mut args = args.to_vec();
let ty = self.layout_of(args[0].ty)?.field(&self, 0)?.ty; let ty = self.layout_of(args[0].ty)?.field(&self, 0)?.ty;
args[0].ty = ty; args[0].ty = ty;
args[0].value = ptr.to_value(); args[0].value = Value::Scalar(ptr);
// recurse with concrete function // recurse with concrete function
self.eval_fn_call(instance, destination, &args, span, sig) self.eval_fn_call(instance, destination, &args, span, sig)
} }
@ -1,6 +1,6 @@
use rustc::ty::{self, Ty}; use rustc::ty::{self, Ty};
use rustc::ty::layout::{Size, Align, LayoutOf}; use rustc::ty::layout::{Size, Align, LayoutOf};
use rustc::mir::interpret::{Scalar, Value, Pointer, EvalResult}; use rustc::mir::interpret::{Scalar, Pointer, EvalResult};
use syntax::ast::Mutability; use syntax::ast::Mutability;
@ -36,25 +36,25 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
let drop = ::monomorphize::resolve_drop_in_place(*self.tcx, ty); let drop = ::monomorphize::resolve_drop_in_place(*self.tcx, ty);
let drop = self.memory.create_fn_alloc(drop); let drop = self.memory.create_fn_alloc(drop);
self.memory.write_ptr_sized_unsigned(vtable, ptr_align, drop.into())?; self.memory.write_ptr_sized_unsigned(vtable, ptr_align, Scalar::Ptr(drop).into())?;
let size_ptr = vtable.offset(ptr_size, &self)?; let size_ptr = vtable.offset(ptr_size, &self)?;
self.memory.write_ptr_sized_unsigned(size_ptr, ptr_align, Scalar::Bits { self.memory.write_ptr_sized_unsigned(size_ptr, ptr_align, Scalar::Bits {
bits: size as u128, bits: size as u128,
defined: ptr_size.bits() as u8, size: ptr_size.bytes() as u8,
})?; }.into())?;
let align_ptr = vtable.offset(ptr_size * 2, &self)?; let align_ptr = vtable.offset(ptr_size * 2, &self)?;
self.memory.write_ptr_sized_unsigned(align_ptr, ptr_align, Scalar::Bits { self.memory.write_ptr_sized_unsigned(align_ptr, ptr_align, Scalar::Bits {
bits: align as u128, bits: align as u128,
defined: ptr_size.bits() as u8, size: ptr_size.bytes() as u8,
})?; }.into())?;
for (i, method) in methods.iter().enumerate() { for (i, method) in methods.iter().enumerate() {
if let Some((def_id, substs)) = *method { if let Some((def_id, substs)) = *method {
let instance = self.resolve(def_id, substs)?; let instance = self.resolve(def_id, substs)?;
let fn_ptr = self.memory.create_fn_alloc(instance); let fn_ptr = self.memory.create_fn_alloc(instance);
let method_ptr = vtable.offset(ptr_size * (3 + i as u64), &self)?; let method_ptr = vtable.offset(ptr_size * (3 + i as u64), &self)?;
self.memory.write_ptr_sized_unsigned(method_ptr, ptr_align, fn_ptr.into())?; self.memory.write_ptr_sized_unsigned(method_ptr, ptr_align, Scalar::Ptr(fn_ptr).into())?;
} }
} }
@ -69,16 +69,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
pub fn read_drop_type_from_vtable( pub fn read_drop_type_from_vtable(
&self, &self,
vtable: Pointer, vtable: Pointer,
) -> EvalResult<'tcx, Option<ty::Instance<'tcx>>> { ) -> EvalResult<'tcx, ty::Instance<'tcx>> {
// we don't care about the pointee type, we just want a pointer // we don't care about the pointee type, we just want a pointer
let pointer_align = self.tcx.data_layout.pointer_align; let pointer_align = self.tcx.data_layout.pointer_align;
let pointer_size = self.tcx.data_layout.pointer_size.bits() as u8; let drop_fn = self.memory.read_ptr_sized(vtable, pointer_align)?.read()?.to_ptr()?;
match self.read_ptr(vtable, pointer_align, self.tcx.mk_nil_ptr())? { self.memory.get_fn(drop_fn)
// some values don't need to call a drop impl, so the value is null
Value::Scalar(Scalar::Bits { bits: 0, defined} ) if defined == pointer_size => Ok(None),
Value::Scalar(Scalar::Ptr(drop_fn)) => self.memory.get_fn(drop_fn).map(Some),
_ => err!(ReadBytesAsPointer),
}
} }
pub fn read_size_and_align_from_vtable( pub fn read_size_and_align_from_vtable(
@ -87,11 +82,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
) -> EvalResult<'tcx, (Size, Align)> { ) -> EvalResult<'tcx, (Size, Align)> {
let pointer_size = self.memory.pointer_size(); let pointer_size = self.memory.pointer_size();
let pointer_align = self.tcx.data_layout.pointer_align; let pointer_align = self.tcx.data_layout.pointer_align;
let size = self.memory.read_ptr_sized(vtable.offset(pointer_size, self)?, pointer_align)?.to_bits(pointer_size)? as u64; let size = self.memory.read_ptr_sized(vtable.offset(pointer_size, self)?, pointer_align)?.read()?.to_bits(pointer_size)? as u64;
let align = self.memory.read_ptr_sized( let align = self.memory.read_ptr_sized(
vtable.offset(pointer_size * 2, self)?, vtable.offset(pointer_size * 2, self)?,
pointer_align pointer_align
)?.to_bits(pointer_size)? as u64; )?.read()?.to_bits(pointer_size)? as u64;
Ok((Size::from_bytes(size), Align::from_bytes(align, align).unwrap())) Ok((Size::from_bytes(size), Align::from_bytes(align, align).unwrap()))
} }
} }
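Together with the trait-object call path earlier in this commit (which fetches method `idx` at `ptr_size * (idx + 3)`), these reads and writes agree on one vtable layout: drop glue at offset 0, size at `ptr_size`, align at `2 * ptr_size`, then the method pointers. `read_drop_type_from_vtable` also no longer tolerates a null entry, so the drop slot is always a real function pointer now. A small sketch of the offsets (illustrative names; the real code works on `Pointer`s into the vtable allocation):

// Byte offsets of vtable entries, assuming the layout used above:
// [drop_in_place, size, align, method 0, method 1, ...]
enum VtableEntry {
    DropInPlace,
    Size,
    Align,
    Method(u64),
}

fn vtable_entry_offset(ptr_size: u64, entry: VtableEntry) -> u64 {
    match entry {
        VtableEntry::DropInPlace => 0,
        VtableEntry::Size => ptr_size,
        VtableEntry::Align => 2 * ptr_size,
        VtableEntry::Method(idx) => ptr_size * (3 + idx),
    }
}

fn main() {
    let ptr_size = 8; // e.g. a 64-bit target
    assert_eq!(vtable_entry_offset(ptr_size, VtableEntry::Align), 16);
    assert_eq!(vtable_entry_offset(ptr_size, VtableEntry::Method(0)), 24);
}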
@ -17,7 +17,7 @@ use rustc::mir::{Constant, Location, Place, Mir, Operand, Rvalue, Local};
use rustc::mir::{NullOp, StatementKind, Statement, BasicBlock, LocalKind}; use rustc::mir::{NullOp, StatementKind, Statement, BasicBlock, LocalKind};
use rustc::mir::{TerminatorKind, ClearCrossCrate, SourceInfo, BinOp, ProjectionElem}; use rustc::mir::{TerminatorKind, ClearCrossCrate, SourceInfo, BinOp, ProjectionElem};
use rustc::mir::visit::{Visitor, PlaceContext}; use rustc::mir::visit::{Visitor, PlaceContext};
use rustc::mir::interpret::{ConstEvalErr, EvalErrorKind}; use rustc::mir::interpret::{ConstEvalErr, EvalErrorKind, ScalarMaybeUndef};
use rustc::ty::{TyCtxt, self, Instance}; use rustc::ty::{TyCtxt, self, Instance};
use rustc::mir::interpret::{Value, Scalar, GlobalId, EvalResult}; use rustc::mir::interpret::{Value, Scalar, GlobalId, EvalResult};
use interpret::EvalContext; use interpret::EvalContext;
@ -368,7 +368,7 @@ impl<'b, 'a, 'tcx:'b> ConstPropagator<'b, 'a, 'tcx> {
type_size_of(self.tcx, self.param_env, ty).and_then(|n| Some(( type_size_of(self.tcx, self.param_env, ty).and_then(|n| Some((
Value::Scalar(Scalar::Bits { Value::Scalar(Scalar::Bits {
bits: n as u128, bits: n as u128,
defined: self.tcx.data_layout.pointer_size.bits() as u8, size: self.tcx.data_layout.pointer_size.bytes() as u8,
}), }),
self.tcx.layout_of(self.param_env.and(self.tcx.types.usize)).ok()?, self.tcx.layout_of(self.param_env.and(self.tcx.types.usize)).ok()?,
span, span,
@ -390,7 +390,7 @@ impl<'b, 'a, 'tcx:'b> ConstPropagator<'b, 'a, 'tcx> {
let prim = self.use_ecx(source_info, |this| { let prim = self.use_ecx(source_info, |this| {
this.ecx.value_to_scalar(ValTy { value: val.0, ty: val.1.ty }) this.ecx.value_to_scalar(ValTy { value: val.0, ty: val.1.ty })
})?; })?;
let val = self.use_ecx(source_info, |this| this.ecx.unary_op(op, prim, val.1.ty))?; let val = self.use_ecx(source_info, |this| this.ecx.unary_op(op, prim, val.1))?;
Some((Value::Scalar(val), place_layout, span)) Some((Value::Scalar(val), place_layout, span))
} }
Rvalue::CheckedBinaryOp(op, ref left, ref right) | Rvalue::CheckedBinaryOp(op, ref left, ref right) |
@ -449,8 +449,8 @@ impl<'b, 'a, 'tcx:'b> ConstPropagator<'b, 'a, 'tcx> {
})?; })?;
let val = if let Rvalue::CheckedBinaryOp(..) = *rvalue { let val = if let Rvalue::CheckedBinaryOp(..) = *rvalue {
Value::ScalarPair( Value::ScalarPair(
val, val.into(),
Scalar::from_bool(overflow), Scalar::from_bool(overflow).into(),
) )
} else { } else {
if overflow { if overflow {
@ -458,7 +458,7 @@ impl<'b, 'a, 'tcx:'b> ConstPropagator<'b, 'a, 'tcx> {
let _: Option<()> = self.use_ecx(source_info, |_| Err(err)); let _: Option<()> = self.use_ecx(source_info, |_| Err(err));
return None; return None;
} }
Value::Scalar(val) Value::Scalar(val.into())
}; };
Some((val, place_layout, span)) Some((val, place_layout, span))
}, },
@ -576,7 +576,7 @@ impl<'b, 'a, 'tcx> Visitor<'tcx> for ConstPropagator<'b, 'a, 'tcx> {
if let TerminatorKind::Assert { expected, msg, cond, .. } = kind { if let TerminatorKind::Assert { expected, msg, cond, .. } = kind {
if let Some(value) = self.eval_operand(cond, source_info) { if let Some(value) = self.eval_operand(cond, source_info) {
trace!("assertion on {:?} should be {:?}", value, expected); trace!("assertion on {:?} should be {:?}", value, expected);
if Value::Scalar(Scalar::from_bool(*expected)) != value.0 { if Value::Scalar(Scalar::from_bool(*expected).into()) != value.0 {
// poison all places this operand references so that further code // poison all places this operand references so that further code
// doesn't use the invalid value // doesn't use the invalid value
match cond { match cond {
@ -613,14 +613,18 @@ impl<'b, 'a, 'tcx> Visitor<'tcx> for ConstPropagator<'b, 'a, 'tcx> {
.eval_operand(len, source_info) .eval_operand(len, source_info)
.expect("len must be const"); .expect("len must be const");
let len = match len.0 { let len = match len.0 {
Value::Scalar(Scalar::Bits { bits, ..}) => bits, Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits {
bits, ..
})) => bits,
_ => bug!("const len not primitive: {:?}", len), _ => bug!("const len not primitive: {:?}", len),
}; };
let index = self let index = self
.eval_operand(index, source_info) .eval_operand(index, source_info)
.expect("index must be const"); .expect("index must be const");
let index = match index.0 { let index = match index.0 {
Value::Scalar(Scalar::Bits { bits, .. }) => bits, Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits {
bits, ..
})) => bits,
_ => bug!("const index not primitive: {:?}", index), _ => bug!("const index not primitive: {:?}", index),
}; };
format!( format!(