rename Scalar::Bits to Scalar::Raw and bits field to data

Ralf Jung 2019-05-25 10:59:09 +02:00
parent 572892c324
commit 082da0c698
17 changed files with 93 additions and 107 deletions
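
The rename is mechanical: the variant `Scalar::Bits { bits, size }` becomes `Scalar::Raw { data, size }`, and every construction and pattern match in the diff below adjusts accordingly. As a quick orientation, here is a minimal standalone sketch of the before/after shape; it models only the renamed variant with a simplified stand-in type, not the real `Scalar<Tag, Id>` from `rustc::mir::interpret`:

    // Simplified stand-in for the compiler's `Scalar`; only the renamed variant is modeled.
    // Before this commit the variant was `Bits { bits: u128, size: u8 }`,
    // after it the variant is `Raw { data: u128, size: u8 }`.
    #[allow(dead_code)]
    #[derive(Debug)]
    enum Scalar {
        Raw { data: u128, size: u8 },
        Ptr(u64), // placeholder for the real `Pointer` payload
    }

    impl Scalar {
        // Mirrors the documented invariant: the first `size` bytes of `data`
        // hold the value and the remaining bytes must be 0.
        fn from_uint(i: u128, size: u8) -> Self {
            let bits = 8 * u32::from(size);
            debug_assert!(bits == 128 || i < (1u128 << bits), "value does not fit");
            Scalar::Raw { data: i, size }
        }
    }

    fn main() {
        // Old call sites wrote `Scalar::Bits { bits: 1, size: 1 }`;
        // after the rename they write `Scalar::Raw { data: 1, size: 1 }`.
        let b = Scalar::from_uint(1, 1);
        match b {
            Scalar::Raw { data, size } => println!("data = {:#x}, size = {}", data, size),
            Scalar::Ptr(_) => println!("a pointer"),
        }
    }

Several call sites below also switch from writing the struct literal by hand to the existing `Scalar::from_uint` and `Scalar::zst` helpers, which enforce the same shape.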

View File

@@ -388,11 +388,11 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
                 val.offset.bytes() as u128
             }
-            Scalar::Bits { bits, size } => {
+            Scalar::Raw { data, size } => {
                 assert_eq!(size as u64, type_size.bytes());
-                debug_assert_eq!(truncate(bits, Size::from_bytes(size.into())), bits,
+                debug_assert_eq!(truncate(data, Size::from_bytes(size.into())), data,
                     "Unexpected value of size {} when writing to memory", size);
-                bits
+                data
             },
         };

View File

@@ -87,11 +87,11 @@ impl<'tcx> ConstValue<'tcx> {
          RustcEncodable, RustcDecodable, Hash, HashStable)]
 pub enum Scalar<Tag=(), Id=AllocId> {
     /// The raw bytes of a simple value.
-    Bits {
-        /// The first `size` bytes are the value.
+    Raw {
+        /// The first `size` bytes of `data` are the value.
         /// Do not try to read less or more bytes than that. The remaining bytes must be 0.
+        data: u128,
         size: u8,
-        bits: u128,
     },
     /// A pointer into an `Allocation`. An `Allocation` in the `memory` module has a list of
@@ -108,16 +108,16 @@ impl<Tag: fmt::Debug, Id: fmt::Debug> fmt::Debug for Scalar<Tag, Id> {
         match self {
             Scalar::Ptr(ptr) =>
                 write!(f, "{:?}", ptr),
-            &Scalar::Bits { bits, size } => {
+            &Scalar::Raw { data, size } => {
                 if size == 0 {
-                    assert_eq!(bits, 0, "ZST value must be 0");
+                    assert_eq!(data, 0, "ZST value must be 0");
                     write!(f, "<ZST>")
                 } else {
-                    assert_eq!(truncate(bits, Size::from_bytes(size as u64)), bits,
-                        "Scalar value {:#x} exceeds size of {} bytes", bits, size);
+                    assert_eq!(truncate(data, Size::from_bytes(size as u64)), data,
+                        "Scalar value {:#x} exceeds size of {} bytes", data, size);
                     // Format as hex number wide enough to fit any value of the given `size`.
-                    // So bits=20, size=1 will be "0x14", but with size=4 it'll be "0x00000014".
-                    write!(f, "0x{:>0width$x}", bits, width=(size*2) as usize)
+                    // So data=20, size=1 will be "0x14", but with size=4 it'll be "0x00000014".
+                    write!(f, "0x{:>0width$x}", data, width=(size*2) as usize)
                 }
             }
         }
@@ -128,7 +128,7 @@ impl<Tag> fmt::Display for Scalar<Tag> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             Scalar::Ptr(_) => write!(f, "a pointer"),
-            Scalar::Bits { bits, .. } => write!(f, "{}", bits),
+            Scalar::Raw { data, .. } => write!(f, "{}", data),
         }
     }
 }
@@ -138,7 +138,7 @@ impl<'tcx> Scalar<()> {
     pub fn with_tag<Tag>(self, new_tag: Tag) -> Scalar<Tag> {
         match self {
             Scalar::Ptr(ptr) => Scalar::Ptr(ptr.with_tag(new_tag)),
-            Scalar::Bits { bits, size } => Scalar::Bits { bits, size },
+            Scalar::Raw { data, size } => Scalar::Raw { data, size },
         }
     }
@@ -155,31 +155,31 @@ impl<'tcx, Tag> Scalar<Tag> {
     pub fn erase_tag(self) -> Scalar {
         match self {
             Scalar::Ptr(ptr) => Scalar::Ptr(ptr.erase_tag()),
-            Scalar::Bits { bits, size } => Scalar::Bits { bits, size },
+            Scalar::Raw { data, size } => Scalar::Raw { data, size },
         }
     }
     #[inline]
     pub fn ptr_null(cx: &impl HasDataLayout) -> Self {
-        Scalar::Bits {
-            bits: 0,
+        Scalar::Raw {
+            data: 0,
             size: cx.data_layout().pointer_size.bytes() as u8,
         }
     }
     #[inline]
     pub fn zst() -> Self {
-        Scalar::Bits { bits: 0, size: 0 }
+        Scalar::Raw { data: 0, size: 0 }
     }
     #[inline]
     pub fn ptr_offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
         let dl = cx.data_layout();
         match self {
-            Scalar::Bits { bits, size } => {
+            Scalar::Raw { data, size } => {
                 assert_eq!(size as u64, dl.pointer_size.bytes());
-                Ok(Scalar::Bits {
-                    bits: dl.offset(bits as u64, i.bytes())? as u128,
+                Ok(Scalar::Raw {
+                    data: dl.offset(data as u64, i.bytes())? as u128,
                     size,
                 })
             }
@@ -191,10 +191,10 @@ impl<'tcx, Tag> Scalar<Tag> {
     pub fn ptr_wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
         let dl = cx.data_layout();
         match self {
-            Scalar::Bits { bits, size } => {
+            Scalar::Raw { data, size } => {
                 assert_eq!(size as u64, dl.pointer_size.bytes());
-                Scalar::Bits {
-                    bits: dl.overflowing_offset(bits as u64, i.bytes()).0 as u128,
+                Scalar::Raw {
+                    data: dl.overflowing_offset(data as u64, i.bytes()).0 as u128,
                     size,
                 }
             }
@@ -206,10 +206,10 @@ impl<'tcx, Tag> Scalar<Tag> {
     pub fn ptr_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
         let dl = cx.data_layout();
         match self {
-            Scalar::Bits { bits, size } => {
+            Scalar::Raw { data, size } => {
                 assert_eq!(size as u64, dl.pointer_size().bytes());
-                Ok(Scalar::Bits {
-                    bits: dl.signed_offset(bits as u64, i)? as u128,
+                Ok(Scalar::Raw {
+                    data: dl.signed_offset(data as u64, i)? as u128,
                     size,
                 })
             }
@@ -221,10 +221,10 @@ impl<'tcx, Tag> Scalar<Tag> {
     pub fn ptr_wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
         let dl = cx.data_layout();
         match self {
-            Scalar::Bits { bits, size } => {
+            Scalar::Raw { data, size } => {
                 assert_eq!(size as u64, dl.pointer_size.bytes());
-                Scalar::Bits {
-                    bits: dl.overflowing_signed_offset(bits as u64, i128::from(i)).0 as u128,
+                Scalar::Raw {
+                    data: dl.overflowing_signed_offset(data as u64, i128::from(i)).0 as u128,
                     size,
                 }
             }
@@ -237,9 +237,9 @@ impl<'tcx, Tag> Scalar<Tag> {
     #[inline]
     pub fn get_ptr_offset(self, cx: &impl HasDataLayout) -> Size {
         match self {
-            Scalar::Bits { bits, size } => {
+            Scalar::Raw { data, size } => {
                 assert_eq!(size as u64, cx.pointer_size().bytes());
-                Size::from_bytes(bits as u64)
+                Size::from_bytes(data as u64)
             }
             Scalar::Ptr(ptr) => ptr.offset,
         }
@@ -248,9 +248,9 @@ impl<'tcx, Tag> Scalar<Tag> {
     #[inline]
     pub fn is_null_ptr(self, cx: &impl HasDataLayout) -> bool {
         match self {
-            Scalar::Bits { bits, size } => {
+            Scalar::Raw { data, size } => {
                 assert_eq!(size as u64, cx.data_layout().pointer_size.bytes());
-                bits == 0
+                data == 0
             },
             Scalar::Ptr(_) => false,
         }
@@ -258,12 +258,12 @@ impl<'tcx, Tag> Scalar<Tag> {
     #[inline]
     pub fn from_bool(b: bool) -> Self {
-        Scalar::Bits { bits: b as u128, size: 1 }
+        Scalar::Raw { data: b as u128, size: 1 }
     }
     #[inline]
     pub fn from_char(c: char) -> Self {
-        Scalar::Bits { bits: c as u128, size: 4 }
+        Scalar::Raw { data: c as u128, size: 4 }
     }
     #[inline]
@@ -271,7 +271,7 @@ impl<'tcx, Tag> Scalar<Tag> {
         let i = i.into();
         debug_assert_eq!(truncate(i, size), i,
             "Unsigned value {} does not fit in {} bits", i, size.bits());
-        Scalar::Bits { bits: i, size: size.bytes() as u8 }
+        Scalar::Raw { data: i, size: size.bytes() as u8 }
     }
     #[inline]
@@ -281,26 +281,26 @@ impl<'tcx, Tag> Scalar<Tag> {
         let truncated = truncate(i as u128, size);
         debug_assert_eq!(sign_extend(truncated, size) as i128, i,
             "Signed value {} does not fit in {} bits", i, size.bits());
-        Scalar::Bits { bits: truncated, size: size.bytes() as u8 }
+        Scalar::Raw { data: truncated, size: size.bytes() as u8 }
     }
     #[inline]
     pub fn from_f32(f: f32) -> Self {
-        Scalar::Bits { bits: f.to_bits() as u128, size: 4 }
+        Scalar::Raw { data: f.to_bits() as u128, size: 4 }
     }
     #[inline]
     pub fn from_f64(f: f64) -> Self {
-        Scalar::Bits { bits: f.to_bits() as u128, size: 8 }
+        Scalar::Raw { data: f.to_bits() as u128, size: 8 }
     }
     #[inline]
     pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
         match self {
-            Scalar::Bits { bits, size } => {
+            Scalar::Raw { data, size } => {
                 assert_eq!(target_size.bytes(), size as u64);
                 assert_ne!(size, 0, "to_bits cannot be used with zsts");
-                Ok(bits)
+                Ok(data)
             }
             Scalar::Ptr(_) => err!(ReadPointerAsBytes),
         }
@@ -309,8 +309,8 @@ impl<'tcx, Tag> Scalar<Tag> {
     #[inline]
     pub fn to_ptr(self) -> EvalResult<'tcx, Pointer<Tag>> {
         match self {
-            Scalar::Bits { bits: 0, .. } => err!(InvalidNullPointerUsage),
-            Scalar::Bits { .. } => err!(ReadBytesAsPointer),
+            Scalar::Raw { data: 0, .. } => err!(InvalidNullPointerUsage),
+            Scalar::Raw { .. } => err!(ReadBytesAsPointer),
             Scalar::Ptr(p) => Ok(p),
         }
     }
@@ -318,7 +318,7 @@ impl<'tcx, Tag> Scalar<Tag> {
     #[inline]
     pub fn is_bits(self) -> bool {
         match self {
-            Scalar::Bits { .. } => true,
+            Scalar::Raw { .. } => true,
             _ => false,
         }
     }
@@ -333,8 +333,8 @@ impl<'tcx, Tag> Scalar<Tag> {
     pub fn to_bool(self) -> EvalResult<'tcx, bool> {
         match self {
-            Scalar::Bits { bits: 0, size: 1 } => Ok(false),
-            Scalar::Bits { bits: 1, size: 1 } => Ok(true),
+            Scalar::Raw { data: 0, size: 1 } => Ok(false),
+            Scalar::Raw { data: 1, size: 1 } => Ok(true),
             _ => err!(InvalidBool),
         }
     }
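
The doc comment on `Raw` and the `truncate` debug assertions above all enforce one invariant: for a value of `size` bytes, only the low `size * 8` bits of `data` may be nonzero. A small self-contained illustration (the helper below imitates the interpreter's `truncate`, which in the real code takes a `Size` rather than a raw byte count):

    // Keep only the low `size_bytes * 8` bits, imitating the interpreter's `truncate` helper.
    fn truncate(value: u128, size_bytes: u8) -> u128 {
        let bits = 8 * u32::from(size_bytes);
        if bits == 128 { value } else { value & ((1u128 << bits) - 1) }
    }

    fn main() {
        // Fine as `Raw { data: 0x14, size: 1 }`: the value already fits in one byte.
        assert_eq!(truncate(0x14, 1), 0x14);
        // `Raw { data: 0x114, size: 1 }` would violate the invariant: high bits are set,
        // which is what the `debug_assert_eq!(truncate(data, ..), data, ..)` checks catch.
        assert_ne!(truncate(0x114, 1), 0x114);
    }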

View File

@@ -1669,10 +1669,7 @@ impl<'tcx> TerminatorKind<'tcx> {
                 .map(|&u| {
                     tcx.mk_const(ty::Const {
                         val: ConstValue::Scalar(
-                            Scalar::Bits {
-                                bits: u,
-                                size: size.bytes() as u8,
-                            }.into(),
+                            Scalar::from_uint(u, size).into(),
                         ),
                         ty: switch_ty,
                     }).to_string().into()

View File

@@ -1001,7 +1001,7 @@ impl<'tcx> CommonConsts<'tcx> {
         CommonConsts {
             err: mk_const(ty::Const {
-                val: ConstValue::Scalar(Scalar::Bits { bits: 0, size: 0 }),
+                val: ConstValue::Scalar(Scalar::zst()),
                 ty: types.err,
             }),
         }

View File

@@ -845,22 +845,22 @@ pub trait PrettyPrinter<'gcx: 'tcx, 'tcx>:
             p!(write("{}", name));
             return Ok(self);
         }
-        if let ConstValue::Scalar(Scalar::Bits { bits, .. }) = ct.val {
+        if let ConstValue::Scalar(Scalar::Raw { data, .. }) = ct.val {
             match ct.ty.sty {
                 ty::Bool => {
-                    p!(write("{}", if bits == 0 { "false" } else { "true" }));
+                    p!(write("{}", if data == 0 { "false" } else { "true" }));
                     return Ok(self);
                 },
                 ty::Float(ast::FloatTy::F32) => {
-                    p!(write("{}f32", Single::from_bits(bits)));
+                    p!(write("{}f32", Single::from_bits(data)));
                     return Ok(self);
                 },
                 ty::Float(ast::FloatTy::F64) => {
-                    p!(write("{}f64", Double::from_bits(bits)));
+                    p!(write("{}f64", Double::from_bits(data)));
                     return Ok(self);
                 },
                 ty::Uint(ui) => {
-                    p!(write("{}{}", bits, ui));
+                    p!(write("{}{}", data, ui));
                     return Ok(self);
                 },
                 ty::Int(i) =>{
@@ -868,11 +868,11 @@ pub trait PrettyPrinter<'gcx: 'tcx, 'tcx>:
                     let size = self.tcx().layout_of(ty::ParamEnv::empty().and(ty))
                         .unwrap()
                         .size;
-                    p!(write("{}{}", sign_extend(bits, size) as i128, i));
+                    p!(write("{}{}", sign_extend(data, size) as i128, i));
                     return Ok(self);
                 },
                 ty::Char => {
-                    p!(write("{:?}", ::std::char::from_u32(bits as u32).unwrap()));
+                    p!(write("{:?}", ::std::char::from_u32(data as u32).unwrap()));
                     return Ok(self);
                 }
                 _ => {},

View File

@@ -613,7 +613,7 @@ where
         (ConstValue::Placeholder(p1), ConstValue::Placeholder(p2)) if p1 == p2 => {
             Ok(a)
         }
-        (ConstValue::Scalar(Scalar::Bits { .. }), _) if a == b => {
+        (ConstValue::Scalar(Scalar::Raw { .. }), _) if a == b => {
             Ok(a)
         }
         (ConstValue::ByRef(..), _) => {

View File

@@ -3,7 +3,7 @@
 use crate::hir;
 use crate::hir::def_id::DefId;
 use crate::infer::canonical::Canonical;
-use crate::mir::interpret::{ConstValue, truncate};
+use crate::mir::interpret::ConstValue;
 use crate::middle::region;
 use polonius_engine::Atom;
 use rustc_data_structures::indexed_vec::Idx;
@@ -2232,14 +2232,12 @@ impl<'tcx> Const<'tcx> {
         let size = tcx.layout_of(ty).unwrap_or_else(|e| {
             panic!("could not compute layout for {:?}: {:?}", ty, e)
         }).size;
-        let truncated = truncate(bits, size);
-        assert_eq!(truncated, bits, "from_bits called with untruncated value");
-        Self::from_scalar(tcx, Scalar::Bits { bits, size: size.bytes() as u8 }, ty.value)
+        Self::from_scalar(tcx, Scalar::from_uint(bits, size), ty.value)
     }
     #[inline]
     pub fn zero_sized(tcx: TyCtxt<'_, '_, 'tcx>, ty: Ty<'tcx>) -> &'tcx Self {
-        Self::from_scalar(tcx, Scalar::Bits { bits: 0, size: 0 }, ty)
+        Self::from_scalar(tcx, Scalar::zst(), ty)
     }
     #[inline]

View File

@@ -294,13 +294,13 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
     ) -> &'ll Value {
         let bitsize = if layout.is_bool() { 1 } else { layout.value.size(self).bits() };
         match cv {
-            Scalar::Bits { size: 0, .. } => {
+            Scalar::Raw { size: 0, .. } => {
                 assert_eq!(0, layout.value.size(self).bytes());
                 self.const_undef(self.type_ix(0))
             },
-            Scalar::Bits { bits, size } => {
+            Scalar::Raw { data, size } => {
                 assert_eq!(size as u64, layout.value.size(self).bytes());
-                let llval = self.const_uint_big(self.type_ix(bitsize), bits);
+                let llval = self.const_uint_big(self.type_ix(bitsize), data);
                 if layout.value == layout::Pointer {
                     unsafe { llvm::LLVMConstIntToPtr(llval, llty) }
                 } else {

View File

@@ -115,7 +115,7 @@ fn op_to_const<'tcx>(
             ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id),
             ptr.offset.bytes(),
         ),
-        Scalar::Bits { .. } => (
+        Scalar::Raw { .. } => (
            ecx.tcx.intern_const_alloc(Allocation::from_byte_aligned_bytes(b"", ())),
            0,
        ),

View File

@@ -1,5 +1,5 @@
 use syntax::ast;
-use rustc::ty::{self, Ty, TyCtxt, ParamEnv};
+use rustc::ty::{self, Ty, TyCtxt, ParamEnv, layout::Size};
 use syntax_pos::symbol::Symbol;
 use rustc::mir::interpret::{ConstValue, Scalar};
@@ -23,10 +23,7 @@ crate fn lit_to_const<'a, 'gcx, 'tcx>(
         trace!("trunc {} with size {} and shift {}", n, width.bits(), 128 - width.bits());
         let result = truncate(n, width);
         trace!("trunc result: {}", result);
-        Ok(ConstValue::Scalar(Scalar::Bits {
-            bits: result,
-            size: width.bytes() as u8,
-        }))
+        Ok(ConstValue::Scalar(Scalar::from_uint(result, width)))
     };
     use rustc::mir::interpret::*;
@@ -50,10 +47,7 @@ crate fn lit_to_const<'a, 'gcx, 'tcx>(
            let id = tcx.allocate_bytes(data);
            ConstValue::Scalar(Scalar::Ptr(id.into()))
        },
-        LitKind::Byte(n) => ConstValue::Scalar(Scalar::Bits {
-            bits: n as u128,
-            size: 1,
-        }),
+        LitKind::Byte(n) => ConstValue::Scalar(Scalar::from_uint(n, Size::from_bytes(1))),
        LitKind::Int(n, _) if neg => {
            let n = n as i128;
            let n = n.overflowing_neg().0;
@@ -84,7 +78,7 @@ fn parse_float<'tcx>(
     let num = num.as_str();
     use rustc_apfloat::ieee::{Single, Double};
     use rustc_apfloat::Float;
-    let (bits, size) = match fty {
+    let (data, size) = match fty {
         ast::FloatTy::F32 => {
             num.parse::<f32>().map_err(|_| ())?;
             let mut f = num.parse::<Single>().unwrap_or_else(|e| {
@@ -107,5 +101,6 @@ fn parse_float<'tcx>(
         }
     };
-    Ok(ConstValue::Scalar(Scalar::Bits { bits, size }))
+    // We trust that `data` is properly truncated.
+    Ok(ConstValue::Scalar(Scalar::Raw { data, size }))
 }

View File

@@ -137,22 +137,22 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
         match val {
             Scalar::Ptr(ptr) => self.cast_from_ptr(ptr, dest_layout.ty),
-            Scalar::Bits { bits, size } => {
+            Scalar::Raw { data, size } => {
                 debug_assert_eq!(size as u64, src_layout.size.bytes());
-                debug_assert_eq!(truncate(bits, Size::from_bytes(size.into())), bits,
+                debug_assert_eq!(truncate(data, Size::from_bytes(size.into())), data,
                     "Unexpected value of size {} before casting", size);
                 let res = match src_layout.ty.sty {
-                    Float(fty) => self.cast_from_float(bits, fty, dest_layout.ty)?,
-                    _ => self.cast_from_int(bits, src_layout, dest_layout)?,
+                    Float(fty) => self.cast_from_float(data, fty, dest_layout.ty)?,
+                    _ => self.cast_from_int(data, src_layout, dest_layout)?,
                 };
                 // Sanity check
                 match res {
                     Scalar::Ptr(_) => bug!("Fabricated a ptr value from an int...?"),
-                    Scalar::Bits { bits, size } => {
+                    Scalar::Raw { data, size } => {
                         debug_assert_eq!(size as u64, dest_layout.size.bytes());
-                        debug_assert_eq!(truncate(bits, Size::from_bytes(size.into())), bits,
+                        debug_assert_eq!(truncate(data, Size::from_bytes(size.into())), data,
                             "Unexpected value of size {} after casting", size);
                     }
                 }

View File

@@ -12,7 +12,6 @@ use std::borrow::Cow;
 use rustc::ty::{self, Instance, ParamEnv, query::TyCtxtAt};
 use rustc::ty::layout::{Align, TargetDataLayout, Size, HasDataLayout};
-pub use rustc::mir::interpret::{truncate, write_target_uint, read_target_uint};
 use rustc_data_structures::fx::{FxHashSet, FxHashMap};
 use syntax::ast::Mutability;
@@ -255,15 +254,15 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
                 let align = self.check_bounds_ptr(ptr, InboundsCheck::MaybeDead)?;
                 (ptr.offset.bytes(), align)
             }
-            Scalar::Bits { bits, size } => {
+            Scalar::Raw { data, size } => {
                 assert_eq!(size as u64, self.pointer_size().bytes());
-                assert!(bits < (1u128 << self.pointer_size().bits()));
+                assert!(data < (1u128 << self.pointer_size().bits()));
                 // check this is not NULL
-                if bits == 0 {
+                if data == 0 {
                     return err!(InvalidNullPointerUsage);
                 }
                 // the "base address" is 0 and hence always aligned
-                (bits as u64, required_align)
+                (data as u64, required_align)
             }
         };
         // Check alignment

View File

@@ -649,7 +649,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
                     }
                     (dataful_variant.as_u32() as u128, dataful_variant)
                 },
-                ScalarMaybeUndef::Scalar(Scalar::Bits { bits: raw_discr, size }) => {
+                ScalarMaybeUndef::Scalar(Scalar::Raw { data: raw_discr, size }) => {
                     assert_eq!(size as u64, discr_val.layout.size.bytes());
                     let adjusted_discr = raw_discr.wrapping_sub(niche_start)
                         .wrapping_add(variants_start);

View File

@@ -686,7 +686,7 @@ where
             Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Ptr(_))) =>
                 assert_eq!(self.pointer_size(), dest.layout.size,
                     "Size mismatch when writing pointer"),
-            Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { size, .. })) =>
+            Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Raw { size, .. })) =>
                 assert_eq!(Size::from_bytes(size.into()), dest.layout.size,
                     "Size mismatch when writing bits"),
             Immediate::Scalar(ScalarMaybeUndef::Undef) => {}, // undef can have any size

View File

@@ -186,9 +186,9 @@ impl<'a, Ctx> Snapshot<'a, Ctx> for Scalar
     fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
         match self {
             Scalar::Ptr(p) => Scalar::Ptr(p.snapshot(ctx)),
-            Scalar::Bits{ size, bits } => Scalar::Bits {
+            Scalar::Raw{ size, data } => Scalar::Raw {
+                data: *data,
                 size: *size,
-                bits: *bits,
             },
         }
     }

View File

@@ -509,9 +509,9 @@ impl<'rt, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>>
                     );
                 }
             }
-            Scalar::Bits { bits, size } => {
+            Scalar::Raw { data, size } => {
                 assert_eq!(size as u64, op.layout.size.bytes());
-                bits
+                data
             }
         };
         // Now compare. This is slightly subtle because this is a special "wrap-around" range.

View File

@@ -378,10 +378,7 @@ impl<'a, 'mir, 'tcx> ConstPropagator<'a, 'mir, 'tcx> {
             type_size_of(self.tcx, self.param_env, ty).and_then(|n| Some(
                 ImmTy {
                     imm: Immediate::Scalar(
-                        Scalar::Bits {
-                            bits: n as u128,
-                            size: self.tcx.data_layout.pointer_size.bytes() as u8,
-                        }.into()
+                        Scalar::from_uint(n, self.tcx.data_layout.pointer_size).into()
                     ),
                     layout: self.tcx.layout_of(self.param_env.and(self.tcx.types.usize)).ok()?,
                 }.into()
@@ -700,18 +697,18 @@ impl<'b, 'a, 'tcx> MutVisitor<'tcx> for ConstPropagator<'b, 'a, 'tcx> {
                 .eval_operand(len, source_info)
                 .expect("len must be const");
             let len = match self.ecx.read_scalar(len) {
-                Ok(ScalarMaybeUndef::Scalar(Scalar::Bits {
-                    bits, ..
-                })) => bits,
+                Ok(ScalarMaybeUndef::Scalar(Scalar::Raw {
+                    data, ..
+                })) => data,
                 other => bug!("const len not primitive: {:?}", other),
             };
             let index = self
                 .eval_operand(index, source_info)
                 .expect("index must be const");
             let index = match self.ecx.read_scalar(index) {
-                Ok(ScalarMaybeUndef::Scalar(Scalar::Bits {
-                    bits, ..
-                })) => bits,
+                Ok(ScalarMaybeUndef::Scalar(Scalar::Raw {
+                    data, ..
+                })) => data,
                 other => bug!("const index not primitive: {:?}", other),
             };
             format!(