Miri: refactor new allocation tagging

This commit is contained in:
Ralf Jung 2019-04-15 10:05:13 +02:00
parent ee621f4232
commit 19485cc101
9 changed files with 99 additions and 164 deletions

View File

@ -45,12 +45,10 @@ pub struct Allocation<Tag=(),Extra=()> {
}
pub trait AllocationExtra<Tag, MemoryExtra>: ::std::fmt::Debug + Clone {
/// Hook to initialize the extra data when an allocation gets created.
fn memory_allocated(
_size: Size,
_memory_extra: &MemoryExtra
) -> Self;
pub trait AllocationExtra<Tag>: ::std::fmt::Debug + Clone {
// There is no constructor in here because the constructor's type depends
// on `MemoryKind`, and making things sufficiently generic leads to painful
// inference failure.
/// Hook for performing extra checks on a memory read access.
///
@ -88,15 +86,8 @@ pub trait AllocationExtra<Tag, MemoryExtra>: ::std::fmt::Debug + Clone {
}
}
impl AllocationExtra<(), ()> for () {
#[inline(always)]
fn memory_allocated(
_size: Size,
_memory_extra: &()
) -> Self {
()
}
}
// For Tag=() and no extra state, we have a trivial implementation.
impl AllocationExtra<()> for () { }
impl<Tag, Extra> Allocation<Tag, Extra> {
/// Creates a read-only allocation initialized by the given bytes
@ -159,7 +150,7 @@ impl<'tcx, Tag, Extra> Allocation<Tag, Extra> {
}
/// Byte accessors
impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
/// The last argument controls whether we error out when there are undefined
/// or pointer bytes. You should never call this, call `get_bytes` or
/// `get_bytes_with_undef_and_ptr` instead,
@ -167,15 +158,13 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
/// This function also guarantees that the resulting pointer will remain stable
/// even when new allocations are pushed to the `HashMap`. `copy_repeatedly` relies
/// on that.
fn get_bytes_internal<MemoryExtra>(
fn get_bytes_internal(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
check_defined_and_ptr: bool,
) -> EvalResult<'tcx, &[u8]>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
self.check_bounds(cx, ptr, size)?;
@ -196,14 +185,12 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
}
#[inline]
pub fn get_bytes<MemoryExtra>(
pub fn get_bytes(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
) -> EvalResult<'tcx, &[u8]>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
self.get_bytes_internal(cx, ptr, size, true)
}
@ -211,28 +198,24 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
/// It is the caller's responsibility to handle undefined and pointer bytes.
/// However, this still checks that there are no relocations on the *edges*.
#[inline]
pub fn get_bytes_with_undef_and_ptr<MemoryExtra>(
pub fn get_bytes_with_undef_and_ptr(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
) -> EvalResult<'tcx, &[u8]>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
self.get_bytes_internal(cx, ptr, size, false)
}
/// Just calling this already marks everything as defined and removes relocations,
/// so be sure to actually put data there!
pub fn get_bytes_mut<MemoryExtra>(
pub fn get_bytes_mut(
&mut self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
) -> EvalResult<'tcx, &mut [u8]>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
assert_ne!(size.bytes(), 0, "0-sized accesses should never even get a `Pointer`");
self.check_bounds(cx, ptr, size)?;
@ -250,16 +233,14 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
}
/// Reading and writing
impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
/// Reads bytes until a `0` is encountered. Will error if the end of the allocation is reached
/// before a `0` is found.
pub fn read_c_str<MemoryExtra>(
pub fn read_c_str(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
) -> EvalResult<'tcx, &[u8]>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
let offset = ptr.offset.bytes() as usize;
@ -278,15 +259,13 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
/// Validates that `ptr.offset` and `ptr.offset + size` do not point to the middle of a
/// relocation. If `allow_ptr_and_undef` is `false`, also enforces that the memory in the
/// given range contains neither relocations nor undef bytes.
pub fn check_bytes<MemoryExtra>(
pub fn check_bytes(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
allow_ptr_and_undef: bool,
) -> EvalResult<'tcx>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
// Check bounds and relocations on the edges
self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
@ -301,14 +280,12 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
/// Writes `src` to the memory starting at `ptr.offset`.
///
/// Will do bounds checks on the allocation.
pub fn write_bytes<MemoryExtra>(
pub fn write_bytes(
&mut self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
src: &[u8],
) -> EvalResult<'tcx>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
let bytes = self.get_bytes_mut(cx, ptr, Size::from_bytes(src.len() as u64))?;
bytes.clone_from_slice(src);
@ -316,15 +293,13 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
}
/// Sets `count` bytes starting at `ptr.offset` with `val`. Basically `memset`.
pub fn write_repeat<MemoryExtra>(
pub fn write_repeat(
&mut self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
val: u8,
count: Size
) -> EvalResult<'tcx>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
let bytes = self.get_bytes_mut(cx, ptr, count)?;
for b in bytes {
@ -341,14 +316,12 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
/// being valid for ZSTs
///
/// Note: This function does not do *any* alignment checks, you need to do these before calling
pub fn read_scalar<MemoryExtra>(
pub fn read_scalar(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size
) -> EvalResult<'tcx, ScalarMaybeUndef<Tag>>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
// get_bytes_unchecked tests relocation edges
let bytes = self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
@ -379,13 +352,11 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
}
/// Note: This function does not do *any* alignment checks, you need to do these before calling
pub fn read_ptr_sized<MemoryExtra>(
pub fn read_ptr_sized(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
) -> EvalResult<'tcx, ScalarMaybeUndef<Tag>>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
self.read_scalar(cx, ptr, cx.data_layout().pointer_size)
}
@ -398,15 +369,13 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
/// being valid for ZSTs
///
/// Note: This function does not do *any* alignment checks, you need to do these before calling
pub fn write_scalar<MemoryExtra>(
pub fn write_scalar(
&mut self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
val: ScalarMaybeUndef<Tag>,
type_size: Size,
) -> EvalResult<'tcx>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
let val = match val {
ScalarMaybeUndef::Scalar(scalar) => scalar,
@ -446,14 +415,12 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
}
/// Note: This function does not do *any* alignment checks, you need to do these before calling
pub fn write_ptr_sized<MemoryExtra>(
pub fn write_ptr_sized(
&mut self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
val: ScalarMaybeUndef<Tag>
) -> EvalResult<'tcx>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
let ptr_size = cx.data_layout().pointer_size;
self.write_scalar(cx, ptr.into(), val, ptr_size)

View File

@ -94,11 +94,17 @@ impl<'tcx> Pointer<()> {
Pointer { alloc_id, offset, tag: () }
}
#[inline(always)]
pub fn with_tag<Tag>(self, tag: Tag) -> Pointer<Tag>
{
Pointer::new_with_tag(self.alloc_id, self.offset, tag)
}
#[inline(always)]
pub fn with_default_tag<Tag>(self) -> Pointer<Tag>
where Tag: Default
{
Pointer::new_with_tag(self.alloc_id, self.offset, Default::default())
self.with_tag(Tag::default())
}
}

View File

@ -119,13 +119,18 @@ impl<Tag> fmt::Display for Scalar<Tag> {
impl<'tcx> Scalar<()> {
#[inline]
pub fn with_tag<Tag>(self, new_tag: Tag) -> Scalar<Tag> {
match self {
Scalar::Ptr(ptr) => Scalar::Ptr(ptr.with_tag(new_tag)),
Scalar::Bits { bits, size } => Scalar::Bits { bits, size },
}
}
#[inline(always)]
pub fn with_default_tag<Tag>(self) -> Scalar<Tag>
where Tag: Default
{
match self {
Scalar::Ptr(ptr) => Scalar::Ptr(ptr.with_default_tag()),
Scalar::Bits { bits, size } => Scalar::Bits { bits, size },
}
self.with_tag(Tag::default())
}
}
@ -138,14 +143,6 @@ impl<'tcx, Tag> Scalar<Tag> {
}
}
#[inline]
pub fn with_tag(self, new_tag: Tag) -> Self {
match self {
Scalar::Ptr(ptr) => Scalar::Ptr(Pointer { tag: new_tag, ..ptr }),
Scalar::Bits { bits, size } => Scalar::Bits { bits, size },
}
}
#[inline]
pub fn ptr_null(cx: &impl HasDataLayout) -> Self {
Scalar::Bits {
@ -434,13 +431,18 @@ impl<Tag> fmt::Display for ScalarMaybeUndef<Tag> {
impl<'tcx> ScalarMaybeUndef<()> {
#[inline]
pub fn with_tag<Tag>(self, new_tag: Tag) -> ScalarMaybeUndef<Tag> {
match self {
ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.with_tag(new_tag)),
ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
}
}
#[inline(always)]
pub fn with_default_tag<Tag>(self) -> ScalarMaybeUndef<Tag>
where Tag: Default
{
match self {
ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.with_default_tag()),
ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
}
self.with_tag(Tag::default())
}
}

View File

@ -11,7 +11,7 @@ use rustc::hir::def::Def;
use rustc::mir::interpret::{ConstEvalErr, ErrorHandled};
use rustc::mir;
use rustc::ty::{self, TyCtxt, query::TyCtxtAt};
use rustc::ty::layout::{self, LayoutOf, VariantIdx};
use rustc::ty::layout::{self, LayoutOf, VariantIdx, Size};
use rustc::ty::subst::Subst;
use rustc::traits::Reveal;
use rustc::util::common::ErrorReported;
@ -21,7 +21,7 @@ use syntax::ast::Mutability;
use syntax::source_map::{Span, DUMMY_SP};
use crate::interpret::{self,
PlaceTy, MPlaceTy, MemPlace, OpTy, ImmTy, Immediate, Scalar, Pointer,
PlaceTy, MPlaceTy, MemPlace, OpTy, ImmTy, Immediate, Scalar,
RawConst, ConstValue,
EvalResult, EvalError, InterpError, GlobalId, InterpretCx, StackPopCleanup,
Allocation, AllocId, MemoryKind,
@ -406,6 +406,15 @@ impl<'a, 'mir, 'tcx> interpret::Machine<'a, 'mir, 'tcx>
Cow::Borrowed(alloc)
}
#[inline(always)]
fn new_allocation(
_size: Size,
_extra: &Self::MemoryExtra,
_kind: MemoryKind<!>,
) -> (Self::AllocExtra, Self::PointerTag) {
((), ())
}
fn box_alloc(
_ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
_dest: PlaceTy<'tcx>,
@ -439,15 +448,6 @@ impl<'a, 'mir, 'tcx> interpret::Machine<'a, 'mir, 'tcx>
)
}
#[inline(always)]
fn tag_new_allocation(
_ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
ptr: Pointer,
_kind: MemoryKind<Self::MemoryKinds>,
) -> Pointer {
ptr
}
#[inline(always)]
fn stack_push(
_ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,

View File

@ -7,11 +7,11 @@ use std::hash::Hash;
use rustc::hir::{self, def_id::DefId};
use rustc::mir;
use rustc::ty::{self, query::TyCtxtAt};
use rustc::ty::{self, query::TyCtxtAt, layout::Size};
use super::{
Allocation, AllocId, EvalResult, Scalar, AllocationExtra,
InterpretCx, PlaceTy, MPlaceTy, OpTy, ImmTy, Pointer, MemoryKind,
InterpretCx, PlaceTy, MPlaceTy, OpTy, ImmTy, MemoryKind,
};
/// Whether this kind of memory is allowed to leak
@ -76,7 +76,7 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized {
type MemoryExtra: Default;
/// Extra data stored in every allocation.
type AllocExtra: AllocationExtra<Self::PointerTag, Self::MemoryExtra> + 'static;
type AllocExtra: AllocationExtra<Self::PointerTag> + 'static;
/// Memory's allocation map
type MemoryMap:
@ -139,18 +139,6 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized {
memory_extra: &Self::MemoryExtra,
) -> EvalResult<'tcx, Cow<'tcx, Allocation<Self::PointerTag, Self::AllocExtra>>>;
/// Called to turn an allocation obtained from the `tcx` into one that has
/// the right type for this machine.
///
/// This should avoid copying if no work has to be done! If this returns an owned
/// allocation (because a copy had to be done to add tags or metadata), machine memory will
/// cache the result. (This relies on `AllocMap::get_or` being able to add the
/// owned allocation to the map even when the map is shared.)
fn adjust_static_allocation<'b>(
alloc: &'b Allocation,
memory_extra: &Self::MemoryExtra,
) -> Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>;
/// Called for all binary operations on integer(-like) types when one operand is a pointer
/// value, and for the `Offset` operation that is inherently about pointers.
///
@ -168,12 +156,24 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized {
dest: PlaceTy<'tcx, Self::PointerTag>,
) -> EvalResult<'tcx>;
/// Adds the tag for a newly allocated pointer.
fn tag_new_allocation(
ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
ptr: Pointer,
/// Called to turn an allocation obtained from the `tcx` into one that has
/// the right type for this machine.
///
/// This should avoid copying if no work has to be done! If this returns an owned
/// allocation (because a copy had to be done to add tags or metadata), machine memory will
/// cache the result. (This relies on `AllocMap::get_or` being able to add the
/// owned allocation to the map even when the map is shared.)
fn adjust_static_allocation<'b>(
alloc: &'b Allocation,
memory_extra: &Self::MemoryExtra,
) -> Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>;
/// Computes the extra state and the tag for a new allocation.
fn new_allocation(
size: Size,
extra: &Self::MemoryExtra,
kind: MemoryKind<Self::MemoryKinds>,
) -> Pointer<Self::PointerTag>;
) -> (Self::AllocExtra, Self::PointerTag);
/// Executed when evaluating the `*` operator: Following a reference.
/// This has the chance to adjust the tag. It should not change anything else!

View File

@ -132,9 +132,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
size: Size,
align: Align,
kind: MemoryKind<M::MemoryKinds>,
) -> Pointer {
let extra = AllocationExtra::memory_allocated(size, &self.extra);
Pointer::from(self.allocate_with(Allocation::undef(size, align, extra), kind))
) -> Pointer<M::PointerTag> {
let (extra, tag) = M::new_allocation(size, &self.extra, kind);
Pointer::from(self.allocate_with(Allocation::undef(size, align, extra), kind)).with_tag(tag)
}
pub fn reallocate(
@ -145,7 +145,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
new_size: Size,
new_align: Align,
kind: MemoryKind<M::MemoryKinds>,
) -> EvalResult<'tcx, Pointer> {
) -> EvalResult<'tcx, Pointer<M::PointerTag>> {
if ptr.offset.bytes() != 0 {
return err!(ReallocateNonBasePtr);
}
@ -156,7 +156,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
self.copy(
ptr.into(),
old_align,
new_ptr.with_default_tag().into(),
new_ptr.into(),
new_align,
old_size.min(new_size),
/*nonoverlapping*/ true,

View File

@ -31,19 +31,6 @@ pub enum Immediate<Tag=(), Id=AllocId> {
ScalarPair(ScalarMaybeUndef<Tag, Id>, ScalarMaybeUndef<Tag, Id>),
}
impl Immediate {
#[inline]
pub fn with_default_tag<Tag>(self) -> Immediate<Tag>
where Tag: Default
{
match self {
Immediate::Scalar(x) => Immediate::Scalar(x.with_default_tag()),
Immediate::ScalarPair(x, y) =>
Immediate::ScalarPair(x.with_default_tag(), y.with_default_tag()),
}
}
}
impl<'tcx, Tag> Immediate<Tag> {
#[inline]
pub fn from_scalar(val: Scalar<Tag>) -> Self {
@ -142,18 +129,6 @@ pub enum Operand<Tag=(), Id=AllocId> {
Indirect(MemPlace<Tag, Id>),
}
impl Operand {
#[inline]
pub fn with_default_tag<Tag>(self) -> Operand<Tag>
where Tag: Default
{
match self {
Operand::Immediate(x) => Operand::Immediate(x.with_default_tag()),
Operand::Indirect(x) => Operand::Indirect(x.with_default_tag()),
}
}
}
impl<Tag> Operand<Tag> {
#[inline]
pub fn erase_tag(self) -> Operand
@ -554,16 +529,17 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
// We rely on mutability being set correctly in that allocation to prevent writes
// where none should happen -- and for `static mut`, we copy on demand anyway.
Operand::Indirect(
MemPlace::from_ptr(ptr, alloc.align)
).with_default_tag()
MemPlace::from_ptr(ptr.with_default_tag(), alloc.align)
)
},
ConstValue::Slice(a, b) =>
Operand::Immediate(Immediate::ScalarPair(
a.into(),
Scalar::from_uint(b, self.tcx.data_layout.pointer_size).into(),
)).with_default_tag(),
a.with_default_tag().into(),
Scalar::from_uint(b, self.tcx.data_layout.pointer_size)
.with_default_tag().into(),
)),
ConstValue::Scalar(x) =>
Operand::Immediate(Immediate::Scalar(x.into())).with_default_tag(),
Operand::Immediate(Immediate::Scalar(x.with_default_tag().into())),
ConstValue::Unevaluated(def_id, substs) => {
let instance = self.resolve(def_id, substs)?;
return Ok(OpTy::from(self.const_eval_raw(GlobalId {

View File

@ -83,23 +83,19 @@ impl<'tcx, Tag> From<MPlaceTy<'tcx, Tag>> for PlaceTy<'tcx, Tag> {
}
}
impl MemPlace {
impl<Tag> MemPlace<Tag> {
/// Replace ptr tag, maintain vtable tag (if any)
#[inline]
pub fn with_default_tag<Tag>(self) -> MemPlace<Tag>
where Tag: Default
{
pub fn replace_tag(self, new_tag: Tag) -> Self {
MemPlace {
ptr: self.ptr.with_default_tag(),
ptr: self.ptr.erase_tag().with_tag(new_tag),
align: self.align,
meta: self.meta.map(Scalar::with_default_tag),
meta: self.meta,
}
}
}
impl<Tag> MemPlace<Tag> {
#[inline]
pub fn erase_tag(self) -> MemPlace
{
pub fn erase_tag(self) -> MemPlace {
MemPlace {
ptr: self.ptr.erase_tag(),
align: self.align,
@ -107,16 +103,6 @@ impl<Tag> MemPlace<Tag> {
}
}
#[inline]
pub fn with_tag(self, new_tag: Tag) -> Self
{
MemPlace {
ptr: self.ptr.with_tag(new_tag),
align: self.align,
meta: self.meta,
}
}
#[inline(always)]
pub fn from_scalar_ptr(ptr: Scalar<Tag>, align: Align) -> Self {
MemPlace {
@ -189,11 +175,11 @@ impl<'tcx, Tag> MPlaceTy<'tcx, Tag> {
}
}
/// Replace ptr tag, maintain vtable tag (if any)
#[inline]
pub fn with_tag(self, new_tag: Tag) -> Self
{
pub fn replace_tag(self, new_tag: Tag) -> Self {
MPlaceTy {
mplace: self.mplace.with_tag(new_tag),
mplace: self.mplace.replace_tag(new_tag),
layout: self.layout,
}
}
@ -312,7 +298,7 @@ where
M: Machine<'a, 'mir, 'tcx, PointerTag=Tag>,
// FIXME: Working around https://github.com/rust-lang/rust/issues/24159
M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation<Tag, M::AllocExtra>)>,
M::AllocExtra: AllocationExtra<Tag, M::MemoryExtra>,
M::AllocExtra: AllocationExtra<Tag>,
{
/// Take a value, which represents a (thin or fat) reference, and make it a place.
/// Alignment is just based on the type. This is the inverse of `MemPlace::to_ref()`.
@ -943,7 +929,6 @@ where
let (size, align) = self.size_and_align_of(meta, local_layout)?
.expect("Cannot allocate for non-dyn-sized type");
let ptr = self.memory.allocate(size, align, MemoryKind::Stack);
let ptr = M::tag_new_allocation(self, ptr, MemoryKind::Stack);
let mplace = MemPlace { ptr: ptr.into(), align, meta };
if let Some(value) = old_val {
// Preserve old value.
@ -981,7 +966,6 @@ where
kind: MemoryKind<M::MemoryKinds>,
) -> MPlaceTy<'tcx, M::PointerTag> {
let ptr = self.memory.allocate(layout.size, layout.align.abi, kind);
let ptr = M::tag_new_allocation(self, ptr, kind);
MPlaceTy::from_aligned_ptr(ptr, layout)
}

View File

@ -52,7 +52,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
ptr_size * (3 + methods.len() as u64),
ptr_align,
MemoryKind::Vtable,
).with_default_tag();
);
let tcx = &*self.tcx;
let drop = crate::monomorphize::resolve_drop_in_place(*tcx, ty);