Auto merge of #62428 - Centril:rollup-2udow5e, r=Centril

Rollup of 7 pull requests

Successful merges:

 - #62151 (Update linked OpenSSL version)
 - #62245 (Miri engine: support extra function (pointer) values)
 - #62257 (forward read_c_str method from Memory to Alloc)
 - #62264 (Fix perf regression from Miri Machine trait changes)
 - #62296 (request at least ptr-size alignment from posix_memalign)
 - #62329 (Remove support for 1-token lookahead from the lexer)
 - #62377 (Add test for ICE #62375)

Failed merges:

r? @ghost
This commit is contained in:
bors 2019-07-06 02:58:36 +00:00
commit b820c76174
25 changed files with 510 additions and 384 deletions

View File

@ -1835,7 +1835,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "openssl-src"
version = "111.1.0+1.1.1a"
version = "111.3.0+1.1.1c"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1848,7 +1848,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
"openssl-src 111.1.0+1.1.1a (registry+https://github.com/rust-lang/crates.io-index)",
"openssl-src 111.3.0+1.1.1c (registry+https://github.com/rust-lang/crates.io-index)",
"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"vcpkg 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -4384,7 +4384,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum opener 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "998c59e83d9474c01127a96e023b7a04bb061dd286bf8bb939d31dc8d31a7448"
"checksum openssl 0.10.16 (registry+https://github.com/rust-lang/crates.io-index)" = "ec7bd7ca4cce6dbdc77e7c1230682740d307d1218a87fb0349a571272be749f9"
"checksum openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de"
"checksum openssl-src 111.1.0+1.1.1a (registry+https://github.com/rust-lang/crates.io-index)" = "26bb632127731bf4ac49bf86a5dde12d2ca0918c2234fc39d79d4da2ccbc6da7"
"checksum openssl-src 111.3.0+1.1.1c (registry+https://github.com/rust-lang/crates.io-index)" = "53ed5f31d294bdf5f7a4ba0a206c2754b0f60e9a63b7e3076babc5317873c797"
"checksum openssl-sys 0.9.43 (registry+https://github.com/rust-lang/crates.io-index)" = "33c86834957dd5b915623e94f2f4ab2c70dd8f6b70679824155d5ae21dbd495d"
"checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063"
"checksum ordslice 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dd20eec3dbe4376829cb7d80ae6ac45e0a766831dca50202ff2d40db46a8a024"

View File

@ -1,6 +1,6 @@
use std::alloc::{Global, Alloc, Layout, System};
/// Issue #45955.
/// Issue #45955 and #62251.
#[test]
fn alloc_system_overaligned_request() {
check_overalign_requests(System)
@ -12,21 +12,23 @@ fn std_heap_overaligned_request() {
}
fn check_overalign_requests<T: Alloc>(mut allocator: T) {
let size = 8;
let align = 16; // greater than size
let iterations = 100;
unsafe {
let pointers: Vec<_> = (0..iterations).map(|_| {
allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap()
}).collect();
for &ptr in &pointers {
assert_eq!((ptr.as_ptr() as usize) % align, 0,
"Got a pointer less aligned than requested")
}
for &align in &[4, 8, 16, 32] { // less than and bigger than `MIN_ALIGN`
for &size in &[align/2, align-1] { // size less than alignment
let iterations = 128;
unsafe {
let pointers: Vec<_> = (0..iterations).map(|_| {
allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap()
}).collect();
for &ptr in &pointers {
assert_eq!((ptr.as_ptr() as usize) % align, 0,
"Got a pointer less aligned than requested")
}
// Clean up
for &ptr in &pointers {
allocator.dealloc(ptr, Layout::from_size_align(size, align).unwrap())
// Clean up
for &ptr in &pointers {
allocator.dealloc(ptr, Layout::from_size_align(size, align).unwrap())
}
}
}
}
}
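For reference, a standalone version of the check exercised by the test above, written against the stable `std::alloc` free functions rather than the unstable `Alloc` trait (a sketch for illustration, not part of this patch):

```rust
use std::alloc::{alloc, dealloc, Layout};

fn main() {
    // Same shape as the test above: alignments around a typical MIN_ALIGN,
    // with sizes smaller than the requested alignment.
    for &align in &[4usize, 8, 16, 32] {
        for &size in &[align / 2, align - 1] {
            let layout = Layout::from_size_align(size, align).unwrap();
            unsafe {
                let ptr = alloc(layout);
                assert!(!ptr.is_null());
                assert_eq!(ptr as usize % align, 0, "got a pointer less aligned than requested");
                dealloc(ptr, layout);
            }
        }
    }
}
```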

View File

@ -11,7 +11,7 @@ use rustc::hir::def::DefKind;
use rustc::hir::def_id::DefId;
use rustc::mir::interpret::{ConstEvalErr, ErrorHandled, ScalarMaybeUndef};
use rustc::mir;
use rustc::ty::{self, TyCtxt, query::TyCtxtAt};
use rustc::ty::{self, TyCtxt};
use rustc::ty::layout::{self, LayoutOf, VariantIdx};
use rustc::ty::subst::Subst;
use rustc::traits::Reveal;
@ -23,7 +23,7 @@ use crate::interpret::{self,
PlaceTy, MPlaceTy, OpTy, ImmTy, Immediate, Scalar,
RawConst, ConstValue,
InterpResult, InterpErrorInfo, InterpError, GlobalId, InterpCx, StackPopCleanup,
Allocation, AllocId, MemoryKind, Memory,
Allocation, AllocId, MemoryKind,
snapshot, RefTracking, intern_const_alloc_recursive,
};
@ -316,6 +316,7 @@ impl interpret::MayLeak for ! {
impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
type MemoryKinds = !;
type PointerTag = ();
type ExtraFnVal = !;
type FrameExtra = ();
type MemoryExtra = ();
@ -370,6 +371,16 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
}))
}
fn call_extra_fn(
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
fn_val: !,
_args: &[OpTy<'tcx>],
_dest: Option<PlaceTy<'tcx>>,
_ret: Option<mir::BasicBlock>,
) -> InterpResult<'tcx> {
match fn_val {}
}
fn call_intrinsic(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
@ -398,18 +409,18 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
}
fn find_foreign_static(
_tcx: TyCtxt<'tcx>,
_def_id: DefId,
_tcx: TyCtxtAt<'tcx>,
) -> InterpResult<'tcx, Cow<'tcx, Allocation<Self::PointerTag>>> {
err!(ReadForeignStatic)
}
#[inline(always)]
fn tag_allocation<'b>(
_memory_extra: &(),
_id: AllocId,
alloc: Cow<'b, Allocation>,
_kind: Option<MemoryKind<!>>,
_memory: &Memory<'mir, 'tcx, Self>,
) -> (Cow<'b, Allocation<Self::PointerTag>>, Self::PointerTag) {
// We do not use a tag so we can just cheaply forward the allocation
(alloc, ())
@ -417,8 +428,8 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
#[inline(always)]
fn tag_static_base_pointer(
_memory_extra: &(),
_id: AllocId,
_memory: &Memory<'mir, 'tcx, Self>,
) -> Self::PointerTag {
()
}
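As an aside on the `match fn_val {}` body added above: this machine sets `ExtraFnVal = !`, so `call_extra_fn` can never actually be reached, and an empty match satisfies any return type. A self-contained sketch of the same trick on stable Rust, using a hypothetical empty enum in place of the unstable `!` type:

```rust
// Uninhabited stand-in for `!` (hypothetical, not part of the patch).
#[derive(Debug, Clone, Copy)]
enum NoExtraFnVal {}

fn call_extra_fn(fn_val: NoExtraFnVal) -> Result<(), String> {
    // No arms are needed: there are no values of `NoExtraFnVal`,
    // so this function body is provably unreachable.
    match fn_val {}
}

fn main() {
    // Nothing can ever be passed in; the point is only that this compiles
    // without needing a dummy return value.
    let _: fn(NoExtraFnVal) -> Result<(), String> = call_extra_fn;
}
```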

View File

@ -11,7 +11,7 @@ use rustc::mir::interpret::{
};
use rustc::mir::CastKind;
use super::{InterpCx, Machine, PlaceTy, OpTy, Immediate};
use super::{InterpCx, Machine, PlaceTy, OpTy, Immediate, FnVal};
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
@ -86,7 +86,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
def_id,
substs,
).ok_or_else(|| InterpError::TooGeneric.into());
let fn_ptr = self.memory.create_fn_alloc(instance?);
let fn_ptr = self.memory.create_fn_alloc(FnVal::Instance(instance?));
self.write_scalar(Scalar::Ptr(fn_ptr.into()), dest)?;
}
_ => bug!("reify fn pointer on {:?}", src.layout.ty),
@ -115,7 +115,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
substs,
ty::ClosureKind::FnOnce,
);
let fn_ptr = self.memory.create_fn_alloc(instance);
let fn_ptr = self.memory.create_fn_alloc(FnVal::Instance(instance));
let val = Immediate::Scalar(Scalar::Ptr(fn_ptr.into()).into());
self.write_immediate(val, dest)?;
}

View File

@ -222,6 +222,23 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
&mut self.memory
}
#[inline(always)]
pub fn force_ptr(
&self,
scalar: Scalar<M::PointerTag>,
) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
self.memory.force_ptr(scalar)
}
#[inline(always)]
pub fn force_bits(
&self,
scalar: Scalar<M::PointerTag>,
size: Size
) -> InterpResult<'tcx, u128> {
self.memory.force_bits(scalar, size)
}
#[inline(always)]
pub fn tag_static_base_pointer(&self, ptr: Pointer) -> Pointer<M::PointerTag> {
self.memory.tag_static_base_pointer(ptr)
@ -253,6 +270,27 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.frame().body
}
#[inline(always)]
pub fn sign_extend(&self, value: u128, ty: TyLayout<'_>) -> u128 {
assert!(ty.abi.is_signed());
sign_extend(value, ty.size)
}
#[inline(always)]
pub fn truncate(&self, value: u128, ty: TyLayout<'_>) -> u128 {
truncate(value, ty.size)
}
#[inline]
pub fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
ty.is_sized(self.tcx, self.param_env)
}
#[inline]
pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
ty.is_freeze(*self.tcx, self.param_env, DUMMY_SP)
}
pub(super) fn subst_and_normalize_erasing_regions<T: TypeFoldable<'tcx>>(
&self,
substs: T,
@ -288,14 +326,6 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
).ok_or_else(|| InterpError::TooGeneric.into())
}
pub fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
ty.is_sized(self.tcx, self.param_env)
}
pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
ty.is_freeze(*self.tcx, self.param_env, DUMMY_SP)
}
pub fn load_mir(
&self,
instance: ty::InstanceDef<'tcx>,
@ -766,32 +796,4 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);
frames
}
#[inline(always)]
pub fn sign_extend(&self, value: u128, ty: TyLayout<'_>) -> u128 {
assert!(ty.abi.is_signed());
sign_extend(value, ty.size)
}
#[inline(always)]
pub fn truncate(&self, value: u128, ty: TyLayout<'_>) -> u128 {
truncate(value, ty.size)
}
#[inline(always)]
pub fn force_ptr(
&self,
scalar: Scalar<M::PointerTag>,
) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
self.memory.force_ptr(scalar)
}
#[inline(always)]
pub fn force_bits(
&self,
scalar: Scalar<M::PointerTag>,
size: Size
) -> InterpResult<'tcx, u128> {
self.memory.force_bits(scalar, size)
}
}

View File

@ -7,11 +7,11 @@ use std::hash::Hash;
use rustc::hir::def_id::DefId;
use rustc::mir;
use rustc::ty::{self, query::TyCtxtAt};
use rustc::ty::{self, TyCtxt};
use super::{
Allocation, AllocId, InterpResult, Scalar, AllocationExtra,
InterpCx, PlaceTy, OpTy, ImmTy, MemoryKind, Pointer, Memory
Allocation, AllocId, InterpResult, InterpError, Scalar, AllocationExtra,
InterpCx, PlaceTy, OpTy, ImmTy, MemoryKind, Pointer, Memory,
};
/// Whether this kind of memory is allowed to leak
@ -67,6 +67,11 @@ pub trait Machine<'mir, 'tcx>: Sized {
/// The `default()` is used for pointers to consts, statics, vtables and functions.
type PointerTag: ::std::fmt::Debug + Copy + Eq + Hash + 'static;
/// Machines can define extra (non-instance) things that represent values of function pointers.
/// For example, Miri uses this to return a function pointer from `dlsym`
/// that can later be called to execute the right thing.
type ExtraFnVal: ::std::fmt::Debug + Copy;
/// Extra data stored in every call frame.
type FrameExtra;
@ -119,6 +124,16 @@ pub trait Machine<'mir, 'tcx>: Sized {
ret: Option<mir::BasicBlock>,
) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>>;
/// Execute `fn_val`. It is the hook's responsibility to advance the instruction
/// pointer as appropriate.
fn call_extra_fn(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
fn_val: Self::ExtraFnVal,
args: &[OpTy<'tcx, Self::PointerTag>],
dest: Option<PlaceTy<'tcx, Self::PointerTag>>,
ret: Option<mir::BasicBlock>,
) -> InterpResult<'tcx>;
/// Directly process an intrinsic without pushing a stack frame.
/// If this returns successfully, the engine will take care of jumping to the next block.
fn call_intrinsic(
@ -136,8 +151,8 @@ pub trait Machine<'mir, 'tcx>: Sized {
///
/// This allocation will then be fed to `tag_allocation` to initialize the "extra" state.
fn find_foreign_static(
tcx: TyCtxt<'tcx>,
def_id: DefId,
tcx: TyCtxtAt<'tcx>,
) -> InterpResult<'tcx, Cow<'tcx, Allocation>>;
/// Called for all binary operations on integer(-like) types when one operand is a pointer
@ -174,10 +189,10 @@ pub trait Machine<'mir, 'tcx>: Sized {
/// For static allocations, the tag returned must be the same as the one returned by
/// `tag_static_base_pointer`.
fn tag_allocation<'b>(
memory_extra: &Self::MemoryExtra,
id: AllocId,
alloc: Cow<'b, Allocation>,
kind: Option<MemoryKind<Self::MemoryKinds>>,
memory: &Memory<'mir, 'tcx, Self>,
) -> (Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>, Self::PointerTag);
/// Return the "base" tag for the given static allocation: the one that is used for direct
@ -186,8 +201,8 @@ pub trait Machine<'mir, 'tcx>: Sized {
/// Be aware that requesting the `Allocation` for that `id` will lead to cycles
/// for cyclic statics!
fn tag_static_base_pointer(
memory_extra: &Self::MemoryExtra,
id: AllocId,
memory: &Memory<'mir, 'tcx, Self>,
) -> Self::PointerTag;
/// Executes a retagging operation
@ -209,20 +224,22 @@ pub trait Machine<'mir, 'tcx>: Sized {
extra: Self::FrameExtra,
) -> InterpResult<'tcx>;
#[inline(always)]
fn int_to_ptr(
int: u64,
_mem: &Memory<'mir, 'tcx, Self>,
int: u64,
) -> InterpResult<'tcx, Pointer<Self::PointerTag>> {
if int == 0 {
err!(InvalidNullPointerUsage)
Err((if int == 0 {
InterpError::InvalidNullPointerUsage
} else {
err!(ReadBytesAsPointer)
}
InterpError::ReadBytesAsPointer
}).into())
}
#[inline(always)]
fn ptr_to_int(
_ptr: Pointer<Self::PointerTag>,
_mem: &Memory<'mir, 'tcx, Self>,
_ptr: Pointer<Self::PointerTag>,
) -> InterpResult<'tcx, u64> {
err!(ReadPointerAsBytes)
}
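The `ExtraFnVal` associated type and `call_extra_fn` hook documented above split function-pointer values into "real" instances and machine-defined extras (the `FnVal` enum introduced in the memory changes below). A self-contained analogue of that split, with simplified types that are not the actual rustc/Miri definitions and a made-up `Dlsym` extra:

```rust
// Simplified stand-in for rustc's FnVal<'tcx, M::ExtraFnVal>.
#[derive(Debug, Clone, Copy)]
enum FnVal<Extra> {
    Instance(u32), // stands in for a resolved ty::Instance
    Other(Extra),  // machine-defined "extra" function value
}

// A hypothetical machine-specific extra, in the spirit of Miri's dlsym handling.
#[derive(Debug, Clone, Copy)]
enum Dlsym {
    GetEntropy,
}

fn eval_fn_call(fn_val: FnVal<Dlsym>) {
    match fn_val {
        // Normal case: push a stack frame for the instance's MIR body.
        FnVal::Instance(id) => println!("push frame for instance #{}", id),
        // Extra case: hand control to the machine's `call_extra_fn`-style hook.
        FnVal::Other(Dlsym::GetEntropy) => println!("machine hook handles the call"),
    }
}

fn main() {
    eval_fn_call(FnVal::Instance(0));
    eval_fn_call(FnVal::Other(Dlsym::GetEntropy));
}
```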

View File

@ -54,6 +54,26 @@ pub enum AllocCheck {
MaybeDead,
}
/// The value of a function pointer.
#[derive(Debug, Copy, Clone)]
pub enum FnVal<'tcx, Other> {
Instance(Instance<'tcx>),
Other(Other),
}
impl<'tcx, Other> FnVal<'tcx, Other> {
pub fn as_instance(self) -> InterpResult<'tcx, Instance<'tcx>> {
match self {
FnVal::Instance(instance) =>
Ok(instance),
FnVal::Other(_) =>
err!(MachineError(
format!("Expected instance function pointer, got 'other' pointer")
)),
}
}
}
// `Memory` has to depend on the `Machine` because some of its operations
// (e.g., `get`) call a `Machine` hook.
pub struct Memory<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
@ -69,16 +89,20 @@ pub struct Memory<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
// FIXME: this should not be public, but interning currently needs access to it
pub(super) alloc_map: M::MemoryMap,
/// Map for "extra" function pointers.
extra_fn_ptr_map: FxHashMap<AllocId, M::ExtraFnVal>,
/// To be able to compare pointers with NULL, and to check alignment for accesses
/// to ZSTs (where pointers may dangle), we keep track of the size even for allocations
/// that do not exist any more.
// FIXME: this should not be public, but interning currently needs access to it
pub(super) dead_alloc_map: FxHashMap<AllocId, (Size, Align)>,
/// Extra data added by the machine.
pub extra: M::MemoryExtra,
/// Lets us implement `HasDataLayout`, which is awfully convenient.
pub(super) tcx: TyCtxtAt<'tcx>,
pub tcx: TyCtxtAt<'tcx>,
}
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for Memory<'mir, 'tcx, M> {
@ -98,6 +122,7 @@ where
fn clone(&self) -> Self {
Memory {
alloc_map: self.alloc_map.clone(),
extra_fn_ptr_map: self.extra_fn_ptr_map.clone(),
dead_alloc_map: self.dead_alloc_map.clone(),
extra: (),
tcx: self.tcx,
@ -109,6 +134,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
pub fn new(tcx: TyCtxtAt<'tcx>, extra: M::MemoryExtra) -> Self {
Memory {
alloc_map: M::MemoryMap::default(),
extra_fn_ptr_map: FxHashMap::default(),
dead_alloc_map: FxHashMap::default(),
extra,
tcx,
@ -117,11 +143,24 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
#[inline]
pub fn tag_static_base_pointer(&self, ptr: Pointer) -> Pointer<M::PointerTag> {
ptr.with_tag(M::tag_static_base_pointer(ptr.alloc_id, &self))
ptr.with_tag(M::tag_static_base_pointer(&self.extra, ptr.alloc_id))
}
pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> Pointer<M::PointerTag> {
let id = self.tcx.alloc_map.lock().create_fn_alloc(instance);
pub fn create_fn_alloc(
&mut self,
fn_val: FnVal<'tcx, M::ExtraFnVal>,
) -> Pointer<M::PointerTag>
{
let id = match fn_val {
FnVal::Instance(instance) => self.tcx.alloc_map.lock().create_fn_alloc(instance),
FnVal::Other(extra) => {
// FIXME(RalfJung): Should we have a cache here?
let id = self.tcx.alloc_map.lock().reserve();
let old = self.extra_fn_ptr_map.insert(id, extra);
assert!(old.is_none());
id
}
};
self.tag_static_base_pointer(Pointer::from(id))
}
@ -150,7 +189,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
kind: MemoryKind<M::MemoryKinds>,
) -> Pointer<M::PointerTag> {
let id = self.tcx.alloc_map.lock().reserve();
let (alloc, tag) = M::tag_allocation(id, Cow::Owned(alloc), Some(kind), &self);
let (alloc, tag) = M::tag_allocation(&self.extra, id, Cow::Owned(alloc), Some(kind));
self.alloc_map.insert(id, (kind, alloc.into_owned()));
Pointer::from(id).with_tag(tag)
}
@ -368,9 +407,9 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
/// contains a reference to memory that was created during its evaluation (i.e., not to
/// another static), those inner references only exist in "resolved" form.
fn get_static_alloc(
id: AllocId,
memory_extra: &M::MemoryExtra,
tcx: TyCtxtAt<'tcx>,
memory: &Memory<'mir, 'tcx, M>,
id: AllocId,
) -> InterpResult<'tcx, Cow<'tcx, Allocation<M::PointerTag, M::AllocExtra>>> {
let alloc = tcx.alloc_map.lock().get(id);
let alloc = match alloc {
@ -384,7 +423,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
// We got a "lazy" static that has not been computed yet.
if tcx.is_foreign_item(def_id) {
trace!("static_alloc: foreign item {:?}", def_id);
M::find_foreign_static(def_id, tcx)?
M::find_foreign_static(tcx.tcx, def_id)?
} else {
trace!("static_alloc: Need to compute {:?}", def_id);
let instance = Instance::mono(tcx.tcx, def_id);
@ -414,10 +453,10 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
// We got tcx memory. Let the machine figure out whether and how to
// turn that into memory with the right pointer tag.
Ok(M::tag_allocation(
memory_extra,
id, // always use the ID we got as input, not the "hidden" one.
alloc,
M::STATIC_KIND.map(MemoryKind::Machine),
memory
).0)
}
@ -430,7 +469,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
// `get_static_alloc` that we can actually use directly without inserting anything anywhere.
// So the error type is `InterpResult<'tcx, &Allocation<M::PointerTag>>`.
let a = self.alloc_map.get_or(id, || {
let alloc = Self::get_static_alloc(id, self.tcx, &self).map_err(Err)?;
let alloc = Self::get_static_alloc(&self.extra, self.tcx, id).map_err(Err)?;
match alloc {
Cow::Borrowed(alloc) => {
// We got a ref, cheaply return that as an "error" so that the
@ -459,11 +498,11 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
id: AllocId,
) -> InterpResult<'tcx, &mut Allocation<M::PointerTag, M::AllocExtra>> {
let tcx = self.tcx;
let alloc = Self::get_static_alloc(id, tcx, &self);
let memory_extra = &self.extra;
let a = self.alloc_map.get_mut_or(id, || {
// Need to make a copy, even if `get_static_alloc` is able
// to give us a cheap reference.
let alloc = alloc?;
let alloc = Self::get_static_alloc(memory_extra, tcx, id)?;
if alloc.mutability == Mutability::Immutable {
return err!(ModifiedConstantMemory);
}
@ -495,56 +534,65 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
id: AllocId,
liveness: AllocCheck,
) -> InterpResult<'static, (Size, Align)> {
// Regular allocations.
if let Ok(alloc) = self.get(id) {
return Ok((Size::from_bytes(alloc.bytes.len() as u64), alloc.align));
}
// can't do this in the match argument, we may get cycle errors since the lock would get
// dropped after the match.
// Function pointers.
if let Ok(_) = self.get_fn_alloc(id) {
return if let AllocCheck::Dereferencable = liveness {
// The caller requested no function pointers.
err!(DerefFunctionPointer)
} else {
Ok((Size::ZERO, Align::from_bytes(1).unwrap()))
};
}
// Foreign statics.
// Can't do this in the match argument, we may get cycle errors since the lock would
// be held throughout the match.
let alloc = self.tcx.alloc_map.lock().get(id);
// Could also be a fn ptr or extern static
match alloc {
Some(GlobalAlloc::Function(..)) => {
if let AllocCheck::Dereferencable = liveness {
// The caller requested no function pointers.
err!(DerefFunctionPointer)
} else {
Ok((Size::ZERO, Align::from_bytes(1).unwrap()))
}
}
// `self.get` would also work, but can cause cycles if a static refers to itself
Some(GlobalAlloc::Static(did)) => {
// The only way `get` couldn't have worked here is if this is an extern static
assert!(self.tcx.is_foreign_item(did));
// Use size and align of the type
let ty = self.tcx.type_of(did);
let layout = self.tcx.layout_of(ParamEnv::empty().and(ty)).unwrap();
Ok((layout.size, layout.align.abi))
return Ok((layout.size, layout.align.abi));
}
_ => {
if let Ok(alloc) = self.get(id) {
Ok((Size::from_bytes(alloc.bytes.len() as u64), alloc.align))
}
else if let AllocCheck::MaybeDead = liveness {
// Deallocated pointers are allowed, we should be able to find
// them in the map.
Ok(*self.dead_alloc_map.get(&id)
.expect("deallocated pointers should all be recorded in `dead_alloc_map`"))
} else {
err!(DanglingPointerDeref)
}
},
_ => {}
}
// The rest must be dead.
if let AllocCheck::MaybeDead = liveness {
// Deallocated pointers are allowed, we should be able to find
// them in the map.
Ok(*self.dead_alloc_map.get(&id)
.expect("deallocated pointers should all be recorded in `dead_alloc_map`"))
} else {
err!(DanglingPointerDeref)
}
}
pub fn get_fn(&self, ptr: Pointer<M::PointerTag>) -> InterpResult<'tcx, Instance<'tcx>> {
fn get_fn_alloc(&self, id: AllocId) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
trace!("reading fn ptr: {}", id);
if let Some(extra) = self.extra_fn_ptr_map.get(&id) {
Ok(FnVal::Other(*extra))
} else {
match self.tcx.alloc_map.lock().get(id) {
Some(GlobalAlloc::Function(instance)) => Ok(FnVal::Instance(instance)),
_ => Err(InterpError::ExecuteMemory.into()),
}
}
}
pub fn get_fn(
&self,
ptr: Scalar<M::PointerTag>,
) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
let ptr = self.force_ptr(ptr)?; // We definitely need a pointer value.
if ptr.offset.bytes() != 0 {
return err!(InvalidFunctionPointer);
}
trace!("reading fn ptr: {}", ptr.alloc_id);
match self.tcx.alloc_map.lock().get(ptr.alloc_id) {
Some(GlobalAlloc::Function(instance)) => Ok(instance),
_ => Err(InterpError::ExecuteMemory.into()),
}
self.get_fn_alloc(ptr.alloc_id)
}
pub fn mark_immutable(&mut self, id: AllocId) -> InterpResult<'tcx> {
@ -680,6 +728,8 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
/// Reading and writing.
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
/// Reads the given number of bytes from memory. Returns them as a slice.
///
/// Performs appropriate bounds checks.
pub fn read_bytes(
&self,
@ -693,6 +743,14 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
self.get(ptr.alloc_id)?.get_bytes(self, ptr, size)
}
/// Reads a 0-terminated sequence of bytes from memory. Returns them as a slice.
///
/// Performs appropriate bounds checks.
pub fn read_c_str(&self, ptr: Scalar<M::PointerTag>) -> InterpResult<'tcx, &[u8]> {
let ptr = self.force_ptr(ptr)?; // We need to read at least 1 byte, so we *need* a ptr.
self.get(ptr.alloc_id)?.read_c_str(self, ptr)
}
/// Performs appropriate bounds checks.
pub fn copy(
&mut self,
@ -890,7 +948,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
match scalar {
Scalar::Ptr(ptr) => Ok(ptr),
_ => M::int_to_ptr(scalar.to_usize(self)?, self)
_ => M::int_to_ptr(&self, scalar.to_usize(self)?)
}
}
@ -901,7 +959,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
) -> InterpResult<'tcx, u128> {
match scalar.to_bits_or_ptr(size, self) {
Ok(bits) => Ok(bits),
Err(ptr) => Ok(M::ptr_to_int(ptr, self)? as u128)
Err(ptr) => Ok(M::ptr_to_int(&self, ptr)? as u128)
}
}
}
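The new `read_c_str` forwarder above reads a 0-terminated byte sequence out of an allocation. A standalone sketch of the same idea over a plain byte slice, without any of the bounds or provenance machinery:

```rust
// Return the bytes before the first 0 terminator, or None if there is none
// (the real method reports an error through InterpResult instead).
fn read_c_str(bytes: &[u8]) -> Option<&[u8]> {
    let len = bytes.iter().position(|&b| b == 0)?;
    Some(&bytes[..len])
}

fn main() {
    assert_eq!(read_c_str(b"hello\0world"), Some(&b"hello"[..]));
    assert_eq!(read_c_str(b"unterminated"), None);
}
```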

View File

@ -24,7 +24,7 @@ pub use self::eval_context::{
pub use self::place::{Place, PlaceTy, MemPlace, MPlaceTy};
pub use self::memory::{Memory, MemoryKind, AllocCheck};
pub use self::memory::{Memory, MemoryKind, AllocCheck, FnVal};
pub use self::machine::{Machine, AllocMap, MayLeak};

View File

@ -6,9 +6,9 @@ use rustc::ty::layout::{self, TyLayout, LayoutOf};
use syntax::source_map::Span;
use rustc_target::spec::abi::Abi;
use rustc::mir::interpret::{InterpResult, PointerArithmetic, InterpError, Scalar};
use super::{
InterpCx, Machine, Immediate, OpTy, ImmTy, PlaceTy, MPlaceTy, StackPopCleanup
InterpResult, PointerArithmetic, InterpError, Scalar,
InterpCx, Machine, Immediate, OpTy, ImmTy, PlaceTy, MPlaceTy, StackPopCleanup, FnVal,
};
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
@ -76,16 +76,16 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
};
let func = self.eval_operand(func, None)?;
let (fn_def, abi) = match func.layout.ty.sty {
let (fn_val, abi) = match func.layout.ty.sty {
ty::FnPtr(sig) => {
let caller_abi = sig.abi();
let fn_ptr = self.force_ptr(self.read_scalar(func)?.not_undef()?)?;
let instance = self.memory.get_fn(fn_ptr)?;
(instance, caller_abi)
let fn_ptr = self.read_scalar(func)?.not_undef()?;
let fn_val = self.memory.get_fn(fn_ptr)?;
(fn_val, caller_abi)
}
ty::FnDef(def_id, substs) => {
let sig = func.layout.ty.fn_sig(*self.tcx);
(self.resolve(def_id, substs)?, sig.abi())
(FnVal::Instance(self.resolve(def_id, substs)?), sig.abi())
},
_ => {
let msg = format!("can't handle callee of type {:?}", func.layout.ty);
@ -94,7 +94,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
};
let args = self.eval_operands(args)?;
self.eval_fn_call(
fn_def,
fn_val,
terminator.source_info.span,
abi,
&args[..],
@ -228,14 +228,21 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Call this function -- pushing the stack frame and initializing the arguments.
fn eval_fn_call(
&mut self,
instance: ty::Instance<'tcx>,
fn_val: FnVal<'tcx, M::ExtraFnVal>,
span: Span,
caller_abi: Abi,
args: &[OpTy<'tcx, M::PointerTag>],
dest: Option<PlaceTy<'tcx, M::PointerTag>>,
ret: Option<mir::BasicBlock>,
) -> InterpResult<'tcx> {
trace!("eval_fn_call: {:#?}", instance);
trace!("eval_fn_call: {:#?}", fn_val);
let instance = match fn_val {
FnVal::Instance(instance) => instance,
FnVal::Other(extra) => {
return M::call_extra_fn(self, extra, args, dest, ret);
}
};
match instance.def {
ty::InstanceDef::Intrinsic(..) => {
@ -431,8 +438,8 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.tcx.data_layout.pointer_align.abi,
)?.expect("cannot be a ZST");
let fn_ptr = self.memory.get(vtable_slot.alloc_id)?
.read_ptr_sized(self, vtable_slot)?.to_ptr()?;
let instance = self.memory.get_fn(fn_ptr)?;
.read_ptr_sized(self, vtable_slot)?.not_undef()?;
let drop_fn = self.memory.get_fn(fn_ptr)?;
// `*mut receiver_place.layout.ty` is almost the layout that we
// want for args[0]: We have to project to field 0 because we want
@ -447,7 +454,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
});
trace!("Patched self operand to {:#?}", args[0]);
// recurse with concrete function
self.eval_fn_call(instance, span, caller_abi, &args, dest, ret)
self.eval_fn_call(drop_fn, span, caller_abi, &args, dest, ret)
}
}
}
@ -482,7 +489,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);
self.eval_fn_call(
instance,
FnVal::Instance(instance),
span,
Abi::Rust,
&[arg.into()],

View File

@ -2,7 +2,7 @@ use rustc::ty::{self, Ty, Instance};
use rustc::ty::layout::{Size, Align, LayoutOf};
use rustc::mir::interpret::{Scalar, Pointer, InterpResult, PointerArithmetic};
use super::{InterpCx, InterpError, Machine, MemoryKind};
use super::{InterpCx, InterpError, Machine, MemoryKind, FnVal};
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Creates a dynamic vtable for the given type and vtable origin. This is used only for
@ -56,7 +56,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let tcx = &*self.tcx;
let drop = Instance::resolve_drop_in_place(*tcx, ty);
let drop = self.memory.create_fn_alloc(drop);
let drop = self.memory.create_fn_alloc(FnVal::Instance(drop));
// no need to do any alignment checks on the memory accesses below, because we know the
// allocation is correctly aligned as we created it above. Also we're only offsetting by
@ -84,7 +84,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
def_id,
substs,
).ok_or_else(|| InterpError::TooGeneric)?;
let fn_ptr = self.memory.create_fn_alloc(instance);
let fn_ptr = self.memory.create_fn_alloc(FnVal::Instance(instance));
let method_ptr = vtable.offset(ptr_size * (3 + i as u64), self)?;
self.memory
.get_mut(method_ptr.alloc_id)?
@ -112,8 +112,10 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let drop_fn = self.memory
.get(vtable.alloc_id)?
.read_ptr_sized(self, vtable)?
.to_ptr()?;
let drop_instance = self.memory.get_fn(drop_fn)?;
.not_undef()?;
// We *need* an instance here, no other kind of function value, to be able
// to determine the type.
let drop_instance = self.memory.get_fn(drop_fn)?.as_instance()?;
trace!("Found drop fn: {:?}", drop_instance);
let fn_sig = drop_instance.ty(*self.tcx).fn_sig(*self.tcx);
let fn_sig = self.tcx.normalize_erasing_late_bound_regions(self.param_env, &fn_sig);

View File

@ -6,13 +6,11 @@ use rustc::hir;
use rustc::ty::layout::{self, TyLayout, LayoutOf, VariantIdx};
use rustc::ty;
use rustc_data_structures::fx::FxHashSet;
use rustc::mir::interpret::{
GlobalAlloc, InterpResult, InterpError,
};
use std::hash::Hash;
use super::{
GlobalAlloc, InterpResult, InterpError,
OpTy, Machine, InterpCx, ValueVisitor, MPlaceTy,
};
@ -153,15 +151,16 @@ fn wrapping_range_format(r: &RangeInclusive<u128>, max_hi: u128) -> String {
debug_assert!(hi <= max_hi);
if lo > hi {
format!("less or equal to {}, or greater or equal to {}", hi, lo)
} else if lo == hi {
format!("equal to {}", lo)
} else if lo == 0 {
debug_assert!(hi < max_hi, "should not be printing if the range covers everything");
format!("less or equal to {}", hi)
} else if hi == max_hi {
debug_assert!(lo > 0, "should not be printing if the range covers everything");
format!("greater or equal to {}", lo)
} else {
if lo == 0 {
debug_assert!(hi < max_hi, "should not be printing if the range covers everything");
format!("less or equal to {}", hi)
} else if hi == max_hi {
format!("greater or equal to {}", lo)
} else {
format!("in the range {:?}", r)
}
format!("in the range {:?}", r)
}
}
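For reference on `wrapping_range_format` above: a layout's `valid_range` is a wrapping inclusive range, so `lo > hi` means the valid values wrap around the maximum. A tiny standalone sketch of membership in such a range (illustration only, not the rustc helper):

```rust
use std::ops::RangeInclusive;

// Membership test for a wrapping inclusive range over u128:
// if lo > hi the range wraps, i.e. valid values are >= lo or <= hi.
fn wrapping_contains(r: &RangeInclusive<u128>, x: u128) -> bool {
    let (lo, hi) = (*r.start(), *r.end());
    if lo > hi { x >= lo || x <= hi } else { x >= lo && x <= hi }
}

fn main() {
    // A NonNull/NonZero-like niche: everything except 0 is valid.
    let nonzero = 1..=u128::MAX;
    assert!(wrapping_contains(&nonzero, 42));
    assert!(!wrapping_contains(&nonzero, 0));

    // A wrapping range such as 250..=5: values pass through the maximum and 0,
    // so 252 and 3 are inside while 100 is not.
    let wrapping = 250..=5u128;
    assert!(wrapping_contains(&wrapping, 252));
    assert!(wrapping_contains(&wrapping, 3));
    assert!(!wrapping_contains(&wrapping, 100));
}
```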
@ -457,10 +456,10 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
}
ty::FnPtr(_sig) => {
let value = value.to_scalar_or_undef();
let ptr = try_validation!(value.to_ptr(),
value, self.path, "a pointer");
let _fn = try_validation!(self.ecx.memory.get_fn(ptr),
value, self.path, "a function pointer");
let _fn = try_validation!(
value.not_undef().and_then(|ptr| self.ecx.memory.get_fn(ptr)),
value, self.path, "a function pointer"
);
// FIXME: Check if the signature matches
}
// This should be all the primitive types
@ -504,20 +503,18 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
if lo == 1 && hi == max_hi {
// Only NULL is the niche. So make sure the ptr is NOT NULL.
if self.ecx.memory.ptr_may_be_null(ptr) {
// These conditions are just here to improve the diagnostics so we can
// differentiate between null pointers and dangling pointers
if self.ref_tracking_for_consts.is_some() &&
self.ecx.memory.get(ptr.alloc_id).is_err() &&
self.ecx.memory.get_fn(ptr).is_err() {
return validation_failure!(
"encountered dangling pointer", self.path
);
}
return validation_failure!("a potentially NULL pointer", self.path);
return validation_failure!(
"a potentially NULL pointer",
self.path,
format!(
"something that cannot possibly fail to be {}",
wrapping_range_format(&layout.valid_range, max_hi)
)
);
}
return Ok(());
} else {
// Conservatively, we reject, because the pointer *could* have this
// Conservatively, we reject, because the pointer *could* have a bad
// value.
return validation_failure!(
"a pointer",

View File

@ -53,7 +53,7 @@ impl<'a> SpanUtils<'a> {
pub fn sub_span_of_token(&self, span: Span, tok: TokenKind) -> Option<Span> {
let mut toks = self.retokenise_span(span);
loop {
let next = toks.real_token();
let next = toks.next_token();
if next == token::Eof {
return None;
}

View File

@ -38,17 +38,17 @@ pub fn render_with_highlighting(
FileName::Custom(String::from("rustdoc-highlighting")),
src.to_owned(),
);
let highlight_result =
lexer::StringReader::new_or_buffered_errs(&sess, fm, None).and_then(|lexer| {
let mut classifier = Classifier::new(lexer, sess.source_map());
let highlight_result = {
let lexer = lexer::StringReader::new(&sess, fm, None);
let mut classifier = Classifier::new(lexer, sess.source_map());
let mut highlighted_source = vec![];
if classifier.write_source(&mut highlighted_source).is_err() {
Err(classifier.lexer.buffer_fatal_errors())
} else {
Ok(String::from_utf8_lossy(&highlighted_source).into_owned())
}
});
let mut highlighted_source = vec![];
if classifier.write_source(&mut highlighted_source).is_err() {
Err(classifier.lexer.buffer_fatal_errors())
} else {
Ok(String::from_utf8_lossy(&highlighted_source).into_owned())
}
};
match highlight_result {
Ok(highlighted_source) => {
@ -79,6 +79,7 @@ pub fn render_with_highlighting(
/// each span of text in sequence.
struct Classifier<'a> {
lexer: lexer::StringReader<'a>,
peek_token: Option<Token>,
source_map: &'a SourceMap,
// State of the classifier.
@ -178,6 +179,7 @@ impl<'a> Classifier<'a> {
fn new(lexer: lexer::StringReader<'a>, source_map: &'a SourceMap) -> Classifier<'a> {
Classifier {
lexer,
peek_token: None,
source_map,
in_attribute: false,
in_macro: false,
@ -187,10 +189,19 @@ impl<'a> Classifier<'a> {
/// Gets the next token out of the lexer.
fn try_next_token(&mut self) -> Result<Token, HighlightError> {
match self.lexer.try_next_token() {
Ok(token) => Ok(token),
Err(_) => Err(HighlightError::LexError),
if let Some(token) = self.peek_token.take() {
return Ok(token);
}
self.lexer.try_next_token().map_err(|()| HighlightError::LexError)
}
fn peek(&mut self) -> Result<&Token, HighlightError> {
if self.peek_token.is_none() {
self.peek_token = Some(
self.lexer.try_next_token().map_err(|()| HighlightError::LexError)?
);
}
Ok(self.peek_token.as_ref().unwrap())
}
/// Exhausts the `lexer` writing the output into `out`.
@ -234,7 +245,7 @@ impl<'a> Classifier<'a> {
// reference or dereference operator or a reference or pointer type, instead of the
// bit-and or multiplication operator.
token::BinOp(token::And) | token::BinOp(token::Star)
if self.lexer.peek() != &token::Whitespace => Class::RefKeyWord,
if self.peek()? != &token::Whitespace => Class::RefKeyWord,
// Consider this as part of a macro invocation if there was a
// leading identifier.
@ -257,7 +268,7 @@ impl<'a> Classifier<'a> {
token::Question => Class::QuestionMark,
token::Dollar => {
if self.lexer.peek().is_ident() {
if self.peek()?.is_ident() {
self.in_macro_nonterminal = true;
Class::MacroNonTerminal
} else {
@ -280,9 +291,9 @@ impl<'a> Classifier<'a> {
// as an attribute.
// Case 1: #![inner_attribute]
if self.lexer.peek() == &token::Not {
if self.peek()? == &token::Not {
self.try_next_token()?; // NOTE: consumes `!` token!
if self.lexer.peek() == &token::OpenDelim(token::Bracket) {
if self.peek()? == &token::OpenDelim(token::Bracket) {
self.in_attribute = true;
out.enter_span(Class::Attribute)?;
}
@ -292,7 +303,7 @@ impl<'a> Classifier<'a> {
}
// Case 2: #[outer_attribute]
if self.lexer.peek() == &token::OpenDelim(token::Bracket) {
if self.peek()? == &token::OpenDelim(token::Bracket) {
self.in_attribute = true;
out.enter_span(Class::Attribute)?;
}
@ -341,7 +352,7 @@ impl<'a> Classifier<'a> {
if self.in_macro_nonterminal {
self.in_macro_nonterminal = false;
Class::MacroNonTerminal
} else if self.lexer.peek() == &token::Not {
} else if self.peek()? == &token::Not {
self.in_macro = true;
Class::Macro
} else {
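Since the lexer no longer buffers a peeked token itself (PR #62329 removes its 1-token lookahead), the classifier above keeps its own `peek_token` slot. A self-contained sketch of that one-token peek buffer over an arbitrary token source, with simplified types rather than rustdoc's:

```rust
struct Peekable1<I: Iterator> {
    source: I,
    peeked: Option<I::Item>,
}

impl<I: Iterator> Peekable1<I> {
    // Hand out the buffered token first, if any; otherwise pull a fresh one.
    fn next_token(&mut self) -> Option<I::Item> {
        self.peeked.take().or_else(|| self.source.next())
    }

    // Fill the buffer on demand and return a reference to it.
    fn peek(&mut self) -> Option<&I::Item> {
        if self.peeked.is_none() {
            self.peeked = self.source.next();
        }
        self.peeked.as_ref()
    }
}

fn main() {
    let mut lexer = Peekable1 { source: vec!["#", "!", "["].into_iter(), peeked: None };
    assert_eq!(lexer.peek(), Some(&"#"));
    assert_eq!(lexer.next_token(), Some("#")); // the peeked token comes back first
    assert_eq!(lexer.next_token(), Some("!"));
}
```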

View File

@ -32,7 +32,8 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
dox[code_block.code].to_owned(),
);
let errors = Lexer::new_or_buffered_errs(&sess, source_file, None).and_then(|mut lexer| {
let errors = {
let mut lexer = Lexer::new(&sess, source_file, None);
while let Ok(token::Token { kind, .. }) = lexer.try_next_token() {
if kind == token::Eof {
break;
@ -46,7 +47,7 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
} else {
Ok(())
}
});
};
if let Err(errors) = errors {
let mut diag = if let Some(sp) =

View File

@ -6,6 +6,10 @@ use crate::alloc::{GlobalAlloc, Layout, System};
unsafe impl GlobalAlloc for System {
#[inline]
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
// jemalloc provides alignment less than MIN_ALIGN for small allocations.
// So only rely on MIN_ALIGN if size >= align.
// Also see <https://github.com/rust-lang/rust/issues/45955> and
// <https://github.com/rust-lang/rust/issues/62251#issuecomment-507580914>.
if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() {
libc::malloc(layout.size()) as *mut u8
} else {
@ -21,6 +25,7 @@ unsafe impl GlobalAlloc for System {
#[inline]
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
// See the comment above in `alloc` for why this check looks the way it does.
if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() {
libc::calloc(layout.size(), 1) as *mut u8
} else {
@ -80,7 +85,10 @@ unsafe fn aligned_malloc(layout: &Layout) -> *mut u8 {
#[inline]
unsafe fn aligned_malloc(layout: &Layout) -> *mut u8 {
let mut out = ptr::null_mut();
let ret = libc::posix_memalign(&mut out, layout.align(), layout.size());
// posix_memalign requires that the alignment be a multiple of `sizeof(void*)`.
// Since these are all powers of 2, we can just use max.
let align = layout.align().max(crate::mem::size_of::<usize>());
let ret = libc::posix_memalign(&mut out, align, layout.size());
if ret != 0 {
ptr::null_mut()
} else {
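The comment above is the heart of PR #62296: `posix_memalign` requires the alignment to be a power-of-two multiple of `sizeof(void*)`, so alignments below pointer size are rounded up with `max`. A standalone illustration of just that rounding rule, separate from the allocator code:

```rust
use std::mem;

// Round a Layout alignment up to something posix_memalign accepts.
// Both operands are powers of two, so `max` is enough.
fn memalign_align(layout_align: usize) -> usize {
    layout_align.max(mem::size_of::<*mut u8>())
}

fn main() {
    let ptr_size = mem::size_of::<*mut u8>();
    for &align in &[1usize, 2, 4, 8, 16, 64] {
        let a = memalign_align(align);
        assert!(a.is_power_of_two() && a % ptr_size == 0); // what posix_memalign requires
        assert!(a >= align);                               // still satisfies the request
    }
    println!("pointer size here: {} bytes", ptr_size);
}
```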

View File

@ -268,7 +268,7 @@ fn read_block_comment(rdr: &mut StringReader<'_>,
while level > 0 {
debug!("=== block comment level {}", level);
if rdr.is_eof() {
rdr.fatal("unterminated block comment").raise();
rdr.fatal_span_(rdr.pos, rdr.pos, "unterminated block comment").raise();
}
if rdr.ch_is('\n') {
trim_whitespace_prefix_and_push_line(&mut lines, curr_line, col);
@ -346,7 +346,7 @@ pub fn gather_comments(sess: &ParseSess, path: FileName, srdr: &mut dyn Read) ->
srdr.read_to_string(&mut src).unwrap();
let cm = SourceMap::new(sess.source_map().path_mapping().clone());
let source_file = cm.new_source_file(path, src);
let mut rdr = lexer::StringReader::new_raw(sess, source_file, None);
let mut rdr = lexer::StringReader::new(sess, source_file, None);
let mut comments: Vec<Comment> = Vec::new();
let mut code_to_the_left = false; // Only code

View File

@ -38,9 +38,6 @@ pub struct StringReader<'a> {
crate source_file: Lrc<syntax_pos::SourceFile>,
/// Stop reading src at this index.
crate end_src_index: usize,
// cached:
peek_token: Token,
peek_span_src_raw: Span,
fatal_errs: Vec<DiagnosticBuilder<'a>>,
// cache a direct reference to the source text, so that we don't have to
// retrieve it via `self.source_file.src.as_ref().unwrap()` all the time.
@ -49,15 +46,59 @@ pub struct StringReader<'a> {
}
impl<'a> StringReader<'a> {
fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
self.mk_sp_and_raw(lo, hi).0
pub fn new(sess: &'a ParseSess,
source_file: Lrc<syntax_pos::SourceFile>,
override_span: Option<Span>) -> Self {
let mut sr = StringReader::new_internal(sess, source_file, override_span);
sr.bump();
sr
}
fn mk_sp_and_raw(&self, lo: BytePos, hi: BytePos) -> (Span, Span) {
let raw = Span::new(lo, hi, NO_EXPANSION);
let real = self.override_span.unwrap_or(raw);
pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
let begin = sess.source_map().lookup_byte_offset(span.lo());
let end = sess.source_map().lookup_byte_offset(span.hi());
(real, raw)
// Make the range zero-length if the span is invalid.
if span.lo() > span.hi() || begin.sf.start_pos != end.sf.start_pos {
span = span.shrink_to_lo();
}
let mut sr = StringReader::new_internal(sess, begin.sf, None);
// Seek the lexer to the right byte range.
sr.next_pos = span.lo();
sr.end_src_index = sr.src_index(span.hi());
sr.bump();
sr
}
fn new_internal(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>,
override_span: Option<Span>) -> Self
{
if source_file.src.is_none() {
sess.span_diagnostic.bug(&format!("Cannot lex source_file without source: {}",
source_file.name));
}
let src = (*source_file.src.as_ref().unwrap()).clone();
StringReader {
sess,
next_pos: source_file.start_pos,
pos: source_file.start_pos,
ch: Some('\n'),
source_file,
end_src_index: src.len(),
src,
fatal_errs: Vec::new(),
override_span,
}
}
fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
self.override_span.unwrap_or_else(|| Span::new(lo, hi, NO_EXPANSION))
}
fn unwrap_or_abort(&mut self, res: Result<Token, ()>) -> Token {
@ -70,35 +111,32 @@ impl<'a> StringReader<'a> {
}
}
fn next_token(&mut self) -> Token where Self: Sized {
let res = self.try_next_token();
self.unwrap_or_abort(res)
}
/// Returns the next token. EFFECT: advances the string_reader.
/// Returns the next token, including trivia like whitespace or comments.
///
/// `Err(())` means that some errors were encountered, which can be
/// retrieved using `buffer_fatal_errors`.
pub fn try_next_token(&mut self) -> Result<Token, ()> {
assert!(self.fatal_errs.is_empty());
let ret_val = self.peek_token.take();
self.advance_token()?;
Ok(ret_val)
}
fn try_real_token(&mut self) -> Result<Token, ()> {
let mut t = self.try_next_token()?;
loop {
match t.kind {
token::Whitespace | token::Comment | token::Shebang(_) => {
t = self.try_next_token()?;
}
_ => break,
match self.scan_whitespace_or_comment() {
Some(comment) => Ok(comment),
None => {
let (kind, start_pos, end_pos) = if self.is_eof() {
(token::Eof, self.source_file.end_pos, self.source_file.end_pos)
} else {
let start_pos = self.pos;
(self.next_token_inner()?, start_pos, self.pos)
};
let span = self.mk_sp(start_pos, end_pos);
Ok(Token::new(kind, span))
}
}
Ok(t)
}
pub fn real_token(&mut self) -> Token {
let res = self.try_real_token();
/// Returns the next token, including trivia like whitespace or comments.
///
/// Aborts in case of an error.
pub fn next_token(&mut self) -> Token {
let res = self.try_next_token();
self.unwrap_or_abort(res)
}
@ -120,10 +158,6 @@ impl<'a> StringReader<'a> {
FatalError.raise();
}
fn fatal(&self, m: &str) -> FatalError {
self.fatal_span(self.peek_token.span, m)
}
crate fn emit_fatal_errors(&mut self) {
for err in &mut self.fatal_errs {
err.emit();
@ -142,81 +176,6 @@ impl<'a> StringReader<'a> {
buffer
}
pub fn peek(&self) -> &Token {
&self.peek_token
}
/// For comments.rs, which hackily pokes into next_pos and ch
fn new_raw(sess: &'a ParseSess,
source_file: Lrc<syntax_pos::SourceFile>,
override_span: Option<Span>) -> Self {
let mut sr = StringReader::new_raw_internal(sess, source_file, override_span);
sr.bump();
sr
}
fn new_raw_internal(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>,
override_span: Option<Span>) -> Self
{
if source_file.src.is_none() {
sess.span_diagnostic.bug(&format!("Cannot lex source_file without source: {}",
source_file.name));
}
let src = (*source_file.src.as_ref().unwrap()).clone();
StringReader {
sess,
next_pos: source_file.start_pos,
pos: source_file.start_pos,
ch: Some('\n'),
source_file,
end_src_index: src.len(),
peek_token: Token::dummy(),
peek_span_src_raw: syntax_pos::DUMMY_SP,
src,
fatal_errs: Vec::new(),
override_span,
}
}
pub fn new_or_buffered_errs(sess: &'a ParseSess,
source_file: Lrc<syntax_pos::SourceFile>,
override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
let mut sr = StringReader::new_raw(sess, source_file, override_span);
if sr.advance_token().is_err() {
Err(sr.buffer_fatal_errors())
} else {
Ok(sr)
}
}
pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
let begin = sess.source_map().lookup_byte_offset(span.lo());
let end = sess.source_map().lookup_byte_offset(span.hi());
// Make the range zero-length if the span is invalid.
if span.lo() > span.hi() || begin.sf.start_pos != end.sf.start_pos {
span = span.shrink_to_lo();
}
let mut sr = StringReader::new_raw_internal(sess, begin.sf, None);
// Seek the lexer to the right byte range.
sr.next_pos = span.lo();
sr.end_src_index = sr.src_index(span.hi());
sr.bump();
if sr.advance_token().is_err() {
sr.emit_fatal_errors();
FatalError.raise();
}
sr
}
#[inline]
fn ch_is(&self, c: char) -> bool {
self.ch == Some(c)
@ -269,30 +228,6 @@ impl<'a> StringReader<'a> {
self.sess.span_diagnostic.struct_span_fatal(self.mk_sp(from_pos, to_pos), &m[..])
}
/// Advance peek_token to refer to the next token, and
/// possibly update the interner.
fn advance_token(&mut self) -> Result<(), ()> {
match self.scan_whitespace_or_comment() {
Some(comment) => {
self.peek_span_src_raw = comment.span;
self.peek_token = comment;
}
None => {
let (kind, start_pos, end_pos) = if self.is_eof() {
(token::Eof, self.source_file.end_pos, self.source_file.end_pos)
} else {
let start_pos = self.pos;
(self.next_token_inner()?, start_pos, self.pos)
};
let (real, raw) = self.mk_sp_and_raw(start_pos, end_pos);
self.peek_token = Token::new(kind, real);
self.peek_span_src_raw = raw;
}
}
Ok(())
}
#[inline]
fn src_index(&self, pos: BytePos) -> usize {
(pos - self.source_file.start_pos).to_usize()
@ -1462,12 +1397,7 @@ mod tests {
teststr: String)
-> StringReader<'a> {
let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
let mut sr = StringReader::new_raw(sess, sf, None);
if sr.advance_token().is_err() {
sr.emit_fatal_errors();
FatalError.raise();
}
sr
StringReader::new(sess, sf, None)
}
#[test]
@ -1489,17 +1419,17 @@ mod tests {
assert_eq!(tok1.kind, tok2.kind);
assert_eq!(tok1.span, tok2.span);
assert_eq!(string_reader.next_token(), token::Whitespace);
// the 'main' id is already read:
assert_eq!(string_reader.pos.clone(), BytePos(28));
// read another token:
let tok3 = string_reader.next_token();
assert_eq!(string_reader.pos.clone(), BytePos(28));
let tok4 = Token::new(
mk_ident("main"),
Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
);
assert_eq!(tok3.kind, tok4.kind);
assert_eq!(tok3.span, tok4.span);
// the lparen is already read:
assert_eq!(string_reader.next_token(), token::OpenDelim(token::Paren));
assert_eq!(string_reader.pos.clone(), BytePos(29))
})
}

View File

@ -4,13 +4,14 @@ use crate::print::pprust::token_to_string;
use crate::parse::lexer::{StringReader, UnmatchedBrace};
use crate::parse::token::{self, Token};
use crate::parse::PResult;
use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
use crate::tokenstream::{DelimSpan, IsJoint::{self, *}, TokenStream, TokenTree, TreeAndJoint};
impl<'a> StringReader<'a> {
crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
let mut tt_reader = TokenTreesReader {
string_reader: self,
token: Token::dummy(),
joint_to_prev: Joint,
open_braces: Vec::new(),
unmatched_braces: Vec::new(),
matching_delim_spans: Vec::new(),
@ -24,6 +25,7 @@ impl<'a> StringReader<'a> {
struct TokenTreesReader<'a> {
string_reader: StringReader<'a>,
token: Token,
joint_to_prev: IsJoint,
/// Stack of open delimiters and their spans. Used for error message.
open_braces: Vec<(token::DelimToken, Span)>,
unmatched_braces: Vec<UnmatchedBrace>,
@ -203,21 +205,26 @@ impl<'a> TokenTreesReader<'a> {
},
_ => {
let tt = TokenTree::Token(self.token.take());
// Note that testing for joint-ness here is done via the raw
// source span as the joint-ness is a property of the raw source
// rather than wanting to take `override_span` into account.
// Additionally, we actually check if the *next* pair of tokens
// is joint, but this is equivalent to checking the current pair.
let raw = self.string_reader.peek_span_src_raw;
self.real_token();
let is_joint = raw.hi() == self.string_reader.peek_span_src_raw.lo()
&& self.token.is_op();
let is_joint = self.joint_to_prev == Joint && self.token.is_op();
Ok((tt, if is_joint { Joint } else { NonJoint }))
}
}
}
fn real_token(&mut self) {
self.token = self.string_reader.real_token();
self.joint_to_prev = Joint;
loop {
let token = self.string_reader.next_token();
match token.kind {
token::Whitespace | token::Comment | token::Shebang(_) => {
self.joint_to_prev = NonJoint;
}
_ => {
self.token = token;
return;
},
}
}
}
}
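The replacement `real_token` above folds the old lookahead-based joint-ness check into a flag: any skipped whitespace or comment breaks joint-ness with the previous token. A self-contained sketch of that loop over a simplified token kind (not the real rustc types):

```rust
#[derive(Debug, PartialEq, Clone, Copy)]
enum Kind { Whitespace, Comment, Op, Ident }

#[derive(Debug, PartialEq, Clone, Copy)]
enum IsJoint { Joint, NonJoint }

// Return the next non-trivia token together with its joint-ness to the
// previous token; skipping any trivia downgrades it to NonJoint.
fn real_token(tokens: &mut impl Iterator<Item = Kind>) -> Option<(Kind, IsJoint)> {
    let mut joint = IsJoint::Joint;
    for kind in tokens {
        match kind {
            Kind::Whitespace | Kind::Comment => joint = IsJoint::NonJoint,
            _ => return Some((kind, joint)),
        }
    }
    None
}

fn main() {
    let mut toks = vec![Kind::Op, Kind::Op, Kind::Whitespace, Kind::Ident].into_iter();
    assert_eq!(real_token(&mut toks), Some((Kind::Op, IsJoint::Joint)));
    assert_eq!(real_token(&mut toks), Some((Kind::Op, IsJoint::Joint)));
    assert_eq!(real_token(&mut toks), Some((Kind::Ident, IsJoint::NonJoint)));
}
```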

View File

@ -308,7 +308,7 @@ pub fn maybe_file_to_stream(
source_file: Lrc<SourceFile>,
override_span: Option<Span>,
) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
let srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
let srdr = lexer::StringReader::new(sess, source_file, override_span);
let (token_trees, unmatched_braces) = srdr.into_token_trees();
match token_trees {

View File

@ -1,5 +1,10 @@
#![allow(const_err)] // make sure we cannot allow away the errors tested here
#[repr(transparent)]
#[derive(Copy, Clone)]
struct Wrap<T>(T);
#[repr(usize)]
#[derive(Copy, Clone)]
enum Enum {
@ -7,11 +12,20 @@ enum Enum {
}
union TransmuteEnum {
in1: &'static u8,
in2: usize,
out1: Enum,
out2: Wrap<Enum>,
}
// A pointer is guaranteed non-null
const BAD_ENUM: Enum = unsafe { TransmuteEnum { in1: &1 }.out1 };
const GOOD_ENUM: Enum = unsafe { TransmuteEnum { in2: 0 }.out1 };
const BAD_ENUM: Enum = unsafe { TransmuteEnum { in2: 1 }.out1 };
//~^ ERROR is undefined behavior
const BAD_ENUM_PTR: Enum = unsafe { TransmuteEnum { in1: &1 }.out1 };
//~^ ERROR is undefined behavior
const BAD_ENUM_WRAPPED: Wrap<Enum> = unsafe { TransmuteEnum { in1: &1 }.out2 };
//~^ ERROR is undefined behavior
// (Potentially) invalid enum discriminant
@ -20,9 +34,7 @@ const BAD_ENUM: Enum = unsafe { TransmuteEnum { in1: &1 }.out1 };
enum Enum2 {
A = 2,
}
#[repr(transparent)]
#[derive(Copy, Clone)]
struct Wrap<T>(T);
union TransmuteEnum2 {
in1: usize,
in2: &'static u8,
@ -33,17 +45,17 @@ union TransmuteEnum2 {
}
const BAD_ENUM2: Enum2 = unsafe { TransmuteEnum2 { in1: 0 }.out1 };
//~^ ERROR is undefined behavior
const BAD_ENUM3: Enum2 = unsafe { TransmuteEnum2 { in2: &0 }.out1 };
const BAD_ENUM2_PTR: Enum2 = unsafe { TransmuteEnum2 { in2: &0 }.out1 };
//~^ ERROR is undefined behavior
const BAD_ENUM4: Wrap<Enum2> = unsafe { TransmuteEnum2 { in2: &0 }.out2 };
const BAD_ENUM2_WRAPPED: Wrap<Enum2> = unsafe { TransmuteEnum2 { in2: &0 }.out2 };
//~^ ERROR is undefined behavior
// Undef enum discriminant.
const BAD_ENUM_UNDEF : Enum2 = unsafe { TransmuteEnum2 { in3: () }.out1 };
const BAD_ENUM2_UNDEF : Enum2 = unsafe { TransmuteEnum2 { in3: () }.out1 };
//~^ ERROR is undefined behavior
// Pointer value in an enum with a niche that is not just 0.
const BAD_ENUM_PTR: Option<Enum2> = unsafe { TransmuteEnum2 { in2: &0 }.out3 };
const BAD_ENUM2_OPTION_PTR: Option<Enum2> = unsafe { TransmuteEnum2 { in2: &0 }.out3 };
//~^ ERROR is undefined behavior
// Invalid enum field content (mostly to test printing of paths for enum tuple
@ -53,7 +65,7 @@ union TransmuteChar {
b: char,
}
// Need to create something which does not clash with enum layout optimizations.
const BAD_ENUM_CHAR: Option<(char, char)> = Some(('x', unsafe { TransmuteChar { a: !0 }.b }));
const BAD_OPTION_CHAR: Option<(char, char)> = Some(('x', unsafe { TransmuteChar { a: !0 }.b }));
//~^ ERROR is undefined behavior
fn main() {

View File

@ -1,13 +1,29 @@
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:14:1
--> $DIR/ub-enum.rs:22:1
|
LL | const BAD_ENUM: Enum = unsafe { TransmuteEnum { in1: &1 }.out1 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a pointer, but expected a valid enum discriminant
LL | const BAD_ENUM: Enum = unsafe { TransmuteEnum { in2: 1 }.out1 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered 1, but expected a valid enum discriminant
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:34:1
--> $DIR/ub-enum.rs:25:1
|
LL | const BAD_ENUM_PTR: Enum = unsafe { TransmuteEnum { in1: &1 }.out1 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a pointer, but expected a valid enum discriminant
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:28:1
|
LL | const BAD_ENUM_WRAPPED: Wrap<Enum> = unsafe { TransmuteEnum { in1: &1 }.out2 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a pointer, but expected something that cannot possibly fail to be equal to 0
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:46:1
|
LL | const BAD_ENUM2: Enum2 = unsafe { TransmuteEnum2 { in1: 0 }.out1 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered 0, but expected a valid enum discriminant
@ -15,45 +31,45 @@ LL | const BAD_ENUM2: Enum2 = unsafe { TransmuteEnum2 { in1: 0 }.out1 };
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:36:1
--> $DIR/ub-enum.rs:48:1
|
LL | const BAD_ENUM3: Enum2 = unsafe { TransmuteEnum2 { in2: &0 }.out1 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a pointer, but expected a valid enum discriminant
LL | const BAD_ENUM2_PTR: Enum2 = unsafe { TransmuteEnum2 { in2: &0 }.out1 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a pointer, but expected a valid enum discriminant
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:38:1
--> $DIR/ub-enum.rs:50:1
|
LL | const BAD_ENUM4: Wrap<Enum2> = unsafe { TransmuteEnum2 { in2: &0 }.out2 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a pointer, but expected something that cannot possibly fail to be in the range 2..=2
LL | const BAD_ENUM2_WRAPPED: Wrap<Enum2> = unsafe { TransmuteEnum2 { in2: &0 }.out2 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a pointer, but expected something that cannot possibly fail to be equal to 2
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:42:1
--> $DIR/ub-enum.rs:54:1
|
LL | const BAD_ENUM_UNDEF : Enum2 = unsafe { TransmuteEnum2 { in3: () }.out1 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered uninitialized bytes, but expected a valid enum discriminant
LL | const BAD_ENUM2_UNDEF : Enum2 = unsafe { TransmuteEnum2 { in3: () }.out1 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered uninitialized bytes, but expected a valid enum discriminant
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:46:1
--> $DIR/ub-enum.rs:58:1
|
LL | const BAD_ENUM_PTR: Option<Enum2> = unsafe { TransmuteEnum2 { in2: &0 }.out3 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a pointer, but expected a valid enum discriminant
LL | const BAD_ENUM2_OPTION_PTR: Option<Enum2> = unsafe { TransmuteEnum2 { in2: &0 }.out3 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a pointer, but expected a valid enum discriminant
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:56:1
--> $DIR/ub-enum.rs:68:1
|
LL | const BAD_ENUM_CHAR: Option<(char, char)> = Some(('x', unsafe { TransmuteChar { a: !0 }.b }));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered 4294967295 at .<downcast-variant(Some)>.0.1, but expected something less or equal to 1114111
LL | const BAD_OPTION_CHAR: Option<(char, char)> = Some(('x', unsafe { TransmuteChar { a: !0 }.b }));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered 4294967295 at .<downcast-variant(Some)>.0.1, but expected something less or equal to 1114111
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error: aborting due to 7 previous errors
error: aborting due to 9 previous errors
For more information about this error, try `rustc --explain E0080`.
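
As a rough runtime sketch of what these ub-enum checks exercise (the enum below is a stand-in of my own, not the exact definition from ub-enum.rs, which is not shown in this diff): the variant is pinned to discriminant 2, so 2 is the only valid bit pattern, and only that value survives a transmute; the renamed consts in the test transmute 0, a pointer, or uninitialized bytes instead and are rejected during const evaluation with E0080.

use std::mem;

// Stand-in for the test's Enum2: a fieldless enum whose single variant is
// pinned to discriminant 2, so 2 is the only valid bit pattern for a value
// of this type.
#[repr(usize)]
#[derive(Debug, Clone, Copy, PartialEq)]
enum Enum2 {
    A = 2,
}

fn main() {
    // The valid discriminant round-trips at runtime without complaint...
    let ok: Enum2 = unsafe { mem::transmute(2usize) };
    assert_eq!(ok, Enum2::A);
    // ...whereas the consts diffed above (BAD_ENUM2, BAD_ENUM2_PTR,
    // BAD_ENUM2_UNDEF, ...) feed in 0, a reference, or uninitialized bytes,
    // and const validation rejects each of them with E0080.
}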

View File

@ -5,9 +5,19 @@ use std::mem;
use std::ptr::NonNull;
use std::num::{NonZeroU8, NonZeroUsize};
const NON_NULL: NonNull<u8> = unsafe { mem::transmute(1usize) };
const NON_NULL_PTR: NonNull<u8> = unsafe { mem::transmute(&1) };
const NULL_PTR: NonNull<u8> = unsafe { mem::transmute(0usize) };
//~^ ERROR it is undefined behavior to use this value
const OUT_OF_BOUNDS_PTR: NonNull<u8> = { unsafe {
//~^ ERROR it is undefined behavior to use this value
let ptr: &(u8, u8, u8) = mem::transmute(&0u8); // &0 gets promoted so it does not dangle
let out_of_bounds_ptr = &ptr.2; // use address-of-field for pointer arithmetic
mem::transmute(out_of_bounds_ptr)
} };
const NULL_U8: NonZeroU8 = unsafe { mem::transmute(0u8) };
//~^ ERROR it is undefined behavior to use this value
const NULL_USIZE: NonZeroUsize = unsafe { mem::transmute(0usize) };
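
The two new consts extend the NonNull coverage: NON_NULL_PTR checks that a genuine reference is accepted, and OUT_OF_BOUNDS_PTR manufactures a pointer past the promoted &0u8 allocation through a field projection. As a small sketch of the invariant being validated, using only the safe checked constructor (this is not part of the test file): NonNull refuses null at runtime exactly as const validation refuses NULL_PTR at compile time.

use std::ptr::NonNull;

fn main() {
    // Runtime counterpart of the NULL_PTR check: the checked constructor
    // returns None for a null pointer instead of producing a NonNull.
    assert!(NonNull::new(std::ptr::null_mut::<u8>()).is_none());

    // A real reference is always non-null, so it is accepted.
    let mut x = 1u8;
    let good = NonNull::new(&mut x as *mut u8).expect("non-null pointer");
    assert_eq!(unsafe { *good.as_ptr() }, 1);
}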

View File

@ -1,5 +1,5 @@
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-nonnull.rs:8:1
--> $DIR/ub-nonnull.rs:11:1
|
LL | const NULL_PTR: NonNull<u8> = unsafe { mem::transmute(0usize) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered 0, but expected something greater or equal to 1
@ -7,7 +7,20 @@ LL | const NULL_PTR: NonNull<u8> = unsafe { mem::transmute(0usize) };
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-nonnull.rs:11:1
--> $DIR/ub-nonnull.rs:14:1
|
LL | / const OUT_OF_BOUNDS_PTR: NonNull<u8> = { unsafe {
LL | |
LL | | let ptr: &(u8, u8, u8) = mem::transmute(&0u8); // &0 gets promoted so it does not dangle
LL | | let out_of_bounds_ptr = &ptr.2; // use address-of-field for pointer arithmetic
LL | | mem::transmute(out_of_bounds_ptr)
LL | | } };
| |____^ type validation failed: encountered a potentially NULL pointer, but expected something that cannot possibly fail to be greater or equal to 1
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-nonnull.rs:21:1
|
LL | const NULL_U8: NonZeroU8 = unsafe { mem::transmute(0u8) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered 0, but expected something greater or equal to 1
@ -15,7 +28,7 @@ LL | const NULL_U8: NonZeroU8 = unsafe { mem::transmute(0u8) };
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-nonnull.rs:13:1
--> $DIR/ub-nonnull.rs:23:1
|
LL | const NULL_USIZE: NonZeroUsize = unsafe { mem::transmute(0usize) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered 0, but expected something greater or equal to 1
@ -23,7 +36,7 @@ LL | const NULL_USIZE: NonZeroUsize = unsafe { mem::transmute(0usize) };
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-nonnull.rs:20:1
--> $DIR/ub-nonnull.rs:30:1
|
LL | const UNINIT: NonZeroU8 = unsafe { Transmute { uninit: () }.out };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered uninitialized bytes, but expected something greater or equal to 1
@ -31,7 +44,7 @@ LL | const UNINIT: NonZeroU8 = unsafe { Transmute { uninit: () }.out };
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-nonnull.rs:28:1
--> $DIR/ub-nonnull.rs:38:1
|
LL | const BAD_RANGE1: RestrictedRange1 = unsafe { RestrictedRange1(42) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered 42, but expected something in the range 10..=30
@ -39,13 +52,13 @@ LL | const BAD_RANGE1: RestrictedRange1 = unsafe { RestrictedRange1(42) };
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-nonnull.rs:34:1
--> $DIR/ub-nonnull.rs:44:1
|
LL | const BAD_RANGE2: RestrictedRange2 = unsafe { RestrictedRange2(20) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered 20, but expected something less or equal to 10, or greater or equal to 30
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
error: aborting due to 6 previous errors
error: aborting due to 7 previous errors
For more information about this error, try `rustc --explain E0080`.
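
The remaining errors in this file cover the NonZero types. For reference only (not taken from the test): the "never zero" niche that const validation enforces here is the same invariant the safe checked constructors enforce at runtime, so a transmuted 0 or uninitialized bytes in a const is reported as E0080.

use std::num::{NonZeroU8, NonZeroUsize};

fn main() {
    // The checked constructors enforce the invariant that the NULL_U8,
    // NULL_USIZE and UNINIT consts above violate via transmute.
    assert!(NonZeroU8::new(0).is_none());
    assert!(NonZeroUsize::new(0).is_none());
    assert_eq!(NonZeroU8::new(7).unwrap().get(), 7);
}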

View File

@ -0,0 +1,9 @@
enum A {
Value(())
}
fn main() {
let a = A::Value(());
a == A::Value;
//~^ ERROR binary operation `==` cannot be applied to type `A`
}

View File

@ -0,0 +1,13 @@
error[E0369]: binary operation `==` cannot be applied to type `A`
--> $DIR/issue-62375.rs:7:7
|
LL | a == A::Value;
| - ^^ -------- fn(()) -> A {A::Value}
| |
| A
|
= note: an implementation of `std::cmp::PartialEq` might be missing for `A`
error: aborting due to previous error
For more information about this error, try `rustc --explain E0369`.
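
The new test pins down the ICE from #62375: `A::Value` with no arguments names the tuple-variant constructor (a fn item), so comparing `A` against `fn(()) -> A {A::Value}` has no `PartialEq` impl and must be reported cleanly as E0369 instead of crashing the compiler. A hedged sketch of a compiling variant of the same comparison, following the note's suggestion (this is not part of the test): derive `PartialEq` and compare against a constructed value.

// Compiles once A implements PartialEq and the right-hand side is a
// constructed variant rather than the variant's constructor function.
#[derive(PartialEq)]
enum A {
    Value(()),
}

fn main() {
    let a = A::Value(());
    assert!(a == A::Value(()));
}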