Rollup merge of #40559 - nagisa:manually-drop, r=alexcrichton

Implement ManuallyDrop

As the RFC has been in FCP for approximately a week without any major comments, I’m taking the opportunity to submit the PR early.
Corey Farwell 2017-04-11 18:36:12 -04:00 committed by GitHub
commit acb43ce573
12 changed files with 147 additions and 64 deletions


@@ -114,6 +114,7 @@
- [loop_break_value](loop-break-value.md)
- [macro_reexport](macro-reexport.md)
- [main](main.md)
- [manually_drop](manually-drop.md)
- [map_entry_recover_keys](map-entry-recover-keys.md)
- [mpsc_select](mpsc-select.md)
- [n16](n16.md)


@@ -44,6 +44,7 @@
#![feature(heap_api)]
#![feature(inclusive_range)]
#![feature(lang_items)]
#![feature(manually_drop)]
#![feature(nonzero)]
#![feature(pattern)]
#![feature(placement_in)]


@@ -1558,7 +1558,7 @@ fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
// performance than with the 2nd method.
//
// All methods were benchmarked, and the 3rd showed best results. So we chose that one.
let mut tmp = NoDrop { value: ptr::read(&v[0]) };
let mut tmp = mem::ManuallyDrop::new(ptr::read(&v[0]));
// Intermediate state of the insertion process is always tracked by `hole`, which
// serves two purposes:
@@ -1571,13 +1571,13 @@ fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
// fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it
// initially held exactly once.
let mut hole = InsertionHole {
src: &mut tmp.value,
src: &mut *tmp,
dest: &mut v[1],
};
ptr::copy_nonoverlapping(&v[1], &mut v[0], 1);
for i in 2..v.len() {
if !is_less(&v[i], &tmp.value) {
if !is_less(&v[i], &*tmp) {
break;
}
ptr::copy_nonoverlapping(&v[i], &mut v[i - 1], 1);
@@ -1587,12 +1587,6 @@ fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
}
}
// Holds a value, but never drops it.
#[allow(unions_with_drop_fields)]
union NoDrop<T> {
value: T
}
// When dropped, copies from `src` into `dest`.
struct InsertionHole<T> {
src: *mut T,
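For context, the `ManuallyDrop`-plus-hole pattern that replaces `NoDrop` here can be sketched standalone. A minimal sketch, assuming the stable `std::mem::ManuallyDrop` that this change eventually produced; the three-element vector is illustrative only:

```rust
use std::mem::ManuallyDrop;
use std::ptr;

// When dropped, copies from `src` into `dest` (mirrors the sort code above).
struct InsertionHole<T> {
    src: *mut T,
    dest: *mut T,
}

impl<T> Drop for InsertionHole<T> {
    fn drop(&mut self) {
        // On unwind (or normal exit), move the copy back into the slice.
        unsafe { ptr::copy_nonoverlapping(self.src, self.dest, 1) };
    }
}

fn main() {
    let mut v = vec![2, 1, 3];
    unsafe {
        // Bitwise copy of v[0]; `ManuallyDrop` keeps it from being dropped
        // on its own, since the slice conceptually still owns the element.
        let mut tmp = ManuallyDrop::new(ptr::read(&v[0]));
        let hole = InsertionHole {
            src: &mut *tmp,
            dest: &mut v[1],
        };
        ptr::copy_nonoverlapping(&v[1], &mut v[0], 1);
        drop(hole); // writes the copy into v[1]
    }
    assert_eq!(v, [1, 2, 3]);
}
```

The invariant is that between the `ptr::read` and the guard's drop, exactly one slot in the slice is "logically empty", so no element is ever dropped twice or leaked even if a comparison panics.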


@@ -691,9 +691,6 @@ extern "rust-intrinsic" {
/// initialize memory previously set to the result of `uninit`.
pub fn uninit<T>() -> T;
/// Moves a value out of scope without running drop glue.
pub fn forget<T>(_: T) -> ();
/// Reinterprets the bits of a value of one type as another type.
///
/// Both types must have the same size. Neither the original, nor the result,


@@ -171,7 +171,7 @@ pub use intrinsics::transmute;
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn forget<T>(t: T) {
unsafe { intrinsics::forget(t) }
ManuallyDrop::new(t);
}
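Worth spelling out why the new `forget` body works: `ManuallyDrop` carries no drop glue, so discarding the temporary at the end of the statement leaks the wrapped value without any unsafe code. A quick sketch against the stable `std::mem::ManuallyDrop` (the `Noisy` type is made up for the demonstration):

```rust
use std::mem::ManuallyDrop;

struct Noisy(&'static str);

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

fn main() {
    let a = Noisy("a");
    // The wrapper is discarded here, but the wrapped value's destructor
    // never runs: this line never prints "dropping b".
    ManuallyDrop::new(Noisy("b"));
    drop(a); // prints "dropping a"
}
```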
/// Returns the size of a type in bytes.
@@ -736,3 +736,121 @@ pub fn discriminant<T>(v: &T) -> Discriminant<T> {
}
}
/// A wrapper to inhibit the compiler from automatically calling `T`'s destructor.
///
/// This wrapper is 0-cost.
///
/// # Examples
///
/// This wrapper helps with explicitly documenting the drop order dependencies between fields of
/// the type:
///
/// ```rust
/// # #![feature(manually_drop)]
/// use std::mem::ManuallyDrop;
/// struct Peach;
/// struct Banana;
/// struct Melon;
/// struct FruitBox {
/// // Immediately clear there's something non-trivial going on with these fields.
/// peach: ManuallyDrop<Peach>,
/// melon: Melon, // Field that's independent of the other two.
/// banana: ManuallyDrop<Banana>,
/// }
///
/// impl Drop for FruitBox {
/// fn drop(&mut self) {
/// unsafe {
/// // The fields' destructors are run in an explicit order, specified in the
/// // intuitive location: the destructor of the structure containing the fields.
/// // Moreover, one can now reorder fields within the struct however much they want.
/// ManuallyDrop::drop(&mut self.peach);
/// ManuallyDrop::drop(&mut self.banana);
/// }
/// // After the destructor for `FruitBox` runs (this function), the destructor for
/// // `melon` is invoked in the usual manner, as it is not wrapped in `ManuallyDrop`.
/// }
/// }
/// ```
#[unstable(feature = "manually_drop", issue = "40673")]
#[allow(unions_with_drop_fields)]
pub union ManuallyDrop<T> { value: T }
impl<T> ManuallyDrop<T> {
/// Wrap a value to be manually dropped.
///
/// # Examples
///
/// ```rust
/// # #![feature(manually_drop)]
/// use std::mem::ManuallyDrop;
/// ManuallyDrop::new(Box::new(()));
/// ```
#[unstable(feature = "manually_drop", issue = "40673")]
#[inline]
pub fn new(value: T) -> ManuallyDrop<T> {
ManuallyDrop { value: value }
}
/// Extract the value from the `ManuallyDrop` container.
///
/// # Examples
///
/// ```rust
/// # #![feature(manually_drop)]
/// use std::mem::ManuallyDrop;
/// let x = ManuallyDrop::new(Box::new(()));
/// let _: Box<()> = ManuallyDrop::into_inner(x);
/// ```
#[unstable(feature = "manually_drop", issue = "40673")]
#[inline]
pub fn into_inner(slot: ManuallyDrop<T>) -> T {
unsafe {
slot.value
}
}
/// Manually drops the contained value.
///
/// # Unsafety
///
/// This function runs the destructor of the contained value and thus the wrapped value
/// now represents uninitialized data. It is up to the user of this method to ensure the
/// uninitialized data is not actually used.
#[unstable(feature = "manually_drop", issue = "40673")]
#[inline]
pub unsafe fn drop(slot: &mut ManuallyDrop<T>) {
ptr::drop_in_place(&mut slot.value)
}
}
#[unstable(feature = "manually_drop", issue = "40673")]
impl<T> ::ops::Deref for ManuallyDrop<T> {
type Target = T;
#[inline]
fn deref(&self) -> &Self::Target {
unsafe {
&self.value
}
}
}
#[unstable(feature = "manually_drop", issue = "40673")]
impl<T> ::ops::DerefMut for ManuallyDrop<T> {
#[inline]
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe {
&mut self.value
}
}
}
#[unstable(feature = "manually_drop", issue = "40673")]
impl<T: ::fmt::Debug> ::fmt::Debug for ManuallyDrop<T> {
fn fmt(&self, fmt: &mut ::fmt::Formatter) -> ::fmt::Result {
unsafe {
fmt.debug_tuple("ManuallyDrop").field(&self.value).finish()
}
}
}
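Taken together, the impls above give three ways to interact with the wrapped value: borrow it through `Deref`/`DerefMut`, move it back out with `into_inner`, or destroy it in place with `drop`. A usage sketch, written against the stable form of this API (the `Guard` type is hypothetical):

```rust
use std::mem::ManuallyDrop;

struct Guard(String);

fn main() {
    // 1. Deref: use the wrapped value in place.
    let wrapped = ManuallyDrop::new(Guard("deref".to_string()));
    println!("{}", wrapped.0.len());

    // 2. into_inner: move the value back out; it drops normally afterwards.
    let guard: Guard = ManuallyDrop::into_inner(wrapped);
    drop(guard);

    // 3. drop: run the destructor in place; the slot must not be used again.
    let mut leaked = ManuallyDrop::new(Guard("drop".to_string()));
    unsafe { ManuallyDrop::drop(&mut leaked) };
    // `leaked` now holds logically uninitialized data.
}
```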


@@ -20,12 +20,6 @@ use cmp;
use mem;
use ptr;
/// Holds a value, but never drops it.
#[allow(unions_with_drop_fields)]
union NoDrop<T> {
value: T
}
/// When dropped, copies from `src` into `dest`.
struct CopyOnDrop<T> {
src: *mut T,
@@ -49,15 +43,15 @@ fn shift_head<T, F>(v: &mut [T], is_less: &mut F)
// Read the first element into a stack-allocated variable. If a following comparison
// operation panics, `hole` will get dropped and automatically write the element back
// into the slice.
let mut tmp = NoDrop { value: ptr::read(v.get_unchecked(0)) };
let mut tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(0)));
let mut hole = CopyOnDrop {
src: &mut tmp.value,
src: &mut *tmp,
dest: v.get_unchecked_mut(1),
};
ptr::copy_nonoverlapping(v.get_unchecked(1), v.get_unchecked_mut(0), 1);
for i in 2..len {
if !is_less(v.get_unchecked(i), &tmp.value) {
if !is_less(v.get_unchecked(i), &*tmp) {
break;
}
@@ -81,15 +75,15 @@ fn shift_tail<T, F>(v: &mut [T], is_less: &mut F)
// Read the last element into a stack-allocated variable. If a following comparison
// operation panics, `hole` will get dropped and automatically write the element back
// into the slice.
let mut tmp = NoDrop { value: ptr::read(v.get_unchecked(len - 1)) };
let mut tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(len - 1)));
let mut hole = CopyOnDrop {
src: &mut tmp.value,
src: &mut *tmp,
dest: v.get_unchecked_mut(len - 2),
};
ptr::copy_nonoverlapping(v.get_unchecked(len - 2), v.get_unchecked_mut(len - 1), 1);
for i in (0..len-2).rev() {
if !is_less(&tmp.value, v.get_unchecked(i)) {
if !is_less(&*tmp, v.get_unchecked(i)) {
break;
}
@@ -403,12 +397,12 @@ fn partition<T, F>(v: &mut [T], pivot: usize, is_less: &mut F) -> (usize, bool)
// Read the pivot into a stack-allocated variable for efficiency. If a following comparison
// operation panics, the pivot will be automatically written back into the slice.
let mut tmp = NoDrop { value: unsafe { ptr::read(pivot) } };
let mut tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) });
let _pivot_guard = CopyOnDrop {
src: unsafe { &mut tmp.value },
src: &mut *tmp,
dest: pivot,
};
let pivot = unsafe { &tmp.value };
let pivot = &*tmp;
// Find the first pair of out-of-order elements.
let mut l = 0;
@@ -452,12 +446,12 @@ fn partition_equal<T, F>(v: &mut [T], pivot: usize, is_less: &mut F) -> usize
// Read the pivot into a stack-allocated variable for efficiency. If a following comparison
// operation panics, the pivot will be automatically written back into the slice.
let mut tmp = NoDrop { value: unsafe { ptr::read(pivot) } };
let mut tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) });
let _pivot_guard = CopyOnDrop {
src: unsafe { &mut tmp.value },
src: &mut *tmp,
dest: pivot,
};
let pivot = unsafe { &tmp.value };
let pivot = &*tmp;
// Now partition the slice.
let mut l = 0;
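The `_pivot_guard` above is what keeps partitioning panic-safe. A standalone sketch of that guard under a deliberately panicking comparator, assuming the stable `std::mem::ManuallyDrop` (`CopyOnDrop` is reproduced from this file; `panicky_less` is a stand-in):

```rust
use std::mem::ManuallyDrop;
use std::panic::{self, AssertUnwindSafe};
use std::ptr;

// When dropped, copies from `src` into `dest`.
struct CopyOnDrop<T> {
    src: *mut T,
    dest: *mut T,
}

impl<T> Drop for CopyOnDrop<T> {
    fn drop(&mut self) {
        unsafe { ptr::copy_nonoverlapping(self.src, self.dest, 1) };
    }
}

fn main() {
    let mut v = vec![10, 20, 30];
    let result = panic::catch_unwind(AssertUnwindSafe(|| unsafe {
        let pivot: *mut i32 = &mut v[0];
        // Bitwise copy of the pivot; `ManuallyDrop` stops the copy from
        // being dropped on its own.
        let mut tmp = ManuallyDrop::new(ptr::read(pivot));
        let _pivot_guard = CopyOnDrop { src: &mut *tmp, dest: pivot };
        let panicky_less = |_: &i32, _: &i32| -> bool { panic!("comparator failed") };
        // The comparison unwinds; `_pivot_guard` writes the pivot back.
        panicky_less(&*tmp, &v[1]);
    }));
    assert!(result.is_err());
    assert_eq!(v, [10, 20, 30]); // slice is still fully initialized
}
```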


@@ -20,10 +20,11 @@ use std::fmt;
use std::mem;
use std::collections::range::RangeArgument;
use std::collections::Bound::{Excluded, Included, Unbounded};
use std::mem::ManuallyDrop;
pub unsafe trait Array {
type Element;
type PartialStorage: Default + Unsize<[ManuallyDrop<Self::Element>]>;
type PartialStorage: Unsize<[ManuallyDrop<Self::Element>]>;
const LEN: usize;
}
@@ -66,7 +67,7 @@ impl<A: Array> ArrayVec<A> {
pub fn new() -> Self {
ArrayVec {
count: 0,
values: Default::default(),
values: unsafe { ::std::mem::uninitialized() },
}
}
@@ -81,7 +82,7 @@ impl<A: Array> ArrayVec<A> {
/// Panics when the stack vector is full.
pub fn push(&mut self, el: A::Element) {
let arr = &mut self.values as &mut [ManuallyDrop<_>];
arr[self.count] = ManuallyDrop { value: el };
arr[self.count] = ManuallyDrop::new(el);
self.count += 1;
}
@@ -90,8 +91,8 @@ impl<A: Array> ArrayVec<A> {
let arr = &mut self.values as &mut [ManuallyDrop<_>];
self.count -= 1;
unsafe {
let value = ptr::read(&arr[self.count]);
Some(value.value)
let value = ptr::read(&*arr[self.count]);
Some(value)
}
} else {
None
@@ -210,7 +211,7 @@ impl<A: Array> Iterator for Iter<A> {
fn next(&mut self) -> Option<A::Element> {
let arr = &self.store as &[ManuallyDrop<_>];
unsafe {
self.indices.next().map(|i| ptr::read(&arr[i]).value)
self.indices.next().map(|i| ptr::read(&*arr[i]))
}
}
@@ -233,7 +234,7 @@ impl<'a, A: Array> Iterator for Drain<'a, A> {
#[inline]
fn next(&mut self) -> Option<A::Element> {
self.iter.next().map(|elt| unsafe { ptr::read(elt as *const ManuallyDrop<_>).value })
self.iter.next().map(|elt| unsafe { ptr::read(&**elt) })
}
fn size_hint(&self) -> (usize, Option<usize>) {
@@ -295,25 +296,3 @@ impl<'a, A: Array> IntoIterator for &'a mut ArrayVec<A> {
self.iter_mut()
}
}
// FIXME: This should use repr(transparent) from rust-lang/rfcs#1758.
#[allow(unions_with_drop_fields)]
pub union ManuallyDrop<T> {
value: T,
#[allow(dead_code)]
empty: (),
}
impl<T> ManuallyDrop<T> {
fn new() -> ManuallyDrop<T> {
ManuallyDrop {
empty: ()
}
}
}
impl<T> Default for ManuallyDrop<T> {
fn default() -> Self {
ManuallyDrop::new()
}
}
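For readers following the `ArrayVec` changes: the structure now keeps `count` initialized elements inside otherwise-uninitialized `ManuallyDrop` storage, and drops only the initialized prefix. A minimal sketch of the same ownership scheme using today's stable `MaybeUninit` (which later superseded the `ManuallyDrop`-array-plus-`mem::uninitialized` combination used here); `TinyVec` is a made-up name with the capacity fixed at 4:

```rust
use std::mem::MaybeUninit;
use std::ptr;

struct TinyVec<T> {
    values: [MaybeUninit<T>; 4],
    count: usize,
}

impl<T> TinyVec<T> {
    fn new() -> Self {
        TinyVec {
            // An uninitialized array of `MaybeUninit` is itself a valid
            // value; only `values[..count]` are ever live.
            values: unsafe { MaybeUninit::uninit().assume_init() },
            count: 0,
        }
    }

    fn push(&mut self, el: T) {
        assert!(self.count < 4, "capacity exceeded");
        self.values[self.count] = MaybeUninit::new(el);
        self.count += 1;
    }

    fn pop(&mut self) -> Option<T> {
        if self.count == 0 {
            return None;
        }
        self.count -= 1;
        // Move the element out; the slot is logically dead afterwards.
        Some(unsafe { ptr::read(self.values[self.count].as_ptr()) })
    }
}

impl<T> Drop for TinyVec<T> {
    fn drop(&mut self) {
        // Like the ArrayVec above, drop only the initialized prefix.
        for slot in &mut self.values[..self.count] {
            unsafe { ptr::drop_in_place(slot.as_mut_ptr()) };
        }
    }
}

fn main() {
    let mut v = TinyVec::new();
    v.push(String::from("a"));
    v.push(String::from("b"));
    assert_eq!(v.pop().as_deref(), Some("b"));
    // "a" is dropped by `TinyVec`'s destructor.
}
```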


@@ -39,6 +39,7 @@
#![feature(conservative_impl_trait)]
#![feature(discriminant_value)]
#![feature(specialization)]
#![feature(manually_drop)]
#![cfg_attr(unix, feature(libc))]
#![cfg_attr(test, feature(test))]


@@ -186,7 +186,7 @@ pub fn trans_intrinsic_call<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
C_nil(ccx)
}
// Effectively no-ops
"uninit" | "forget" => {
"uninit" => {
C_nil(ccx)
}
"needs_drop" => {


@@ -124,7 +124,6 @@ pub fn check_intrinsic_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
"rustc_peek" => (1, vec![param(0)], param(0)),
"init" => (1, Vec::new(), param(0)),
"uninit" => (1, Vec::new(), param(0)),
"forget" => (1, vec![ param(0) ], tcx.mk_nil()),
"transmute" => (2, vec![ param(0) ], param(1)),
"move_val_init" => {
(1,


@@ -10,10 +10,9 @@
#![feature(core_intrinsics)]
use std::intrinsics::{init, forget};
use std::intrinsics::{init};
// Test that the `forget` and `init` intrinsics are really unsafe
pub fn main() {
let stuff = init::<isize>(); //~ ERROR call to unsafe function requires unsafe
forget(stuff); //~ ERROR call to unsafe function requires unsafe
}