std: Stabilize some `ptr` functions

Specifically, the following actions were taken:

* The `copy_memory` and `copy_nonoverlapping_memory` functions were renamed
  to drop the `_memory` suffix (as it's implied by the functionality). Both
  functions are now marked as `#[stable]`.
* The `set_memory` function was renamed to `write_bytes` and is now stable.
* The `zero_memory` function is now deprecated in favor of `write_bytes`
  directly.
* The `Unique` pointer type is now behind its own feature gate called `unique`
  to facilitate future stabilization.
* All type parameters now are `T: ?Sized` wherever possible and new clauses were
  added to the `offset` functions to require that the type is sized.

[breaking-change]
This commit is contained in:
Alex Crichton 2015-02-23 11:39:16 -08:00
parent 0bd15657d9
commit ab45694198
15 changed files with 126 additions and 122 deletions

View File

@ -73,6 +73,7 @@
#![feature(unboxed_closures)]
#![feature(unsafe_no_drop_flag)]
#![feature(core)]
#![feature(unique)]
#![cfg_attr(test, feature(test, alloc, rustc_private))]
#![cfg_attr(all(not(feature = "external_funcs"), not(feature = "external_crate")),
feature(libc))]

View File

@ -1136,12 +1136,12 @@ impl<K, V> Node<K, V> {
// This must be followed by insert_edge on an internal node.
#[inline]
unsafe fn insert_kv(&mut self, index: usize, key: K, val: V) -> &mut V {
ptr::copy_memory(
ptr::copy(
self.keys_mut().as_mut_ptr().offset(index as isize + 1),
self.keys().as_ptr().offset(index as isize),
self.len() - index
);
ptr::copy_memory(
ptr::copy(
self.vals_mut().as_mut_ptr().offset(index as isize + 1),
self.vals().as_ptr().offset(index as isize),
self.len() - index
@ -1158,7 +1158,7 @@ impl<K, V> Node<K, V> {
// This can only be called immediately after a call to insert_kv.
#[inline]
unsafe fn insert_edge(&mut self, index: usize, edge: Node<K, V>) {
ptr::copy_memory(
ptr::copy(
self.edges_mut().as_mut_ptr().offset(index as isize + 1),
self.edges().as_ptr().offset(index as isize),
self.len() - index
@ -1191,12 +1191,12 @@ impl<K, V> Node<K, V> {
let key = ptr::read(self.keys().get_unchecked(index));
let val = ptr::read(self.vals().get_unchecked(index));
ptr::copy_memory(
ptr::copy(
self.keys_mut().as_mut_ptr().offset(index as isize),
self.keys().as_ptr().offset(index as isize + 1),
self.len() - index - 1
);
ptr::copy_memory(
ptr::copy(
self.vals_mut().as_mut_ptr().offset(index as isize),
self.vals().as_ptr().offset(index as isize + 1),
self.len() - index - 1
@ -1212,7 +1212,7 @@ impl<K, V> Node<K, V> {
unsafe fn remove_edge(&mut self, index: usize) -> Node<K, V> {
let edge = ptr::read(self.edges().get_unchecked(index));
ptr::copy_memory(
ptr::copy(
self.edges_mut().as_mut_ptr().offset(index as isize),
self.edges().as_ptr().offset(index as isize + 1),
self.len() - index + 1
@ -1239,18 +1239,18 @@ impl<K, V> Node<K, V> {
unsafe {
right._len = self.len() / 2;
let right_offset = self.len() - right.len();
ptr::copy_nonoverlapping_memory(
ptr::copy_nonoverlapping(
right.keys_mut().as_mut_ptr(),
self.keys().as_ptr().offset(right_offset as isize),
right.len()
);
ptr::copy_nonoverlapping_memory(
ptr::copy_nonoverlapping(
right.vals_mut().as_mut_ptr(),
self.vals().as_ptr().offset(right_offset as isize),
right.len()
);
if !self.is_leaf() {
ptr::copy_nonoverlapping_memory(
ptr::copy_nonoverlapping(
right.edges_mut().as_mut_ptr(),
self.edges().as_ptr().offset(right_offset as isize),
right.len() + 1
@ -1280,18 +1280,18 @@ impl<K, V> Node<K, V> {
ptr::write(self.keys_mut().get_unchecked_mut(old_len), key);
ptr::write(self.vals_mut().get_unchecked_mut(old_len), val);
ptr::copy_nonoverlapping_memory(
ptr::copy_nonoverlapping(
self.keys_mut().as_mut_ptr().offset(old_len as isize + 1),
right.keys().as_ptr(),
right.len()
);
ptr::copy_nonoverlapping_memory(
ptr::copy_nonoverlapping(
self.vals_mut().as_mut_ptr().offset(old_len as isize + 1),
right.vals().as_ptr(),
right.len()
);
if !self.is_leaf() {
ptr::copy_nonoverlapping_memory(
ptr::copy_nonoverlapping(
self.edges_mut().as_mut_ptr().offset(old_len as isize + 1),
right.edges().as_ptr(),
right.len() + 1

View File

@ -30,6 +30,7 @@
#![feature(unboxed_closures)]
#![feature(unicode)]
#![feature(unsafe_destructor)]
#![feature(unique)]
#![feature(unsafe_no_drop_flag)]
#![cfg_attr(test, feature(rand, rustc_private, test))]
#![cfg_attr(test, allow(deprecated))] // rand

View File

@ -1331,12 +1331,10 @@ fn insertion_sort<T, F>(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> O
if i != j {
let tmp = ptr::read(read_ptr);
ptr::copy_memory(buf_v.offset(j + 1),
&*buf_v.offset(j),
(i - j) as usize);
ptr::copy_nonoverlapping_memory(buf_v.offset(j),
&tmp,
1);
ptr::copy(buf_v.offset(j + 1),
&*buf_v.offset(j),
(i - j) as usize);
ptr::copy_nonoverlapping(buf_v.offset(j), &tmp, 1);
mem::forget(tmp);
}
}
@ -1409,10 +1407,10 @@ fn merge_sort<T, F>(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> Order
// j + 1 could be `len` (for the last `i`), but in
// that case, `i == j` so we don't copy. The
// `.offset(j)` is always in bounds.
ptr::copy_memory(buf_dat.offset(j + 1),
&*buf_dat.offset(j),
i - j as usize);
ptr::copy_nonoverlapping_memory(buf_dat.offset(j), read_ptr, 1);
ptr::copy(buf_dat.offset(j + 1),
&*buf_dat.offset(j),
i - j as usize);
ptr::copy_nonoverlapping(buf_dat.offset(j), read_ptr, 1);
}
}
}
@ -1460,11 +1458,11 @@ fn merge_sort<T, F>(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> Order
if left == right_start {
// the number remaining in this run.
let elems = (right_end as usize - right as usize) / mem::size_of::<T>();
ptr::copy_nonoverlapping_memory(out, &*right, elems);
ptr::copy_nonoverlapping(out, &*right, elems);
break;
} else if right == right_end {
let elems = (right_start as usize - left as usize) / mem::size_of::<T>();
ptr::copy_nonoverlapping_memory(out, &*left, elems);
ptr::copy_nonoverlapping(out, &*left, elems);
break;
}
@ -1478,7 +1476,7 @@ fn merge_sort<T, F>(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> Order
} else {
step(&mut left)
};
ptr::copy_nonoverlapping_memory(out, &*to_copy, 1);
ptr::copy_nonoverlapping(out, &*to_copy, 1);
step(&mut out);
}
}
@ -1492,7 +1490,7 @@ fn merge_sort<T, F>(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> Order
// write the result to `v` in one go, so that there are never two copies
// of the same object in `v`.
unsafe {
ptr::copy_nonoverlapping_memory(v.as_mut_ptr(), &*buf_dat, len);
ptr::copy_nonoverlapping(v.as_mut_ptr(), &*buf_dat, len);
}
// increment the pointer, returning the old pointer.

View File

@ -568,9 +568,9 @@ impl String {
let CharRange { ch, next } = self.char_range_at(idx);
unsafe {
ptr::copy_memory(self.vec.as_mut_ptr().offset(idx as isize),
self.vec.as_ptr().offset(next as isize),
len - next);
ptr::copy(self.vec.as_mut_ptr().offset(idx as isize),
self.vec.as_ptr().offset(next as isize),
len - next);
self.vec.set_len(len - (next - idx));
}
ch
@ -598,12 +598,12 @@ impl String {
let amt = ch.encode_utf8(&mut bits).unwrap();
unsafe {
ptr::copy_memory(self.vec.as_mut_ptr().offset((idx + amt) as isize),
self.vec.as_ptr().offset(idx as isize),
len - idx);
ptr::copy_memory(self.vec.as_mut_ptr().offset(idx as isize),
bits.as_ptr(),
amt);
ptr::copy(self.vec.as_mut_ptr().offset((idx + amt) as isize),
self.vec.as_ptr().offset(idx as isize),
len - idx);
ptr::copy(self.vec.as_mut_ptr().offset(idx as isize),
bits.as_ptr(),
amt);
self.vec.set_len(len + amt);
}
}

View File

@ -267,7 +267,7 @@ impl<T> Vec<T> {
pub unsafe fn from_raw_buf(ptr: *const T, elts: usize) -> Vec<T> {
let mut dst = Vec::with_capacity(elts);
dst.set_len(elts);
ptr::copy_nonoverlapping_memory(dst.as_mut_ptr(), ptr, elts);
ptr::copy_nonoverlapping(dst.as_mut_ptr(), ptr, elts);
dst
}
@ -548,7 +548,7 @@ impl<T> Vec<T> {
let p = self.as_mut_ptr().offset(index as isize);
// Shift everything over to make space. (Duplicating the
// `index`th element into two consecutive places.)
ptr::copy_memory(p.offset(1), &*p, len - index);
ptr::copy(p.offset(1), &*p, len - index);
// Write it in, overwriting the first copy of the `index`th
// element.
ptr::write(&mut *p, element);
@ -585,7 +585,7 @@ impl<T> Vec<T> {
ret = ptr::read(ptr);
// Shift everything down to fill in that spot.
ptr::copy_memory(ptr, &*ptr.offset(1), len - index - 1);
ptr::copy(ptr, &*ptr.offset(1), len - index - 1);
}
self.set_len(len - 1);
ret
@ -718,7 +718,7 @@ impl<T> Vec<T> {
self.reserve(other.len());
let len = self.len();
unsafe {
ptr::copy_nonoverlapping_memory(
ptr::copy_nonoverlapping(
self.get_unchecked_mut(len),
other.as_ptr(),
other.len());
@ -1036,7 +1036,7 @@ impl<T> Vec<T> {
self.set_len(at);
other.set_len(other_len);
ptr::copy_nonoverlapping_memory(
ptr::copy_nonoverlapping(
other.as_mut_ptr(),
self.as_ptr().offset(at as isize),
other.len());

View File

@ -134,7 +134,7 @@ impl<T> VecDeque<T> {
self.cap);
debug_assert!(src + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len,
self.cap);
ptr::copy_memory(
ptr::copy(
self.ptr.offset(dst as isize),
self.ptr.offset(src as isize),
len);
@ -147,7 +147,7 @@ impl<T> VecDeque<T> {
self.cap);
debug_assert!(src + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len,
self.cap);
ptr::copy_nonoverlapping_memory(
ptr::copy_nonoverlapping(
self.ptr.offset(dst as isize),
self.ptr.offset(src as isize),
len);
@ -1343,22 +1343,22 @@ impl<T> VecDeque<T> {
// `at` lies in the first half.
let amount_in_first = first_len - at;
ptr::copy_nonoverlapping_memory(*other.ptr,
first_half.as_ptr().offset(at as isize),
amount_in_first);
ptr::copy_nonoverlapping(*other.ptr,
first_half.as_ptr().offset(at as isize),
amount_in_first);
// just take all of the second half.
ptr::copy_nonoverlapping_memory(other.ptr.offset(amount_in_first as isize),
second_half.as_ptr(),
second_len);
ptr::copy_nonoverlapping(other.ptr.offset(amount_in_first as isize),
second_half.as_ptr(),
second_len);
} else {
// `at` lies in the second half, need to factor in the elements we skipped
// in the first half.
let offset = at - first_len;
let amount_in_second = second_len - offset;
ptr::copy_nonoverlapping_memory(*other.ptr,
second_half.as_ptr().offset(offset as isize),
amount_in_second);
ptr::copy_nonoverlapping(*other.ptr,
second_half.as_ptr().offset(offset as isize),
amount_in_second);
}
}

View File

@ -293,7 +293,7 @@ extern "rust-intrinsic" {
/// }
/// }
/// ```
#[unstable(feature = "core")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn copy_nonoverlapping_memory<T>(dst: *mut T, src: *const T, count: usize);
/// Copies `count * size_of<T>` bytes from `src` to `dst`. The source
@ -323,13 +323,12 @@ extern "rust-intrinsic" {
/// }
/// ```
///
#[unstable(feature = "core")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn copy_memory<T>(dst: *mut T, src: *const T, count: usize);
/// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
/// bytes of memory starting at `dst` to `c`.
#[unstable(feature = "core",
reason = "uncertain about naming and semantics")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn set_memory<T>(dst: *mut T, val: u8, count: usize);
/// Equivalent to the appropriate `llvm.memcpy.p0i8.0i8.*` intrinsic, with

View File

@ -203,9 +203,9 @@ pub fn swap<T>(x: &mut T, y: &mut T) {
let mut t: T = uninitialized();
// Perform the swap, `&mut` pointers never alias
ptr::copy_nonoverlapping_memory(&mut t, &*x, 1);
ptr::copy_nonoverlapping_memory(x, &*y, 1);
ptr::copy_nonoverlapping_memory(y, &t, 1);
ptr::copy_nonoverlapping(&mut t, &*x, 1);
ptr::copy_nonoverlapping(x, &*y, 1);
ptr::copy_nonoverlapping(y, &t, 1);
// y and t now point to the same thing, but we need to completely forget `t`
// because it's no longer relevant.

View File

@ -101,16 +101,28 @@ use cmp::Ordering::{self, Less, Equal, Greater};
// FIXME #19649: intrinsic docs don't render, so these have no docs :(
#[unstable(feature = "core")]
pub use intrinsics::copy_nonoverlapping_memory;
#[stable(feature = "rust1", since = "1.0.0")]
pub use intrinsics::copy_nonoverlapping_memory as copy_nonoverlapping;
#[unstable(feature = "core")]
pub use intrinsics::copy_memory;
#[stable(feature = "rust1", since = "1.0.0")]
pub use intrinsics::copy_memory as copy;
#[unstable(feature = "core",
reason = "uncertain about naming and semantics")]
pub use intrinsics::set_memory;
#[stable(feature = "rust1", since = "1.0.0")]
pub use intrinsics::set_memory as write_bytes;
extern "rust-intrinsic" {
#[unstable(feature = "core")]
#[deprecated(since = "1.0.0", reason = "renamed to `copy_nonoverlapping`")]
pub fn copy_nonoverlapping_memory<T>(dst: *mut T, src: *const T, count: usize);
#[unstable(feature = "core")]
#[deprecated(since = "1.0.0", reason = "renamed to `copy`")]
pub fn copy_memory<T>(dst: *mut T, src: *const T, count: usize);
#[unstable(feature = "core",
reason = "uncertain about naming and semantics")]
#[deprecated(since = "1.0.0", reason = "renamed to `write_bytes`")]
pub fn set_memory<T>(dst: *mut T, val: u8, count: usize);
}
/// Creates a null raw pointer.
///
@ -150,8 +162,9 @@ pub fn null_mut<T>() -> *mut T { 0 as *mut T }
#[inline]
#[unstable(feature = "core",
reason = "may play a larger role in std::ptr future extensions")]
#[deprecated(since = "1.0.0", reason = "use `write_bytes` instead")]
pub unsafe fn zero_memory<T>(dst: *mut T, count: usize) {
set_memory(dst, 0, count);
write_bytes(dst, 0, count);
}
/// Swaps the values at two mutable locations of the same type, without
@ -169,9 +182,9 @@ pub unsafe fn swap<T>(x: *mut T, y: *mut T) {
let t: *mut T = &mut tmp;
// Perform the swap
copy_nonoverlapping_memory(t, &*x, 1);
copy_memory(x, &*y, 1); // `x` and `y` may overlap
copy_nonoverlapping_memory(y, &*t, 1);
copy_nonoverlapping(t, &*x, 1);
copy(x, &*y, 1); // `x` and `y` may overlap
copy_nonoverlapping(y, &*t, 1);
// y and t now point to the same thing, but we need to completely forget `tmp`
// because it's no longer relevant.
@ -207,7 +220,7 @@ pub unsafe fn replace<T>(dest: *mut T, mut src: T) -> T {
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn read<T>(src: *const T) -> T {
let mut tmp: T = mem::uninitialized();
copy_nonoverlapping_memory(&mut tmp, src, 1);
copy_nonoverlapping(&mut tmp, src, 1);
tmp
}
@ -224,7 +237,7 @@ pub unsafe fn read_and_zero<T>(dest: *mut T) -> T {
let tmp = read(&*dest);
// Now zero out `dest`:
zero_memory(dest, 1);
write_bytes(dest, 0, 1);
tmp
}
@ -248,9 +261,9 @@ pub unsafe fn write<T>(dst: *mut T, src: T) {
/// Methods on raw pointers
#[stable(feature = "rust1", since = "1.0.0")]
pub trait PtrExt: Sized {
pub trait PtrExt {
/// The type which is being pointed at
type Target;
type Target: ?Sized;
/// Returns true if the pointer is null.
#[stable(feature = "rust1", since = "1.0.0")]
@ -279,14 +292,14 @@ pub trait PtrExt: Sized {
/// Otherwise `offset` invokes Undefined Behaviour, regardless of whether
/// the pointer is used.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe fn offset(self, count: isize) -> Self;
unsafe fn offset(self, count: isize) -> Self where Self::Target: Sized;
}
/// Methods on mutable raw pointers
#[stable(feature = "rust1", since = "1.0.0")]
pub trait MutPtrExt {
/// The type which is being pointed at
type Target;
type Target: ?Sized;
/// Returns `None` if the pointer is null, or else returns a mutable
/// reference to the value wrapped in `Some`.
@ -302,7 +315,7 @@ pub trait MutPtrExt {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> PtrExt for *const T {
impl<T: ?Sized> PtrExt for *const T {
type Target = T;
#[inline]
@ -311,7 +324,7 @@ impl<T> PtrExt for *const T {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
unsafe fn offset(self, count: isize) -> *const T {
unsafe fn offset(self, count: isize) -> *const T where T: Sized {
intrinsics::offset(self, count)
}
@ -329,7 +342,7 @@ impl<T> PtrExt for *const T {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> PtrExt for *mut T {
impl<T: ?Sized> PtrExt for *mut T {
type Target = T;
#[inline]
@ -338,7 +351,7 @@ impl<T> PtrExt for *mut T {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
unsafe fn offset(self, count: isize) -> *mut T {
unsafe fn offset(self, count: isize) -> *mut T where T: Sized {
intrinsics::offset(self, count) as *mut T
}
@ -356,7 +369,7 @@ impl<T> PtrExt for *mut T {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> MutPtrExt for *mut T {
impl<T: ?Sized> MutPtrExt for *mut T {
type Target = T;
#[inline]
@ -374,33 +387,25 @@ impl<T> MutPtrExt for *mut T {
// Equality for pointers
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> PartialEq for *const T {
impl<T: ?Sized> PartialEq for *const T {
#[inline]
fn eq(&self, other: &*const T) -> bool {
*self == *other
}
#[inline]
fn ne(&self, other: &*const T) -> bool { !self.eq(other) }
fn eq(&self, other: &*const T) -> bool { *self == *other }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Eq for *const T {}
impl<T: ?Sized> Eq for *const T {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> PartialEq for *mut T {
impl<T: ?Sized> PartialEq for *mut T {
#[inline]
fn eq(&self, other: &*mut T) -> bool {
*self == *other
}
#[inline]
fn ne(&self, other: &*mut T) -> bool { !self.eq(other) }
fn eq(&self, other: &*mut T) -> bool { *self == *other }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Eq for *mut T {}
impl<T: ?Sized> Eq for *mut T {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for *const T {
impl<T: ?Sized> Clone for *const T {
#[inline]
fn clone(&self) -> *const T {
*self
@ -408,7 +413,7 @@ impl<T> Clone for *const T {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for *mut T {
impl<T: ?Sized> Clone for *mut T {
#[inline]
fn clone(&self) -> *mut T {
*self
@ -452,7 +457,7 @@ mod externfnpointers {
// Comparison for pointers
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Ord for *const T {
impl<T: ?Sized> Ord for *const T {
#[inline]
fn cmp(&self, other: &*const T) -> Ordering {
if self < other {
@ -466,7 +471,7 @@ impl<T> Ord for *const T {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> PartialOrd for *const T {
impl<T: ?Sized> PartialOrd for *const T {
#[inline]
fn partial_cmp(&self, other: &*const T) -> Option<Ordering> {
Some(self.cmp(other))
@ -486,7 +491,7 @@ impl<T> PartialOrd for *const T {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Ord for *mut T {
impl<T: ?Sized> Ord for *mut T {
#[inline]
fn cmp(&self, other: &*mut T) -> Ordering {
if self < other {
@ -500,7 +505,7 @@ impl<T> Ord for *mut T {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> PartialOrd for *mut T {
impl<T: ?Sized> PartialOrd for *mut T {
#[inline]
fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
Some(self.cmp(other))
@ -527,8 +532,8 @@ impl<T> PartialOrd for *mut T {
/// modified without a unique path to the `Unique` reference. Useful
/// for building abstractions like `Vec<T>` or `Box<T>`, which
/// internally use raw pointers to manage the memory that they own.
#[unstable(feature = "core", reason = "recently added to this module")]
pub struct Unique<T:?Sized> {
#[unstable(feature = "unique")]
pub struct Unique<T: ?Sized> {
pointer: NonZero<*const T>,
_marker: PhantomData<T>,
}
@ -537,39 +542,37 @@ pub struct Unique<T:?Sized> {
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
#[unstable(feature = "core", reason = "recently added to this module")]
#[unstable(feature = "unique")]
unsafe impl<T: Send + ?Sized> Send for Unique<T> { }
/// `Unique` pointers are `Sync` if `T` is `Sync` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
#[unstable(feature = "core", reason = "recently added to this module")]
#[unstable(feature = "unique")]
unsafe impl<T: Sync + ?Sized> Sync for Unique<T> { }
impl<T:?Sized> Unique<T> {
impl<T: ?Sized> Unique<T> {
/// Create a new `Unique`.
#[unstable(feature = "core",
reason = "recently added to this module")]
#[unstable(feature = "unique")]
pub unsafe fn new(ptr: *mut T) -> Unique<T> {
Unique { pointer: NonZero::new(ptr as *const T), _marker: PhantomData }
}
/// Dereference the content.
#[unstable(feature = "core",
reason = "recently added to this module")]
#[unstable(feature = "unique")]
pub unsafe fn get(&self) -> &T {
&**self.pointer
}
/// Mutably dereference the content.
#[unstable(feature = "core",
reason = "recently added to this module")]
#[unstable(feature = "unique")]
pub unsafe fn get_mut(&mut self) -> &mut T {
&mut ***self
}
}
#[unstable(feature = "unique")]
impl<T:?Sized> Deref for Unique<T> {
type Target = *mut T;

View File

@ -1500,7 +1500,7 @@ pub mod bytes {
impl MutableByteVector for [u8] {
#[inline]
fn set_memory(&mut self, value: u8) {
unsafe { ptr::set_memory(self.as_mut_ptr(), value, self.len()) };
unsafe { ptr::write_bytes(self.as_mut_ptr(), value, self.len()) };
}
}
@ -1514,9 +1514,9 @@ pub mod bytes {
// `dst` is unaliasable, so we know statically it doesn't overlap
// with `src`.
unsafe {
ptr::copy_nonoverlapping_memory(dst.as_mut_ptr(),
src.as_ptr(),
len_src);
ptr::copy_nonoverlapping(dst.as_mut_ptr(),
src.as_ptr(),
len_src);
}
}
}

View File

@ -27,6 +27,7 @@
#![feature(int_uint)]
#![feature(libc)]
#![feature(staged_api)]
#![feature(unique)]
#[cfg(test)] #[macro_use] extern crate log;

View File

@ -23,7 +23,7 @@ use num::{Int, UnsignedInt};
use ops::{Deref, DerefMut, Drop};
use option::Option;
use option::Option::{Some, None};
use ptr::{self, PtrExt, copy_nonoverlapping_memory, Unique, zero_memory};
use ptr::{self, PtrExt, Unique};
use rt::heap::{allocate, deallocate, EMPTY};
use collections::hash_state::HashState;
@ -477,8 +477,8 @@ impl<K, V, M: Deref<Target=RawTable<K, V>>> GapThenFull<K, V, M> {
pub fn shift(mut self) -> Option<GapThenFull<K, V, M>> {
unsafe {
*self.gap.raw.hash = mem::replace(&mut *self.full.raw.hash, EMPTY_BUCKET);
copy_nonoverlapping_memory(self.gap.raw.key, self.full.raw.key, 1);
copy_nonoverlapping_memory(self.gap.raw.val, self.full.raw.val, 1);
ptr::copy_nonoverlapping(self.gap.raw.key, self.full.raw.key, 1);
ptr::copy_nonoverlapping(self.gap.raw.val, self.full.raw.val, 1);
}
let FullBucket { raw: prev_raw, idx: prev_idx, .. } = self.full;
@ -637,7 +637,7 @@ impl<K, V> RawTable<K, V> {
pub fn new(capacity: usize) -> RawTable<K, V> {
unsafe {
let ret = RawTable::new_uninitialized(capacity);
zero_memory(*ret.hashes, capacity);
ptr::write_bytes(*ret.hashes, 0, capacity);
ret
}
}

View File

@ -155,9 +155,9 @@ impl<W: Write> BufWriter<W> {
if written > 0 {
// NB: would be better expressed as .remove(0..n) if it existed
unsafe {
ptr::copy_memory(self.buf.as_mut_ptr(),
self.buf.as_ptr().offset(written as isize),
len - written);
ptr::copy(self.buf.as_mut_ptr(),
self.buf.as_ptr().offset(written as isize),
len - written);
}
}
self.buf.truncate(len - written);

View File

@ -123,6 +123,7 @@
#![feature(unsafe_no_drop_flag)]
#![feature(macro_reexport)]
#![feature(hash)]
#![feature(unique)]
#![cfg_attr(test, feature(test, rustc_private, env))]
// Don't link to std. We are std.