Rollup merge of #21375 - petrochenkov:ssbsl, r=alexcrichton
After PR #19766 added implicit coercions `*mut T -> *const T`, the explicit casts can be removed. (The number of such casts turned out to be relatively small.)
Commit: a79f1921a9
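For context, here is a minimal standalone sketch (not part of this diff; the function names are invented for illustration) of the coercion the cleanup relies on: since #19766, a `*mut T` value coerces implicitly to `*const T` at coercion sites such as function arguments and struct-literal fields, which is why the `as *const T` casts removed below were redundant.

```rust
// Hypothetical example, not taken from the Rust tree: `read_first` and its
// caller exist only to illustrate the `*mut T -> *const T` coercion.
fn read_first(p: *const u8) -> u8 {
    // Safety (for this sketch): the caller passes a pointer to a live u8.
    unsafe { *p }
}

fn main() {
    let mut byte = 42u8;
    let raw: *mut u8 = &mut byte;

    // Before the implicit coercion, this call site needed `raw as *const u8`;
    // now the `*mut u8` argument coerces to `*const u8` on its own.
    assert_eq!(read_first(raw), 42);
}
```

The same pattern applies to the argument and field positions touched in the hunks below.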
@@ -254,7 +254,7 @@ impl<T: Send> Drop for Unique<T> {
         // Copy the object out from the pointer onto the stack,
         // where it is covered by normal Rust destructor semantics
         // and cleans itself up, if necessary
-        ptr::read(self.ptr as *const T);
+        ptr::read(self.ptr);

         // clean-up our allocation
         free(self.ptr as *mut c_void)

@@ -298,7 +298,7 @@ mod imp {
         libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8
     } else {
         let new_ptr = allocate(size, align);
-        ptr::copy_memory(new_ptr, ptr as *const u8, cmp::min(size, old_size));
+        ptr::copy_memory(new_ptr, ptr, cmp::min(size, old_size));
         deallocate(ptr, old_size, align);
         new_ptr
     }

@@ -344,11 +344,11 @@ impl<K, V> Node<K, V> {
     pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
         unsafe {(
             mem::transmute(raw::Slice {
-                data: self.keys.0 as *const K,
+                data: self.keys.0,
                 len: self.len()
             }),
             mem::transmute(raw::Slice {
-                data: self.vals.0 as *const V,
+                data: self.vals.0,
                 len: self.len()
             })
         )}

@@ -368,7 +368,7 @@ impl<K, V> Node<K, V> {
         } else {
             unsafe {
                 mem::transmute(raw::Slice {
-                    data: self.edges.0 as *const Node<K, V>,
+                    data: self.edges.0,
                     len: self.len() + 1
                 })
             }

@@ -88,19 +88,19 @@ impl<T> RingBuf<T> {
     /// Turn ptr into a slice
     #[inline]
     unsafe fn buffer_as_slice(&self) -> &[T] {
-        mem::transmute(RawSlice { data: self.ptr as *const T, len: self.cap })
+        mem::transmute(RawSlice { data: self.ptr, len: self.cap })
     }

     /// Turn ptr into a mut slice
     #[inline]
     unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] {
-        mem::transmute(RawSlice { data: self.ptr as *const T, len: self.cap })
+        mem::transmute(RawSlice { data: self.ptr, len: self.cap })
     }

     /// Moves an element out of the buffer
     #[inline]
     unsafe fn buffer_read(&mut self, off: uint) -> T {
-        ptr::read(self.ptr.offset(off as int) as *const T)
+        ptr::read(self.ptr.offset(off as int))
     }

     /// Writes an element into the buffer, moving it.

@@ -1222,7 +1222,7 @@ fn insertion_sort<T, F>(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> O
                                                 &*buf_v.offset(j),
                                                 (i - j) as uint);
                 ptr::copy_nonoverlapping_memory(buf_v.offset(j),
-                                                &tmp as *const T,
+                                                &tmp,
                                                 1);
                 mem::forget(tmp);
             }

@@ -426,7 +426,7 @@ impl<T> Vec<T> {
     pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
         unsafe {
             mem::transmute(RawSlice {
-                data: *self.ptr as *const T,
+                data: *self.ptr,
                 len: self.len,
             })
         }

@@ -574,7 +574,7 @@ impl<T> Vec<T> {
             let ptr = self.as_mut_ptr().offset(index as int);
             // copy it out, unsafely having a copy of the value on
             // the stack and in the vector at the same time.
-            ret = ptr::read(ptr as *const T);
+            ret = ptr::read(ptr);

             // Shift everything down to fill in that spot.
             ptr::copy_memory(ptr, &*ptr.offset(1), len - index - 1);

@@ -879,7 +879,7 @@ impl<T> Vec<T> {
                 // |     |
                 // end_u end_t

-                let t = ptr::read(pv.start_t as *const T);
+                let t = ptr::read(pv.start_t);
                 // start_u start_t
                 // |       |
                 // +-+-+-+-+-+-+-+-+-+

@@ -1443,7 +1443,7 @@ impl<T> AsSlice<T> for Vec<T> {
     fn as_slice<'a>(&'a self) -> &'a [T] {
         unsafe {
             mem::transmute(RawSlice {
-                data: *self.ptr as *const T,
+                data: *self.ptr,
                 len: self.len
             })
         }

@@ -1806,11 +1806,11 @@ impl<T,U> Drop for PartialVecNonZeroSized<T,U> {

         // We have instances of `U`s and `T`s in `vec`. Destruct them.
         while self.start_u != self.end_u {
-            let _ = ptr::read(self.start_u as *const U); // Run a `U` destructor.
+            let _ = ptr::read(self.start_u); // Run a `U` destructor.
             self.start_u = self.start_u.offset(1);
         }
         while self.start_t != self.end_t {
-            let _ = ptr::read(self.start_t as *const T); // Run a `T` destructor.
+            let _ = ptr::read(self.start_t); // Run a `T` destructor.
             self.start_t = self.start_t.offset(1);
         }
         // After this destructor ran, the destructor of `vec` will run,

@@ -199,7 +199,7 @@ impl AtomicBool {
     #[inline]
     #[stable]
     pub fn load(&self, order: Ordering) -> bool {
-        unsafe { atomic_load(self.v.get() as *const usize, order) > 0 }
+        unsafe { atomic_load(self.v.get(), order) > 0 }
     }

     /// Stores a value into the bool.

@@ -438,7 +438,7 @@ impl AtomicIsize {
     /// ```
     #[inline]
     pub fn load(&self, order: Ordering) -> isize {
-        unsafe { atomic_load(self.v.get() as *const isize, order) }
+        unsafe { atomic_load(self.v.get(), order) }
     }

     /// Stores a value into the isize.

@@ -615,7 +615,7 @@ impl AtomicUsize {
     /// ```
     #[inline]
     pub fn load(&self, order: Ordering) -> usize {
-        unsafe { atomic_load(self.v.get() as *const usize, order) }
+        unsafe { atomic_load(self.v.get(), order) }
     }

     /// Stores a value into the usize.

@@ -796,7 +796,7 @@ impl<T> AtomicPtr<T> {
     #[stable]
     pub fn load(&self, order: Ordering) -> *mut T {
         unsafe {
-            atomic_load(self.p.get() as *const *mut T, order) as *mut T
+            atomic_load(self.p.get(), order) as *mut T
         }
     }

@@ -1070,7 +1070,7 @@ impl AtomicInt {

     #[inline]
     pub fn load(&self, order: Ordering) -> int {
-        unsafe { atomic_load(self.v.get() as *const int, order) }
+        unsafe { atomic_load(self.v.get(), order) }
     }

     #[inline]

@@ -1123,7 +1123,7 @@ impl AtomicUint {

     #[inline]
     pub fn load(&self, order: Ordering) -> uint {
-        unsafe { atomic_load(self.v.get() as *const uint, order) }
+        unsafe { atomic_load(self.v.get(), order) }
     }

     #[inline]

@@ -329,7 +329,7 @@ impl<T> PtrExt for *mut T {
     #[inline]
     #[stable]
     unsafe fn offset(self, count: int) -> *mut T {
-        intrinsics::offset(self as *const T, count) as *mut T
+        intrinsics::offset(self, count) as *mut T
     }

     #[inline]

@@ -741,7 +741,7 @@ macro_rules! make_slice {
            diff / mem::size_of::<$t>()
        };
        unsafe {
-            transmute::<_, $result>(RawSlice { data: $start as *const T, len: len })
+            transmute::<_, $result>(RawSlice { data: $start, len: len })
        }
    }}
}

@@ -1409,7 +1409,7 @@ pub unsafe fn from_raw_buf<'a, T>(p: &'a *const T, len: uint) -> &'a [T] {
 #[inline]
 #[unstable = "should be renamed to from_raw_parts_mut"]
 pub unsafe fn from_raw_mut_buf<'a, T>(p: &'a *mut T, len: uint) -> &'a mut [T] {
-    transmute(RawSlice { data: *p as *const T, len: len })
+    transmute(RawSlice { data: *p, len: len })
 }

 //

@@ -33,7 +33,7 @@ pub struct Builder<'a, 'tcx: 'a> {
 // lot more efficient) than doing str::as_c_str("", ...) every time.
 pub fn noname() -> *const c_char {
     static CNULL: c_char = 0;
-    &CNULL as *const c_char
+    &CNULL
 }

 impl<'a, 'tcx> Builder<'a, 'tcx> {

@@ -128,7 +128,7 @@ mod imp {
            l_sysid: 0,
        };
        let ret = unsafe {
-            libc::fcntl(fd, os::F_SETLKW, &flock as *const os::flock)
+            libc::fcntl(fd, os::F_SETLKW, &flock)
        };
        if ret == -1 {
            let errno = stdos::errno();

@@ -151,7 +151,7 @@ mod imp {
            l_sysid: 0,
        };
        unsafe {
-            libc::fcntl(self.fd, os::F_SETLK, &flock as *const os::flock);
+            libc::fcntl(self.fd, os::F_SETLK, &flock);
            libc::close(self.fd);
        }
    }

@@ -395,9 +395,6 @@ impl<K, V, M: Deref<Target=RawTable<K, V>> + DerefMut> FullBucket<K, V, M> {
     /// This works similarly to `put`, building an `EmptyBucket` out of the
     /// taken bucket.
     pub fn take(mut self) -> (EmptyBucket<K, V, M>, K, V) {
-        let key = self.raw.key as *const K;
-        let val = self.raw.val as *const V;
-
         self.table.size -= 1;

         unsafe {

@@ -408,8 +405,8 @@ impl<K, V, M: Deref<Target=RawTable<K, V>> + DerefMut> FullBucket<K, V, M> {
                    idx: self.idx,
                    table: self.table
                },
-                ptr::read(key),
-                ptr::read(val)
+                ptr::read(self.raw.key),
+                ptr::read(self.raw.val)
            )
        }
    }

@@ -477,8 +474,8 @@ impl<K, V, M: Deref<Target=RawTable<K, V>>> GapThenFull<K, V, M> {
    pub fn shift(mut self) -> Option<GapThenFull<K, V, M>> {
        unsafe {
            *self.gap.raw.hash = mem::replace(&mut *self.full.raw.hash, EMPTY_BUCKET);
-            copy_nonoverlapping_memory(self.gap.raw.key, self.full.raw.key as *const K, 1);
-            copy_nonoverlapping_memory(self.gap.raw.val, self.full.raw.val as *const V, 1);
+            copy_nonoverlapping_memory(self.gap.raw.key, self.full.raw.key, 1);
+            copy_nonoverlapping_memory(self.gap.raw.val, self.full.raw.val, 1);
        }

        let FullBucket { raw: prev_raw, idx: prev_idx, .. } = self.full;

@@ -781,8 +778,8 @@ impl<'a, K, V> Iterator for RevMoveBuckets<'a, K, V> {
            if *self.raw.hash != EMPTY_BUCKET {
                self.elems_left -= 1;
                return Some((
-                    ptr::read(self.raw.key as *const K),
-                    ptr::read(self.raw.val as *const V)
+                    ptr::read(self.raw.key),
+                    ptr::read(self.raw.val)
                ));
            }
        }

@@ -878,8 +875,8 @@ impl<K, V> Iterator for IntoIter<K, V> {
                SafeHash {
                    hash: *bucket.hash,
                },
-                ptr::read(bucket.key as *const K),
-                ptr::read(bucket.val as *const V)
+                ptr::read(bucket.key),
+                ptr::read(bucket.val)
            )
        }
        })

@@ -906,8 +903,8 @@ impl<'a, K, V> Iterator for Drain<'a, K, V> {
                SafeHash {
                    hash: ptr::replace(bucket.hash, EMPTY_BUCKET),
                },
-                ptr::read(bucket.key as *const K),
-                ptr::read(bucket.val as *const V)
+                ptr::read(bucket.key),
+                ptr::read(bucket.val)
            )
        }
        })

@@ -229,7 +229,7 @@ fn print(w: &mut Writer, idx: int, addr: *mut libc::c_void) -> IoResult<()> {
    }

    let mut info: Dl_info = unsafe { intrinsics::init() };
-    if unsafe { dladdr(addr as *const libc::c_void, &mut info) == 0 } {
+    if unsafe { dladdr(addr, &mut info) == 0 } {
        output(w, idx, addr, None)
    } else {
        output(w, idx, addr, Some(unsafe {

@@ -449,7 +449,7 @@ mod imp {
            // destructor as running for this thread so calls to `get` will return
            // `None`.
            *(*ptr).dtor_running.get() = true;
-            ptr::read((*ptr).inner.get() as *const T);
+            ptr::read((*ptr).inner.get());
        }
    }