Auto merge of #38488 - srinivasreddy:rf_collections, r=aturon
run rustfmt on libcollections folder
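The change is purely mechanical: every hunk below is formatter output with no behavioural difference. A pass like this is typically produced by pointing rustfmt at the crate's source files; the exact invocation is not recorded in the commit, so the command below is only an assumed sketch.

    # hypothetical invocation; any rustfmt run over src/libcollections/ yields this kind of diff
    rustfmt src/libcollections/*.rs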
commit 5fbf3bf841
src/libcollections/binary_heap.rs

@@ -225,7 +225,7 @@ pub struct BinaryHeap<T> {
 /// [`peek_mut()`]: struct.BinaryHeap.html#method.peek_mut
 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
 pub struct PeekMut<'a, T: 'a + Ord> {
-    heap: &'a mut BinaryHeap<T>
+    heap: &'a mut BinaryHeap<T>,
 }
 
 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
@@ -385,9 +385,7 @@ impl<T: Ord> BinaryHeap<T> {
         if self.is_empty() {
             None
         } else {
-            Some(PeekMut {
-                heap: self
-            })
+            Some(PeekMut { heap: self })
         }
     }
 
@@ -1126,7 +1124,9 @@ impl<T: Ord> IntoIterator for BinaryHeap<T> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> IntoIterator for &'a BinaryHeap<T> where T: Ord {
+impl<'a, T> IntoIterator for &'a BinaryHeap<T>
+    where T: Ord
+{
     type Item = &'a T;
     type IntoIter = Iter<'a, T>;
 
src/libcollections/borrow.rs

@@ -63,7 +63,9 @@ pub trait ToOwned {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T> ToOwned for T where T: Clone {
+impl<T> ToOwned for T
+    where T: Clone
+{
     type Owned = T;
     fn to_owned(&self) -> T {
         self.clone()
@@ -117,17 +119,19 @@ pub enum Cow<'a, B: ?Sized + 'a>
 {
     /// Borrowed data.
     #[stable(feature = "rust1", since = "1.0.0")]
-    Borrowed(#[stable(feature = "rust1", since = "1.0.0")] &'a B),
+    Borrowed(#[stable(feature = "rust1", since = "1.0.0")]
+             &'a B),
 
     /// Owned data.
     #[stable(feature = "rust1", since = "1.0.0")]
-    Owned(
-        #[stable(feature = "rust1", since = "1.0.0")] <B as ToOwned>::Owned
-    ),
+    Owned(#[stable(feature = "rust1", since = "1.0.0")]
+          <B as ToOwned>::Owned),
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, B: ?Sized> Clone for Cow<'a, B> where B: ToOwned {
+impl<'a, B: ?Sized> Clone for Cow<'a, B>
+    where B: ToOwned
+{
     fn clone(&self) -> Cow<'a, B> {
         match *self {
             Borrowed(b) => Borrowed(b),
@@ -139,7 +143,9 @@ impl<'a, B: ?Sized> Clone for Cow<'a, B> where B: ToOwned {
     }
 }
 
-impl<'a, B: ?Sized> Cow<'a, B> where B: ToOwned {
+impl<'a, B: ?Sized> Cow<'a, B>
+    where B: ToOwned
+{
     /// Acquires a mutable reference to the owned form of the data.
     ///
     /// Clones the data if it is not already owned.
@@ -194,7 +200,9 @@ impl<'a, B: ?Sized> Cow<'a, B> where B: ToOwned {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, B: ?Sized> Deref for Cow<'a, B> where B: ToOwned {
+impl<'a, B: ?Sized> Deref for Cow<'a, B>
+    where B: ToOwned
+{
     type Target = B;
 
     fn deref(&self) -> &B {
@@ -209,7 +217,9 @@ impl<'a, B: ?Sized> Deref for Cow<'a, B> where B: ToOwned {
 impl<'a, B: ?Sized> Eq for Cow<'a, B> where B: Eq + ToOwned {}
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, B: ?Sized> Ord for Cow<'a, B> where B: Ord + ToOwned {
+impl<'a, B: ?Sized> Ord for Cow<'a, B>
+    where B: Ord + ToOwned
+{
     #[inline]
     fn cmp(&self, other: &Cow<'a, B>) -> Ordering {
         Ord::cmp(&**self, &**other)
@@ -228,7 +238,9 @@ impl<'a, 'b, B: ?Sized, C: ?Sized> PartialEq<Cow<'b, C>> for Cow<'a, B>
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, B: ?Sized> PartialOrd for Cow<'a, B> where B: PartialOrd + ToOwned {
+impl<'a, B: ?Sized> PartialOrd for Cow<'a, B>
+    where B: PartialOrd + ToOwned
+{
     #[inline]
     fn partial_cmp(&self, other: &Cow<'a, B>) -> Option<Ordering> {
         PartialOrd::partial_cmp(&**self, &**other)
@@ -273,7 +285,9 @@ impl<'a, B: ?Sized> Default for Cow<'a, B>
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, B: ?Sized> Hash for Cow<'a, B> where B: Hash + ToOwned {
+impl<'a, B: ?Sized> Hash for Cow<'a, B>
+    where B: Hash + ToOwned
+{
     #[inline]
     fn hash<H: Hasher>(&self, state: &mut H) {
         Hash::hash(&**self, state)
src/libcollections/enum_set.rs

@@ -276,7 +276,8 @@ impl<E: CLike> FromIterator<E> for EnumSet<E> {
     }
 }
 
-impl<'a, E> IntoIterator for &'a EnumSet<E> where E: CLike
+impl<'a, E> IntoIterator for &'a EnumSet<E>
+    where E: CLike
 {
     type Item = E;
     type IntoIter = Iter<E>;
src/libcollections/linked_list.rs

@@ -225,15 +225,17 @@ impl<T> LinkedList<T> {
     pub fn append(&mut self, other: &mut Self) {
         match self.tail {
             None => mem::swap(self, other),
-            Some(tail) => if let Some(other_head) = other.head.take() {
-                unsafe {
-                    (**tail).next = Some(other_head);
-                    (**other_head).prev = Some(tail);
-                }
+            Some(tail) => {
+                if let Some(other_head) = other.head.take() {
+                    unsafe {
+                        (**tail).next = Some(other_head);
+                        (**other_head).prev = Some(tail);
+                    }
 
-                self.tail = other.tail.take();
-                self.len += mem::replace(&mut other.len, 0);
-            },
+                    self.tail = other.tail.take();
+                    self.len += mem::replace(&mut other.len, 0);
+                }
+            }
         }
     }
 
@@ -674,7 +676,10 @@ impl<T> LinkedList<T> {
                reason = "method name and placement protocol are subject to change",
                issue = "30172")]
     pub fn front_place(&mut self) -> FrontPlace<T> {
-        FrontPlace { list: self, node: IntermediateBox::make_place() }
+        FrontPlace {
+            list: self,
+            node: IntermediateBox::make_place(),
+        }
     }
 
     /// Returns a place for insertion at the back of the list.
@@ -699,7 +704,10 @@ impl<T> LinkedList<T> {
                reason = "method name and placement protocol are subject to change",
                issue = "30172")]
     pub fn back_place(&mut self) -> BackPlace<T> {
-        BackPlace { list: self, node: IntermediateBox::make_place() }
+        BackPlace {
+            list: self,
+            node: IntermediateBox::make_place(),
+        }
     }
 }
 
@@ -852,7 +860,7 @@ impl<'a, T> IterMut<'a, T> {
                 (**head).prev = node;
 
                 self.list.len += 1;
-            }
+            },
         }
     }
 
@@ -1135,9 +1143,15 @@ impl<'a, T> InPlace<T> for BackPlace<'a, T> {
 // Ensure that `LinkedList` and its read-only iterators are covariant in their type parameters.
 #[allow(dead_code)]
 fn assert_covariance() {
-    fn a<'a>(x: LinkedList<&'static str>) -> LinkedList<&'a str> { x }
-    fn b<'i, 'a>(x: Iter<'i, &'static str>) -> Iter<'i, &'a str> { x }
-    fn c<'a>(x: IntoIter<&'static str>) -> IntoIter<&'a str> { x }
+    fn a<'a>(x: LinkedList<&'static str>) -> LinkedList<&'a str> {
+        x
+    }
+    fn b<'i, 'a>(x: Iter<'i, &'static str>) -> Iter<'i, &'a str> {
+        x
+    }
+    fn c<'a>(x: IntoIter<&'static str>) -> IntoIter<&'a str> {
+        x
+    }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -1298,10 +1312,10 @@ mod tests {
     fn test_send() {
         let n = list_from(&[1, 2, 3]);
         thread::spawn(move || {
-            check_links(&n);
-            let a: &[_] = &[&1, &2, &3];
-            assert_eq!(a, &n.iter().collect::<Vec<_>>()[..]);
-        })
+                check_links(&n);
+                let a: &[_] = &[&1, &2, &3];
+                assert_eq!(a, &n.iter().collect::<Vec<_>>()[..]);
+            })
             .join()
             .ok()
             .unwrap();
src/libcollections/str.rs

@@ -1697,11 +1697,7 @@ impl str {
             debug_assert!('Σ'.len_utf8() == 2);
             let is_word_final = case_ignoreable_then_cased(from[..i].chars().rev()) &&
                                 !case_ignoreable_then_cased(from[i + 2..].chars());
-            to.push_str(if is_word_final {
-                "ς"
-            } else {
-                "σ"
-            });
+            to.push_str(if is_word_final { "ς" } else { "σ" });
         }
 
         fn case_ignoreable_then_cased<I: Iterator<Item = char>>(iter: I) -> bool {
src/libcollections/string.rs

@@ -542,11 +542,7 @@ impl String {
             unsafe { *xs.get_unchecked(i) }
         }
         fn safe_get(xs: &[u8], i: usize, total: usize) -> u8 {
-            if i >= total {
-                0
-            } else {
-                unsafe_get(xs, i)
-            }
+            if i >= total { 0 } else { unsafe_get(xs, i) }
         }
 
         let mut res = String::with_capacity(total);
@@ -976,7 +972,7 @@ impl String {
     pub fn push(&mut self, ch: char) {
         match ch.len_utf8() {
             1 => self.vec.push(ch as u8),
-            _ => self.vec.extend_from_slice(ch.encode_utf8(&mut [0;4]).as_bytes()),
+            _ => self.vec.extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()),
         }
     }
 
@@ -1935,7 +1931,7 @@ impl<'a> FromIterator<String> for Cow<'a, str> {
 
 #[stable(feature = "from_string_for_vec_u8", since = "1.14.0")]
 impl From<String> for Vec<u8> {
-    fn from(string : String) -> Vec<u8> {
+    fn from(string: String) -> Vec<u8> {
         string.into_bytes()
     }
 }
src/libcollections/vec_deque.rs

@@ -206,11 +206,7 @@ impl<T> VecDeque<T> {
     unsafe fn wrap_copy(&self, dst: usize, src: usize, len: usize) {
         #[allow(dead_code)]
         fn diff(a: usize, b: usize) -> usize {
-            if a <= b {
-                b - a
-            } else {
-                a - b
-            }
+            if a <= b { b - a } else { a - b }
         }
         debug_assert!(cmp::min(diff(dst, src), self.cap() - diff(dst, src)) + len <= self.cap(),
                       "wrc dst={} src={} len={} cap={}",
@@ -552,8 +548,8 @@ impl<T> VecDeque<T> {
         let old_cap = self.cap();
         let used_cap = self.len() + 1;
         let new_cap = used_cap.checked_add(additional)
-                              .and_then(|needed_cap| needed_cap.checked_next_power_of_two())
-                              .expect("capacity overflow");
+            .and_then(|needed_cap| needed_cap.checked_next_power_of_two())
+            .expect("capacity overflow");
 
         if new_cap > self.capacity() {
             self.buf.reserve_exact(used_cap, new_cap - used_cap);
@@ -1293,9 +1289,7 @@ impl<T> VecDeque<T> {
 
         let contiguous = self.is_contiguous();
 
-        match (contiguous,
-               distance_to_tail <= distance_to_head,
-               idx >= self.tail) {
+        match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
             (true, true, _) if index == 0 => {
                 // push_front
                 //
@@ -1513,9 +1507,7 @@ impl<T> VecDeque<T> {
 
         let contiguous = self.is_contiguous();
 
-        match (contiguous,
-               distance_to_tail <= distance_to_head,
-               idx >= self.tail) {
+        match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
             (true, true, _) => {
                 unsafe {
                     // contiguous, remove closer to tail:
@@ -1812,7 +1804,7 @@ fn wrap_index(index: usize, size: usize) -> usize {
 }
 
 /// Returns the two slices that cover the VecDeque's valid range
-trait RingSlices : Sized {
+trait RingSlices: Sized {
     fn slice(self, from: usize, to: usize) -> Self;
     fn split_at(self, i: usize) -> (Self, Self);
 
@@ -1895,7 +1887,7 @@ impl<'a, T> Iterator for Iter<'a, T> {
     }
 
     fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
-        where F: FnMut(Acc, Self::Item) -> Acc,
+        where F: FnMut(Acc, Self::Item) -> Acc
     {
         let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
         accum = front.iter().fold(accum, &mut f);
@@ -1959,7 +1951,7 @@ impl<'a, T> Iterator for IterMut<'a, T> {
     }
 
     fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
-        where F: FnMut(Acc, Self::Item) -> Acc,
+        where F: FnMut(Acc, Self::Item) -> Acc
     {
         let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
         accum = front.iter_mut().fold(accum, &mut f);
@@ -2082,17 +2074,15 @@ impl<'a, T: 'a> Drop for Drain<'a, T> {
             (_, 0) => {
                 source_deque.head = drain_tail;
             }
-            _ => {
-                unsafe {
-                    if tail_len <= head_len {
-                        source_deque.tail = source_deque.wrap_sub(drain_head, tail_len);
-                        source_deque.wrap_copy(source_deque.tail, orig_tail, tail_len);
-                    } else {
-                        source_deque.head = source_deque.wrap_add(drain_tail, head_len);
-                        source_deque.wrap_copy(drain_tail, drain_head, head_len);
-                    }
-                }
-            }
+            _ => unsafe {
+                if tail_len <= head_len {
+                    source_deque.tail = source_deque.wrap_sub(drain_head, tail_len);
+                    source_deque.wrap_copy(source_deque.tail, orig_tail, tail_len);
+                } else {
+                    source_deque.head = source_deque.wrap_add(drain_tail, head_len);
+                    source_deque.wrap_copy(drain_tail, drain_head, head_len);
+                }
+            },
         }
     }
 }
@@ -2288,10 +2278,8 @@ impl<T> From<Vec<T>> for VecDeque<T> {
 
             // We need to extend the buf if it's not a power of two, too small
             // or doesn't have at least one free space
-            if !buf.cap().is_power_of_two()
-                || (buf.cap() < (MINIMUM_CAPACITY + 1))
-                || (buf.cap() == len)
-            {
+            if !buf.cap().is_power_of_two() || (buf.cap() < (MINIMUM_CAPACITY + 1)) ||
+               (buf.cap() == len) {
                 let cap = cmp::max(buf.cap() + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
                 buf.reserve_exact(len, cap - len);
             }
@@ -2299,7 +2287,7 @@ impl<T> From<Vec<T>> for VecDeque<T> {
             VecDeque {
                 tail: 0,
                 head: len,
-                buf: buf
+                buf: buf,
             }
         }
     }
@@ -2324,18 +2312,17 @@ impl<T> From<VecDeque<T>> for Vec<T> {
                     // do this in at most three copy moves.
                     if (cap - tail) > head {
                         // right hand block is the long one; move that enough for the left
-                        ptr::copy(
-                            buf.offset(tail as isize),
-                            buf.offset((tail - head) as isize),
-                            cap - tail);
+                        ptr::copy(buf.offset(tail as isize),
+                                  buf.offset((tail - head) as isize),
+                                  cap - tail);
                         // copy left in the end
                         ptr::copy(buf, buf.offset((cap - head) as isize), head);
                         // shift the new thing to the start
-                        ptr::copy(buf.offset((tail-head) as isize), buf, len);
+                        ptr::copy(buf.offset((tail - head) as isize), buf, len);
                     } else {
                         // left hand block is the long one, we can do it in two!
-                        ptr::copy(buf, buf.offset((cap-tail) as isize), head);
-                        ptr::copy(buf.offset(tail as isize), buf, cap-tail);
+                        ptr::copy(buf, buf.offset((cap - tail) as isize), head);
+                        ptr::copy(buf.offset(tail as isize), buf, cap - tail);
                     }
                 } else {
                     // Need to use N swaps to move the ring
@@ -2576,8 +2563,8 @@ mod tests {
 
                 // We should see the correct values in the VecDeque
                 let expected: VecDeque<_> = (0..drain_start)
-                                                .chain(drain_end..len)
-                                                .collect();
+                    .chain(drain_end..len)
+                    .collect();
                 assert_eq!(expected, tester);
             }
         }
@@ -2693,19 +2680,19 @@ mod tests {
         let cap = (2i32.pow(cap_pwr) - 1) as usize;
 
         // In these cases there is enough free space to solve it with copies
-        for len in 0..((cap+1)/2) {
+        for len in 0..((cap + 1) / 2) {
             // Test contiguous cases
-            for offset in 0..(cap-len) {
+            for offset in 0..(cap - len) {
                 create_vec_and_test_convert(cap, offset, len)
             }
 
             // Test cases where block at end of buffer is bigger than block at start
-            for offset in (cap-len)..(cap-(len/2)) {
+            for offset in (cap - len)..(cap - (len / 2)) {
                 create_vec_and_test_convert(cap, offset, len)
             }
 
             // Test cases where block at start of buffer is bigger than block at end
-            for offset in (cap-(len/2))..cap {
+            for offset in (cap - (len / 2))..cap {
                 create_vec_and_test_convert(cap, offset, len)
             }
         }
@@ -2714,19 +2701,19 @@ mod tests {
         // the ring will use swapping when:
         // (cap + 1 - offset) > (cap + 1 - len) && (len - (cap + 1 - offset)) > (cap + 1 - len))
         // right block size > free space && left block size > free space
-        for len in ((cap+1)/2)..cap {
+        for len in ((cap + 1) / 2)..cap {
            // Test contiguous cases
-            for offset in 0..(cap-len) {
+            for offset in 0..(cap - len) {
                 create_vec_and_test_convert(cap, offset, len)
             }
 
             // Test cases where block at end of buffer is bigger than block at start
-            for offset in (cap-len)..(cap-(len/2)) {
+            for offset in (cap - len)..(cap - (len / 2)) {
                 create_vec_and_test_convert(cap, offset, len)
             }
 
             // Test cases where block at start of buffer is bigger than block at end
-            for offset in (cap-(len/2))..cap {
+            for offset in (cap - (len / 2))..cap {
                 create_vec_and_test_convert(cap, offset, len)
             }
         }