rollup merge of #21396: japaric/no-parens-in-range

Conflicts:
	src/libsyntax/parse/lexer/comments.rs
Author: Alex Crichton
Date:   2015-01-21 09:15:15 -08:00
Commit: 1646707c6e

38 changed files with 70 additions and 70 deletions
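
Every hunk below makes the same mechanical change: parentheses around the endpoints of a range (slicing) expression are dropped. A minimal sketch (modern Rust, written for this note rather than taken from the diff) of why the two spellings parse identically: the .. operator binds more loosely than arithmetic and unary operators, so the parentheses were redundant.

```rust
fn main() {
    let xs = [1, 2, 3, 4, 5];
    let idx = 1;

    // Old style: range endpoints wrapped in parentheses.
    let with_parens = &xs[(idx + 1)..(xs.len() - 1)];
    // New style after this rollup: the same slice, no parentheses.
    let without_parens = &xs[idx + 1..xs.len() - 1];

    assert_eq!(with_parens, without_parens); // both are &[3, 4]
}
```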

@@ -330,7 +330,7 @@ impl Bitv {
 if extra_bytes > 0 {
 let mut last_word = 0u32;
-for (i, &byte) in bytes[(complete_words*4)..].iter().enumerate() {
+for (i, &byte) in bytes[complete_words*4..].iter().enumerate() {
 last_word |= (reverse_bits(byte) as u32) << (i * 8);
 }
 bitv.storage.push(last_word);

@@ -2158,7 +2158,7 @@ mod tests {
 #[should_fail]
 fn test_slice_out_of_bounds_1() {
 let x: Vec<int> = vec![1, 2, 3, 4, 5];
-&x[(-1)..];
+&x[-1..];
 }
 #[test]
@@ -2172,7 +2172,7 @@ mod tests {
 #[should_fail]
 fn test_slice_out_of_bounds_3() {
 let x: Vec<int> = vec![1, 2, 3, 4, 5];
-&x[(-1)..4];
+&x[-1..4];
 }
 #[test]

@@ -240,7 +240,7 @@ impl<T> SliceExt for [T] {
 #[inline]
 fn init(&self) -> &[T] {
-&self[..(self.len() - 1)]
+&self[..self.len() - 1]
 }
 #[inline]
@@ -449,7 +449,7 @@ impl<T> SliceExt for [T] {
 #[inline]
 fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq {
 let (m, n) = (self.len(), needle.len());
-m >= n && needle == &self[(m-n)..]
+m >= n && needle == &self[m-n..]
 }
 #[unstable]
@@ -973,7 +973,7 @@ impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
 None => self.finish(),
 Some(idx) => {
 let ret = Some(&self.v[..idx]);
-self.v = &self.v[(idx + 1)..];
+self.v = &self.v[idx + 1..];
 ret
 }
 }
@@ -998,7 +998,7 @@ impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> boo
 match self.v.iter().rposition(|x| (self.pred)(x)) {
 None => self.finish(),
 Some(idx) => {
-let ret = Some(&self.v[(idx + 1)..]);
+let ret = Some(&self.v[idx + 1..]);
 self.v = &self.v[..idx];
 ret
 }

@@ -1415,7 +1415,7 @@ impl StrExt for str {
 #[inline]
 fn ends_with(&self, needle: &str) -> bool {
 let (m, n) = (self.len(), needle.len());
-m >= n && needle.as_bytes() == &self.as_bytes()[(m-n)..]
+m >= n && needle.as_bytes() == &self.as_bytes()[m-n..]
 }
 #[inline]

@@ -585,7 +585,7 @@ fn check_randacc_iter<A, T>(a: T, len: uint) where
 fn test_double_ended_flat_map() {
 let u = [0u,1];
 let v = [5u,6,7,8];
-let mut it = u.iter().flat_map(|x| v[(*x)..v.len()].iter());
+let mut it = u.iter().flat_map(|x| v[*x..v.len()].iter());
 assert_eq!(it.next_back().unwrap(), &8);
 assert_eq!(it.next().unwrap(), &5);
 assert_eq!(it.next_back().unwrap(), &7);

@@ -893,7 +893,7 @@ fn each_split_within<F>(ss: &str, lim: uint, mut it: F) -> bool where
 (B, Cr, UnderLim) => { B }
 (B, Cr, OverLim) if (i - last_start + 1) > lim
 => panic!("word starting with {} longer than limit!",
-&ss[last_start..(i + 1)]),
+&ss[last_start..i + 1]),
 (B, Cr, OverLim) => {
 *cont = it(&ss[slice_start..last_end]);
 slice_start = last_start;

@@ -518,7 +518,7 @@ impl<'a> Parser<'a> {
 };
 self.chari = closer;
 let greed = try!(self.get_next_greedy());
-let inner = self.chars[(start+1)..closer].iter().cloned()
+let inner = self.chars[start+1..closer].iter().cloned()
 .collect::<String>();
 // Parse the min and max values from the regex.

@@ -74,7 +74,7 @@ fn lookup_hash<'a, F>(d: rbml::Doc<'a>, mut eq_fn: F, hash: u64) -> Option<rbml:
 let mut ret = None;
 reader::tagged_docs(tagged_doc.doc, belt, |elt| {
 let pos = u64_from_be_bytes(elt.data, elt.start, 4) as uint;
-if eq_fn(&elt.data[(elt.start + 4) .. elt.end]) {
+if eq_fn(&elt.data[elt.start + 4 .. elt.end]) {
 ret = Some(reader::doc_at(d.data, pos).unwrap().doc);
 false
 } else {

@@ -707,7 +707,7 @@ pub fn parse_def_id(buf: &[u8]) -> ast::DefId {
 }
 let crate_part = &buf[0u..colon_idx];
-let def_part = &buf[(colon_idx + 1u)..len];
+let def_part = &buf[colon_idx + 1u..len];
 let crate_num = match str::from_utf8(crate_part).ok().and_then(|s| s.parse::<uint>()) {
 Some(cn) => cn as ast::CrateNum,

@@ -926,7 +926,7 @@ pub fn specialize<'a>(cx: &MatchCheckCtxt, r: &[&'a Pat],
 };
 head.map(|mut head| {
 head.push_all(&r[..col]);
-head.push_all(&r[(col + 1)..]);
+head.push_all(&r[col + 1..]);
 head
 })
 }

@@ -503,7 +503,7 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>,
 0
 };
-for t in tps[..(tps.len() - num_defaults)].iter() {
+for t in tps[..tps.len() - num_defaults].iter() {
 strs.push(ty_to_string(cx, *t))
 }
@@ -511,9 +511,9 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>,
 format!("{}({}){}",
 base,
 if strs[0].starts_with("(") && strs[0].ends_with(",)") {
-&strs[0][1 .. (strs[0].len() - 2)] // Remove '(' and ',)'
+&strs[0][1 .. strs[0].len() - 2] // Remove '(' and ',)'
 } else if strs[0].starts_with("(") && strs[0].ends_with(")") {
-&strs[0][1 .. (strs[0].len() - 1)] // Remove '(' and ')'
+&strs[0][1 .. strs[0].len() - 1] // Remove '(' and ')'
 } else {
 &strs[0][]
 },

@@ -156,7 +156,7 @@ impl FixedBuffer for FixedBuffer64 {
 // While we have at least a full buffer size chunk's worth of data, process that data
 // without copying it into the buffer
 while input.len() - i >= size {
-func(&input[i..(i + size)]);
+func(&input[i..i + size]);
 i += size;
 }

@@ -2085,8 +2085,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
 let msg = format!("Could not find `{}` in `{}`",
 // idx +- 1 to account for the
 // colons on either side
-&mpath[(idx + 1)..],
-&mpath[..(idx - 1)]);
+&mpath[idx + 1..],
+&mpath[..idx - 1]);
 return Failed(Some((span, msg)));
 },
 None => {
@@ -2762,7 +2762,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
 for (i, rib) in ribs.iter().enumerate().rev() {
 match rib.bindings.get(&name).cloned() {
 Some(def_like) => {
-return self.upvarify(&ribs[(i + 1)..], def_like, span);
+return self.upvarify(&ribs[i + 1..], def_like, span);
 }
 None => {
 // Continue.

@@ -1183,7 +1183,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
 // against the archive.
 if sess.lto() {
 let name = cratepath.filename_str().unwrap();
-let name = &name[3..(name.len() - 5)]; // chop off lib/.rlib
+let name = &name[3..name.len() - 5]; // chop off lib/.rlib
 time(sess.time_passes(),
 &format!("altering {}.rlib", name)[],
 (), |()| {

@@ -60,7 +60,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
 let archive = ArchiveRO::open(&path).expect("wanted an rlib");
 let file = path.filename_str().unwrap();
-let file = &file[3..(file.len() - 5)]; // chop off lib/.rlib
+let file = &file[3..file.len() - 5]; // chop off lib/.rlib
 debug!("reading {}", file);
 for i in iter::count(0u, 1) {
 let bc_encoded = time(sess.time_passes(),
@@ -201,7 +201,7 @@ fn extract_compressed_bytecode_size_v1(bc: &[u8]) -> u64 {
 }
 fn read_from_le_bytes<T: Int>(bytes: &[u8], position_in_bytes: uint) -> T {
-let byte_data = &bytes[position_in_bytes..(position_in_bytes + mem::size_of::<T>())];
+let byte_data = &bytes[position_in_bytes..position_in_bytes + mem::size_of::<T>()];
 let data = unsafe {
 *(byte_data.as_ptr() as *const T)
 };

@@ -186,7 +186,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
 if len <= 2 {
 return;
 }
-let sub_paths = &sub_paths[..(len-2)];
+let sub_paths = &sub_paths[..len-2];
 for &(ref span, ref qualname) in sub_paths.iter() {
 self.fmt.sub_mod_ref_str(path.span,
 *span,

@@ -472,7 +472,7 @@ fn enter_default<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 enter_match(bcx, dm, m, col, val, |pats| {
 if pat_is_binding_or_wild(dm, &*pats[col]) {
 let mut r = pats[..col].to_vec();
-r.push_all(&pats[(col + 1)..]);
+r.push_all(&pats[col + 1..]);
 Some(r)
 } else {
 None
@@ -983,7 +983,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
 let dm = &tcx.def_map;
 let mut vals_left = vals[0u..col].to_vec();
-vals_left.push_all(&vals[(col + 1u)..]);
+vals_left.push_all(&vals[col + 1u..]);
 let ccx = bcx.fcx.ccx;
 // Find a real id (we're adding placeholder wildcard patterns, but

@@ -361,7 +361,7 @@ fn llreg_ty(ccx: &CrateContext, cls: &[RegClass]) -> Type {
 }
 _ => unreachable!(),
 };
-let vec_len = llvec_len(&cls[(i + 1u)..]);
+let vec_len = llvec_len(&cls[i + 1u..]);
 let vec_ty = Type::vector(&elt_ty, vec_len as u64 * elts_per_word);
 tys.push(vec_ty);
 i += vec_len;

@@ -1137,7 +1137,7 @@ pub fn get_cleanup_debug_loc_for_ast_node<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
 if let Some(code_snippet) = code_snippet {
 let bytes = code_snippet.as_bytes();
-if bytes.len() > 0 && &bytes[(bytes.len()-1)..] == b"}" {
+if bytes.len() > 0 && &bytes[bytes.len()-1..] == b"}" {
 cleanup_span = Span {
 lo: node_span.hi - codemap::BytePos(1),
 hi: node_span.hi,
@@ -1726,7 +1726,7 @@ fn file_metadata(cx: &CrateContext, full_path: &str) -> DIFile {
 let work_dir = cx.sess().working_dir.as_str().unwrap();
 let file_name =
 if full_path.starts_with(work_dir) {
-&full_path[(work_dir.len() + 1u)..full_path.len()]
+&full_path[work_dir.len() + 1u..full_path.len()]
 } else {
 full_path
 };

@@ -358,7 +358,7 @@ fn path<F, G>(w: &mut fmt::Formatter,
 // This is a documented path, link to it!
 Some((ref fqp, shortty)) if abs_root.is_some() => {
 let mut url = String::from_str(abs_root.unwrap().as_slice());
-let to_link = &fqp[..(fqp.len() - 1)];
+let to_link = &fqp[..fqp.len() - 1];
 for component in to_link.iter() {
 url.push_str(component.as_slice());
 url.push_str("/");

@@ -404,7 +404,7 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> io::IoResult<String>
 search_index.push(IndexItem {
 ty: shortty(item),
 name: item.name.clone().unwrap(),
-path: fqp[..(fqp.len() - 1)].connect("::"),
+path: fqp[..fqp.len() - 1].connect("::"),
 desc: shorter(item.doc_value()).to_string(),
 parent: Some(did),
 });
@@ -559,7 +559,7 @@ fn write_shared(cx: &Context,
 };
 let mut mydst = dst.clone();
-for part in remote_path[..(remote_path.len() - 1)].iter() {
+for part in remote_path[..remote_path.len() - 1].iter() {
 mydst.push(part.as_slice());
 try!(mkdir(&mydst));
 }
@@ -842,7 +842,7 @@ impl DocFolder for Cache {
 clean::StructFieldItem(..) |
 clean::VariantItem(..) => {
 ((Some(*self.parent_stack.last().unwrap()),
-Some(&self.stack[..(self.stack.len() - 1)])),
+Some(&self.stack[..self.stack.len() - 1])),
 false)
 }
 clean::MethodItem(..) => {
@@ -853,13 +853,13 @@ impl DocFolder for Cache {
 let did = *last;
 let path = match self.paths.get(&did) {
 Some(&(_, ItemType::Trait)) =>
-Some(&self.stack[..(self.stack.len() - 1)]),
+Some(&self.stack[..self.stack.len() - 1]),
 // The current stack not necessarily has correlation for
 // where the type was defined. On the other hand,
 // `paths` always has the right information if present.
 Some(&(ref fqp, ItemType::Struct)) |
 Some(&(ref fqp, ItemType::Enum)) =>
-Some(&fqp[..(fqp.len() - 1)]),
+Some(&fqp[..fqp.len() - 1]),
 Some(..) => Some(self.stack.as_slice()),
 None => None
 };
@@ -1185,7 +1185,7 @@ impl Context {
 .collect::<String>();
 match cache().paths.get(&it.def_id) {
 Some(&(ref names, _)) => {
-for name in (&names[..(names.len() - 1)]).iter() {
+for name in (&names[..names.len() - 1]).iter() {
 url.push_str(name.as_slice());
 url.push_str("/");
 }

@@ -1298,7 +1298,7 @@ impl Stack {
 InternalIndex(i) => StackElement::Index(i),
 InternalKey(start, size) => {
 StackElement::Key(str::from_utf8(
-&self.str_buffer[(start as uint) .. (start as uint + size as uint)])
+&self.str_buffer[start as uint .. start as uint + size as uint])
 .unwrap())
 }
 }
@@ -1341,7 +1341,7 @@ impl Stack {
 Some(&InternalIndex(i)) => Some(StackElement::Index(i)),
 Some(&InternalKey(start, size)) => {
 Some(StackElement::Key(str::from_utf8(
-&self.str_buffer[(start as uint) .. (start+size) as uint]
+&self.str_buffer[start as uint .. (start+size) as uint]
 ).unwrap()))
 }
 }

@@ -281,9 +281,9 @@ impl<W: Writer> Writer for LineBufferedWriter<W> {
 fn write(&mut self, buf: &[u8]) -> IoResult<()> {
 match buf.iter().rposition(|&b| b == b'\n') {
 Some(i) => {
-try!(self.inner.write(&buf[..(i + 1)]));
+try!(self.inner.write(&buf[..i + 1]));
 try!(self.inner.flush());
-try!(self.inner.write(&buf[(i + 1)..]));
+try!(self.inner.write(&buf[i + 1..]));
 Ok(())
 }
 None => self.inner.write(buf),

@@ -159,7 +159,7 @@ impl Reader for MemReader {
 let write_len = min(buf.len(), self.buf.len() - self.pos);
 {
-let input = &self.buf[self.pos.. (self.pos + write_len)];
+let input = &self.buf[self.pos.. self.pos + write_len];
 let output = buf.slice_to_mut(write_len);
 assert_eq!(input.len(), output.len());
 slice::bytes::copy_memory(output, input);
@@ -349,7 +349,7 @@ impl<'a> Reader for BufReader<'a> {
 let write_len = min(buf.len(), self.buf.len() - self.pos);
 {
-let input = &self.buf[self.pos.. (self.pos + write_len)];
+let input = &self.buf[self.pos.. self.pos + write_len];
 let output = buf.slice_to_mut(write_len);
 assert_eq!(input.len(), output.len());
 slice::bytes::copy_memory(output, input);

@@ -1449,7 +1449,7 @@ pub trait Buffer: Reader {
 };
 match available.iter().position(|&b| b == byte) {
 Some(i) => {
-res.push_all(&available[..(i + 1)]);
+res.push_all(&available[..i + 1]);
 used = i + 1;
 break
 }

@@ -399,7 +399,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe {
 match name.rposition_elem(&dot) {
 None | Some(0) => None,
 Some(1) if name == b".." => None,
-Some(pos) => Some(&name[(pos+1)..])
+Some(pos) => Some(&name[pos+1..])
 }
 }
 }

@@ -126,7 +126,7 @@ impl GenericPathUnsafe for Path {
 None => {
 self.repr = Path::normalize(filename);
 }
-Some(idx) if &self.repr[(idx+1)..] == b".." => {
+Some(idx) if &self.repr[idx+1..] == b".." => {
 let mut v = Vec::with_capacity(self.repr.len() + 1 + filename.len());
 v.push_all(self.repr.as_slice());
 v.push(SEP_BYTE);
@@ -136,7 +136,7 @@ impl GenericPathUnsafe for Path {
 }
 Some(idx) => {
 let mut v = Vec::with_capacity(idx + 1 + filename.len());
-v.push_all(&self.repr[..(idx+1)]);
+v.push_all(&self.repr[..idx+1]);
 v.push_all(filename);
 // FIXME: this is slow
 self.repr = Path::normalize(v.as_slice());
@@ -178,7 +178,7 @@ impl GenericPath for Path {
 None if b".." == self.repr => self.repr.as_slice(),
 None => dot_static,
 Some(0) => &self.repr[..1],
-Some(idx) if &self.repr[(idx+1)..] == b".." => self.repr.as_slice(),
+Some(idx) if &self.repr[idx+1..] == b".." => self.repr.as_slice(),
 Some(idx) => &self.repr[..idx]
 }
 }
@@ -188,9 +188,9 @@ impl GenericPath for Path {
 None if b"." == self.repr ||
 b".." == self.repr => None,
 None => Some(self.repr.as_slice()),
-Some(idx) if &self.repr[(idx+1)..] == b".." => None,
+Some(idx) if &self.repr[idx+1..] == b".." => None,
 Some(0) if self.repr[1..].is_empty() => None,
-Some(idx) => Some(&self.repr[(idx+1)..])
+Some(idx) => Some(&self.repr[idx+1..])
 }
 }

@@ -428,10 +428,10 @@ impl GenericPath for Path {
 if self.prefix.is_some() {
 Some(Path::new(match self.prefix {
 Some(DiskPrefix) if self.is_absolute() => {
-&self.repr[..(self.prefix_len()+1)]
+&self.repr[..self.prefix_len()+1]
 }
 Some(VerbatimDiskPrefix) => {
-&self.repr[..(self.prefix_len()+1)]
+&self.repr[..self.prefix_len()+1]
 }
 _ => &self.repr[..self.prefix_len()]
 }))
@@ -635,7 +635,7 @@ impl Path {
 Some(_) => {
 let plen = self.prefix_len();
 if repr.len() > plen && repr.as_bytes()[plen] == SEP_BYTE {
-&repr[(plen+1)..]
+&repr[plen+1..]
 } else { &repr[plen..] }
 }
 None if repr.as_bytes()[0] == SEP_BYTE => &repr[1..],
@@ -786,9 +786,9 @@ impl Path {
 }
 Some(UNCPrefix(a,b)) => {
 s.push_str("\\\\");
-s.push_str(&prefix_[2..(a+2)]);
+s.push_str(&prefix_[2..a+2]);
 s.push(SEP);
-s.push_str(&prefix_[(3+a)..(3+a+b)]);
+s.push_str(&prefix_[3+a..3+a+b]);
 }
 Some(_) => s.push_str(prefix_),
 None => ()
@@ -813,7 +813,7 @@ impl Path {
 fn update_sepidx(&mut self) {
 let s = if self.has_nonsemantic_trailing_slash() {
-&self.repr[..(self.repr.len()-1)]
+&self.repr[..self.repr.len()-1]
 } else { &self.repr[] };
 let sep_test: fn(char) -> bool = if !prefix_is_verbatim(self.prefix) {
 is_sep
@@ -1029,7 +1029,7 @@ fn parse_prefix<'a>(mut path: &'a str) -> Option<PathPrefix> {
 None => return None,
 Some(x) => x
 };
-path = &path[(idx_a+1)..];
+path = &path[idx_a+1..];
 let idx_b = path.find(f).unwrap_or(path.len());
 Some((idx_a, idx_b))
 }

@@ -362,7 +362,7 @@ pub fn write(w: &mut Writer) -> IoResult<()> {
 let bytes = unsafe { ffi::c_str_to_bytes(&ptr) };
 match str::from_utf8(bytes) {
 Ok(s) => try!(demangle(w, s)),
-Err(..) => try!(w.write(&bytes[..(bytes.len()-1)])),
+Err(..) => try!(w.write(&bytes[..bytes.len()-1])),
 }
 }
 try!(w.write(&['\n' as u8]));

@@ -525,7 +525,7 @@ impl<'ast> Map<'ast> {
 NodesMatchingSuffix {
 map: self,
 item_name: parts.last().unwrap(),
-in_which: &parts[..(parts.len() - 1)],
+in_which: &parts[..parts.len() - 1],
 idx: 0,
 }
 }

@@ -284,7 +284,7 @@ fn print_maybe_styled(w: &mut EmitterWriter,
 // to be miscolored. We assume this is rare enough that we don't
 // have to worry about it.
 if msg.ends_with("\n") {
-try!(t.write_str(&msg[..(msg.len()-1)]));
+try!(t.write_str(&msg[..msg.len()-1]));
 try!(t.reset());
 try!(t.write_str("\n"));
 } else {

@@ -116,7 +116,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
 if can_trim {
 lines.iter().map(|line| {
-(&line[(i + 1)..line.len()]).to_string()
+(&line[i + 1..line.len()]).to_string()
 }).collect()
 } else {
 lines
@@ -132,7 +132,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
 }
 if comment.starts_with("/*") {
-let lines = comment[3us..(comment.len() - 2us)]
+let lines = comment[3..comment.len() - 2]
 .lines_any()
 .map(|s| s.to_string())
 .collect::<Vec<String> >();

@@ -284,12 +284,12 @@ pub fn parse(file: &mut io::Reader, longnames: bool)
 // Find the offset of the NUL we want to go to
-let nulpos = string_table[(offset as uint) .. (string_table_bytes as uint)]
+let nulpos = string_table[offset as uint .. string_table_bytes as uint]
 .iter().position(|&b| b == 0);
 match nulpos {
 Some(len) => {
 string_map.insert(name.to_string(),
-string_table[(offset as uint) ..
+string_table[offset as uint ..
 (offset as uint + len)].to_vec())
 },
 None => {

@@ -103,7 +103,7 @@ impl Perm {
 let d = idx / self.fact[i] as i32;
 self.cnt[i] = d;
 idx %= self.fact[i] as i32;
-for (place, val) in pp.iter_mut().zip(self.perm.p[..(i+1)].iter()) {
+for (place, val) in pp.iter_mut().zip(self.perm.p[..i+1].iter()) {
 *place = (*val) as u8
 }

@@ -97,7 +97,7 @@ fn make_fasta<W: Writer, I: Iterator<Item=u8>>(
 }
 n -= nb;
 line[nb] = '\n' as u8;
-try!(wr.write(&line[..(nb+1)]));
+try!(wr.write(&line[..nb+1]));
 }
 Ok(())
 }

@@ -101,11 +101,11 @@ fn windows_with_carry<F>(bb: &[u8], nn: uint, mut it: F) -> Vec<u8> where
 let len = bb.len();
 while ii < len - (nn - 1u) {
-it(&bb[ii..(ii+nn)]);
+it(&bb[ii..ii+nn]);
 ii += 1u;
 }
-return bb[(len - (nn - 1u))..len].to_vec();
+return bb[len - (nn - 1u)..len].to_vec();
 }
 fn make_sequence_processor(sz: uint,

@@ -21,6 +21,6 @@ pub fn main() {
 // Unsized type.
 let arr: &[_] = &[1us, 2, 3];
-let range = (*arr)..;
+let range = *arr..;
 //~^ ERROR the trait `core::marker::Sized` is not implemented
 }

@@ -12,7 +12,7 @@
 pub fn main() {
 let r = {
-(&42is)..&42
+&42is..&42
 //~^ ERROR borrowed value does not live long enough
 //~^^ ERROR borrowed value does not live long enough
 };