rollup merge of #20721: japaric/snap
Conflicts:
    src/libcollections/vec.rs
    src/libcore/fmt/mod.rs
    src/librustc/lint/builtin.rs
    src/librustc/session/config.rs
    src/librustc_trans/trans/base.rs
    src/librustc_trans/trans/context.rs
    src/librustc_trans/trans/type_.rs
    src/librustc_typeck/check/_match.rs
    src/librustdoc/html/format.rs
    src/libsyntax/std_inject.rs
    src/libsyntax/util/interner.rs
    src/test/compile-fail/mut-pattern-mismatched.rs
commit 6e806bdefd
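The hunks below apply one mechanical rewrite across the tree: explicit `Index` calls such as `v.index(&(1..3))` and `s.index(&FullRange)` become slicing expressions (`&v[1..3]`, `&s[]`). A minimal sketch of the resulting idiom, written in current Rust rather than taken from the patch (today's syntax spells the full-range slice `&s[..]` instead of the 2015-era `&s[]`); the variable names are illustrative only:

    fn main() {
        let v = vec![1, 2, 3, 4];

        // Old form on the `-` side of the hunks:  let tail = v.index(&(1..));
        // New form adopted on the `+` side:
        let tail = &v[1..]; // slice from index 1 to the end
        let all = &v[..];   // full-range slice (the old `v.index(&FullRange)` / `&v[]`)
        assert_eq!(tail, &[2, 3, 4]);
        assert_eq!(all, &[1, 2, 3, 4]);

        // The same syntax indexes a String by byte range and yields a &str.
        let s = String::from("error pattern");
        assert_eq!(&s[0..5], "error");
    }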
@@ -908,8 +908,7 @@ fn check_error_patterns(props: &TestProps,
     }
     if done { return; }

-    let missing_patterns =
-        props.error_patterns.index(&(next_err_idx..));
+    let missing_patterns = &props.error_patterns[next_err_idx..];
     if missing_patterns.len() == 1u {
         fatal_proc_rec(format!("error pattern '{}' not found!",
                                missing_patterns[0]).as_slice(),
@@ -330,7 +330,7 @@ impl Bitv {

         if extra_bytes > 0 {
             let mut last_word = 0u32;
-            for (i, &byte) in bytes.index(&((complete_words*4)..)).iter().enumerate() {
+            for (i, &byte) in bytes[(complete_words*4)..].iter().enumerate() {
                 last_word |= (reverse_bits(byte) as u32) << (i * 8);
             }
             bitv.storage.push(last_word);
@@ -556,7 +556,7 @@ impl<T> RingBuf<T> {
         let buf = self.buffer_as_slice();
         if contiguous {
             let (empty, buf) = buf.split_at(0);
-            (buf.index(&(self.tail..self.head)), empty)
+            (&buf[self.tail..self.head], empty)
         } else {
             let (mid, right) = buf.split_at(self.tail);
             let (left, _) = mid.split_at(self.head);
@@ -55,7 +55,7 @@
 //! #![feature(slicing_syntax)]
 //! fn main() {
 //!     let numbers = [0i, 1i, 2i];
-//!     let last_numbers = numbers.index(&(1..3));
+//!     let last_numbers = &numbers[1..3];
 //!     // last_numbers is now &[1i, 2i]
 //! }
 //! ```
@@ -98,7 +98,7 @@ use core::iter::{range, range_step, MultiplicativeIterator};
 use core::marker::Sized;
 use core::mem::size_of;
 use core::mem;
-use core::ops::{FnMut, FullRange, Index, IndexMut};
+use core::ops::{FnMut, FullRange};
 use core::option::Option::{self, Some, None};
 use core::ptr::PtrExt;
 use core::ptr;
@@ -1065,12 +1065,12 @@ impl ElementSwaps {

 #[unstable = "trait is unstable"]
 impl<T> BorrowFrom<Vec<T>> for [T] {
-    fn borrow_from(owned: &Vec<T>) -> &[T] { owned.index(&FullRange) }
+    fn borrow_from(owned: &Vec<T>) -> &[T] { &owned[] }
 }

 #[unstable = "trait is unstable"]
 impl<T> BorrowFromMut<Vec<T>> for [T] {
-    fn borrow_from_mut(owned: &mut Vec<T>) -> &mut [T] { owned.index_mut(&FullRange) }
+    fn borrow_from_mut(owned: &mut Vec<T>) -> &mut [T] { &mut owned[] }
 }

 #[unstable = "trait is unstable"]
@@ -1400,7 +1400,6 @@ mod tests {
     use core::prelude::{Ord, FullRange};
     use core::default::Default;
     use core::mem;
-    use core::ops::Index;
     use std::iter::RandomAccessIterator;
     use std::rand::{Rng, thread_rng};
     use std::rc::Rc;
@@ -1611,7 +1610,7 @@ mod tests {

         // Test on stack.
         let vec_stack: &[_] = &[1i, 2, 3];
-        let v_b = vec_stack.index(&(1u..3u)).to_vec();
+        let v_b = vec_stack[1u..3u].to_vec();
         assert_eq!(v_b.len(), 2u);
         let v_b = v_b.as_slice();
         assert_eq!(v_b[0], 2);
@@ -1619,7 +1618,7 @@ mod tests {

         // Test `Box<[T]>`
         let vec_unique = vec![1i, 2, 3, 4, 5, 6];
-        let v_d = vec_unique.index(&(1u..6u)).to_vec();
+        let v_d = vec_unique[1u..6u].to_vec();
         assert_eq!(v_d.len(), 5u);
         let v_d = v_d.as_slice();
         assert_eq!(v_d[0], 2);
@@ -1632,21 +1631,21 @@ mod tests {
     #[test]
     fn test_slice_from() {
         let vec: &[int] = &[1, 2, 3, 4];
-        assert_eq!(vec.index(&(0..)), vec);
+        assert_eq!(&vec[0..], vec);
         let b: &[int] = &[3, 4];
-        assert_eq!(vec.index(&(2..)), b);
+        assert_eq!(&vec[2..], b);
         let b: &[int] = &[];
-        assert_eq!(vec.index(&(4..)), b);
+        assert_eq!(&vec[4..], b);
     }

     #[test]
     fn test_slice_to() {
         let vec: &[int] = &[1, 2, 3, 4];
-        assert_eq!(vec.index(&(0..4)), vec);
+        assert_eq!(&vec[0..4], vec);
         let b: &[int] = &[1, 2];
-        assert_eq!(vec.index(&(0..2)), b);
+        assert_eq!(&vec[0..2], b);
         let b: &[int] = &[];
-        assert_eq!(vec.index(&(0..0)), b);
+        assert_eq!(&vec[0..0], b);
     }

@@ -2572,7 +2571,7 @@ mod tests {
         }
         assert_eq!(cnt, 3);

-        for f in v.index(&(1..3)).iter() {
+        for f in v[1..3].iter() {
             assert!(*f == Foo);
             cnt += 1;
         }
@@ -386,7 +386,7 @@ macro_rules! utf8_acc_cont_byte {

 #[unstable = "trait is unstable"]
 impl BorrowFrom<String> for str {
-    fn borrow_from(owned: &String) -> &str { owned.index(&FullRange) }
+    fn borrow_from(owned: &String) -> &str { &owned[] }
 }

 #[unstable = "trait is unstable"]
@@ -464,7 +464,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
     #[unstable = "this functionality may be moved to libunicode"]
     fn nfd_chars<'a>(&'a self) -> Decompositions<'a> {
         Decompositions {
-            iter: self.index(&FullRange).chars(),
+            iter: self[].chars(),
             buffer: Vec::new(),
             sorted: false,
             kind: Canonical
@@ -477,7 +477,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
     #[unstable = "this functionality may be moved to libunicode"]
     fn nfkd_chars<'a>(&'a self) -> Decompositions<'a> {
         Decompositions {
-            iter: self.index(&FullRange).chars(),
+            iter: self[].chars(),
             buffer: Vec::new(),
             sorted: false,
             kind: Compatible
@ -525,7 +525,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn contains(&self, pat: &str) -> bool {
|
||||
core_str::StrExt::contains(self.index(&FullRange), pat)
|
||||
core_str::StrExt::contains(&self[], pat)
|
||||
}
|
||||
|
||||
/// Returns true if a string contains a char pattern.
|
||||
@ -541,7 +541,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[unstable = "might get removed in favour of a more generic contains()"]
|
||||
fn contains_char<P: CharEq>(&self, pat: P) -> bool {
|
||||
core_str::StrExt::contains_char(self.index(&FullRange), pat)
|
||||
core_str::StrExt::contains_char(&self[], pat)
|
||||
}
|
||||
|
||||
/// An iterator over the characters of `self`. Note, this iterates
|
||||
@ -555,7 +555,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn chars(&self) -> Chars {
|
||||
core_str::StrExt::chars(self.index(&FullRange))
|
||||
core_str::StrExt::chars(&self[])
|
||||
}
|
||||
|
||||
/// An iterator over the bytes of `self`
|
||||
@ -568,13 +568,13 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn bytes(&self) -> Bytes {
|
||||
core_str::StrExt::bytes(self.index(&FullRange))
|
||||
core_str::StrExt::bytes(&self[])
|
||||
}
|
||||
|
||||
/// An iterator over the characters of `self` and their byte offsets.
|
||||
#[stable]
|
||||
fn char_indices(&self) -> CharIndices {
|
||||
core_str::StrExt::char_indices(self.index(&FullRange))
|
||||
core_str::StrExt::char_indices(&self[])
|
||||
}
|
||||
|
||||
/// An iterator over substrings of `self`, separated by characters
|
||||
@ -597,7 +597,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn split<P: CharEq>(&self, pat: P) -> Split<P> {
|
||||
core_str::StrExt::split(self.index(&FullRange), pat)
|
||||
core_str::StrExt::split(&self[], pat)
|
||||
}
|
||||
|
||||
/// An iterator over substrings of `self`, separated by characters
|
||||
@ -624,7 +624,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn splitn<P: CharEq>(&self, count: uint, pat: P) -> SplitN<P> {
|
||||
core_str::StrExt::splitn(self.index(&FullRange), count, pat)
|
||||
core_str::StrExt::splitn(&self[], count, pat)
|
||||
}
|
||||
|
||||
/// An iterator over substrings of `self`, separated by characters
|
||||
@ -653,7 +653,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[unstable = "might get removed"]
|
||||
fn split_terminator<P: CharEq>(&self, pat: P) -> SplitTerminator<P> {
|
||||
core_str::StrExt::split_terminator(self.index(&FullRange), pat)
|
||||
core_str::StrExt::split_terminator(&self[], pat)
|
||||
}
|
||||
|
||||
/// An iterator over substrings of `self`, separated by characters
|
||||
@ -674,7 +674,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn rsplitn<P: CharEq>(&self, count: uint, pat: P) -> RSplitN<P> {
|
||||
core_str::StrExt::rsplitn(self.index(&FullRange), count, pat)
|
||||
core_str::StrExt::rsplitn(&self[], count, pat)
|
||||
}
|
||||
|
||||
/// An iterator over the start and end indices of the disjoint
|
||||
@ -699,7 +699,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[unstable = "might have its iterator type changed"]
|
||||
fn match_indices<'a>(&'a self, pat: &'a str) -> MatchIndices<'a> {
|
||||
core_str::StrExt::match_indices(self.index(&FullRange), pat)
|
||||
core_str::StrExt::match_indices(&self[], pat)
|
||||
}
|
||||
|
||||
/// An iterator over the substrings of `self` separated by the pattern `sep`.
|
||||
@ -715,7 +715,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[unstable = "might get removed in the future in favor of a more generic split()"]
|
||||
fn split_str<'a>(&'a self, pat: &'a str) -> SplitStr<'a> {
|
||||
core_str::StrExt::split_str(self.index(&FullRange), pat)
|
||||
core_str::StrExt::split_str(&self[], pat)
|
||||
}
|
||||
|
||||
/// An iterator over the lines of a string (subsequences separated
|
||||
@ -731,7 +731,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn lines(&self) -> Lines {
|
||||
core_str::StrExt::lines(self.index(&FullRange))
|
||||
core_str::StrExt::lines(&self[])
|
||||
}
|
||||
|
||||
/// An iterator over the lines of a string, separated by either
|
||||
@ -747,7 +747,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn lines_any(&self) -> LinesAny {
|
||||
core_str::StrExt::lines_any(self.index(&FullRange))
|
||||
core_str::StrExt::lines_any(&self[])
|
||||
}
|
||||
|
||||
/// Returns a slice of the given string from the byte range
|
||||
@ -782,7 +782,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[unstable = "use slice notation [a..b] instead"]
|
||||
fn slice(&self, begin: uint, end: uint) -> &str {
|
||||
core_str::StrExt::slice(self.index(&FullRange), begin, end)
|
||||
core_str::StrExt::slice(&self[], begin, end)
|
||||
}
|
||||
|
||||
/// Returns a slice of the string from `begin` to its end.
|
||||
@ -795,7 +795,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// See also `slice`, `slice_to` and `slice_chars`.
|
||||
#[unstable = "use slice notation [a..] instead"]
|
||||
fn slice_from(&self, begin: uint) -> &str {
|
||||
core_str::StrExt::slice_from(self.index(&FullRange), begin)
|
||||
core_str::StrExt::slice_from(&self[], begin)
|
||||
}
|
||||
|
||||
/// Returns a slice of the string from the beginning to byte
|
||||
@ -809,7 +809,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// See also `slice`, `slice_from` and `slice_chars`.
|
||||
#[unstable = "use slice notation [0..a] instead"]
|
||||
fn slice_to(&self, end: uint) -> &str {
|
||||
core_str::StrExt::slice_to(self.index(&FullRange), end)
|
||||
core_str::StrExt::slice_to(&self[], end)
|
||||
}
|
||||
|
||||
/// Returns a slice of the string from the character range
|
||||
@ -837,7 +837,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[unstable = "may have yet to prove its worth"]
|
||||
fn slice_chars(&self, begin: uint, end: uint) -> &str {
|
||||
core_str::StrExt::slice_chars(self.index(&FullRange), begin, end)
|
||||
core_str::StrExt::slice_chars(&self[], begin, end)
|
||||
}
|
||||
|
||||
/// Takes a bytewise (not UTF-8) slice from a string.
|
||||
@ -848,7 +848,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// the entire slice as well.
|
||||
#[stable]
|
||||
unsafe fn slice_unchecked(&self, begin: uint, end: uint) -> &str {
|
||||
core_str::StrExt::slice_unchecked(self.index(&FullRange), begin, end)
|
||||
core_str::StrExt::slice_unchecked(&self[], begin, end)
|
||||
}
|
||||
|
||||
/// Returns true if the pattern `pat` is a prefix of the string.
|
||||
@ -860,7 +860,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn starts_with(&self, pat: &str) -> bool {
|
||||
core_str::StrExt::starts_with(self.index(&FullRange), pat)
|
||||
core_str::StrExt::starts_with(&self[], pat)
|
||||
}
|
||||
|
||||
/// Returns true if the pattern `pat` is a suffix of the string.
|
||||
@ -872,7 +872,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn ends_with(&self, pat: &str) -> bool {
|
||||
core_str::StrExt::ends_with(self.index(&FullRange), pat)
|
||||
core_str::StrExt::ends_with(&self[], pat)
|
||||
}
|
||||
|
||||
/// Returns a string with all pre- and suffixes that match
|
||||
@ -892,7 +892,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn trim_matches<P: CharEq>(&self, pat: P) -> &str {
|
||||
core_str::StrExt::trim_matches(self.index(&FullRange), pat)
|
||||
core_str::StrExt::trim_matches(&self[], pat)
|
||||
}
|
||||
|
||||
/// Returns a string with all prefixes that match
|
||||
@ -912,7 +912,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn trim_left_matches<P: CharEq>(&self, pat: P) -> &str {
|
||||
core_str::StrExt::trim_left_matches(self.index(&FullRange), pat)
|
||||
core_str::StrExt::trim_left_matches(&self[], pat)
|
||||
}
|
||||
|
||||
/// Returns a string with all suffixes that match
|
||||
@ -932,7 +932,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn trim_right_matches<P: CharEq>(&self, pat: P) -> &str {
|
||||
core_str::StrExt::trim_right_matches(self.index(&FullRange), pat)
|
||||
core_str::StrExt::trim_right_matches(&self[], pat)
|
||||
}
|
||||
|
||||
/// Check that `index`-th byte lies at the start and/or end of a
|
||||
@ -960,7 +960,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[unstable = "naming is uncertain with container conventions"]
|
||||
fn is_char_boundary(&self, index: uint) -> bool {
|
||||
core_str::StrExt::is_char_boundary(self.index(&FullRange), index)
|
||||
core_str::StrExt::is_char_boundary(&self[], index)
|
||||
}
|
||||
|
||||
/// Pluck a character out of a string and return the index of the next
|
||||
@ -1018,7 +1018,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// If `i` is not the index of the beginning of a valid UTF-8 character.
|
||||
#[unstable = "naming is uncertain with container conventions"]
|
||||
fn char_range_at(&self, start: uint) -> CharRange {
|
||||
core_str::StrExt::char_range_at(self.index(&FullRange), start)
|
||||
core_str::StrExt::char_range_at(&self[], start)
|
||||
}
|
||||
|
||||
/// Given a byte position and a str, return the previous char and its position.
|
||||
@ -1033,7 +1033,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// If `i` is not an index following a valid UTF-8 character.
|
||||
#[unstable = "naming is uncertain with container conventions"]
|
||||
fn char_range_at_reverse(&self, start: uint) -> CharRange {
|
||||
core_str::StrExt::char_range_at_reverse(self.index(&FullRange), start)
|
||||
core_str::StrExt::char_range_at_reverse(&self[], start)
|
||||
}
|
||||
|
||||
/// Plucks the character starting at the `i`th byte of a string.
|
||||
@ -1053,7 +1053,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// If `i` is not the index of the beginning of a valid UTF-8 character.
|
||||
#[unstable = "naming is uncertain with container conventions"]
|
||||
fn char_at(&self, i: uint) -> char {
|
||||
core_str::StrExt::char_at(self.index(&FullRange), i)
|
||||
core_str::StrExt::char_at(&self[], i)
|
||||
}
|
||||
|
||||
/// Plucks the character ending at the `i`th byte of a string.
|
||||
@ -1064,7 +1064,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// If `i` is not an index following a valid UTF-8 character.
|
||||
#[unstable = "naming is uncertain with container conventions"]
|
||||
fn char_at_reverse(&self, i: uint) -> char {
|
||||
core_str::StrExt::char_at_reverse(self.index(&FullRange), i)
|
||||
core_str::StrExt::char_at_reverse(&self[], i)
|
||||
}
|
||||
|
||||
/// Work with the byte buffer of a string as a byte slice.
|
||||
@ -1076,7 +1076,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn as_bytes(&self) -> &[u8] {
|
||||
core_str::StrExt::as_bytes(self.index(&FullRange))
|
||||
core_str::StrExt::as_bytes(&self[])
|
||||
}
|
||||
|
||||
/// Returns the byte index of the first character of `self` that
|
||||
@ -1104,7 +1104,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn find<P: CharEq>(&self, pat: P) -> Option<uint> {
|
||||
core_str::StrExt::find(self.index(&FullRange), pat)
|
||||
core_str::StrExt::find(&self[], pat)
|
||||
}
|
||||
|
||||
/// Returns the byte index of the last character of `self` that
|
||||
@ -1132,7 +1132,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn rfind<P: CharEq>(&self, pat: P) -> Option<uint> {
|
||||
core_str::StrExt::rfind(self.index(&FullRange), pat)
|
||||
core_str::StrExt::rfind(&self[], pat)
|
||||
}
|
||||
|
||||
/// Returns the byte index of the first matching substring
|
||||
@ -1156,7 +1156,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[unstable = "might get removed in favor of a more generic find in the future"]
|
||||
fn find_str(&self, needle: &str) -> Option<uint> {
|
||||
core_str::StrExt::find_str(self.index(&FullRange), needle)
|
||||
core_str::StrExt::find_str(&self[], needle)
|
||||
}
|
||||
|
||||
/// Retrieves the first character from a string slice and returns
|
||||
@ -1179,7 +1179,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[unstable = "awaiting conventions about shifting and slices"]
|
||||
fn slice_shift_char(&self) -> Option<(char, &str)> {
|
||||
core_str::StrExt::slice_shift_char(self.index(&FullRange))
|
||||
core_str::StrExt::slice_shift_char(&self[])
|
||||
}
|
||||
|
||||
/// Returns the byte offset of an inner slice relative to an enclosing outer slice.
|
||||
@ -1198,7 +1198,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[unstable = "awaiting convention about comparability of arbitrary slices"]
|
||||
fn subslice_offset(&self, inner: &str) -> uint {
|
||||
core_str::StrExt::subslice_offset(self.index(&FullRange), inner)
|
||||
core_str::StrExt::subslice_offset(&self[], inner)
|
||||
}
|
||||
|
||||
/// Return an unsafe pointer to the strings buffer.
|
||||
@ -1209,13 +1209,13 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
#[stable]
|
||||
#[inline]
|
||||
fn as_ptr(&self) -> *const u8 {
|
||||
core_str::StrExt::as_ptr(self.index(&FullRange))
|
||||
core_str::StrExt::as_ptr(&self[])
|
||||
}
|
||||
|
||||
/// Return an iterator of `u16` over the string encoded as UTF-16.
|
||||
#[unstable = "this functionality may only be provided by libunicode"]
|
||||
fn utf16_units(&self) -> Utf16Units {
|
||||
Utf16Units { encoder: Utf16Encoder::new(self.index(&FullRange).chars()) }
|
||||
Utf16Units { encoder: Utf16Encoder::new(self[].chars()) }
|
||||
}
|
||||
|
||||
/// Return the number of bytes in this string
|
||||
@ -1229,7 +1229,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
#[stable]
|
||||
#[inline]
|
||||
fn len(&self) -> uint {
|
||||
core_str::StrExt::len(self.index(&FullRange))
|
||||
core_str::StrExt::len(&self[])
|
||||
}
|
||||
|
||||
/// Returns true if this slice contains no bytes
|
||||
@ -1242,7 +1242,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
#[inline]
|
||||
#[stable]
|
||||
fn is_empty(&self) -> bool {
|
||||
core_str::StrExt::is_empty(self.index(&FullRange))
|
||||
core_str::StrExt::is_empty(&self[])
|
||||
}
|
||||
|
||||
/// Parse this string into the specified type.
|
||||
@ -1256,7 +1256,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
#[inline]
|
||||
#[unstable = "this method was just created"]
|
||||
fn parse<F: FromStr>(&self) -> Option<F> {
|
||||
core_str::StrExt::parse(self.index(&FullRange))
|
||||
core_str::StrExt::parse(&self[])
|
||||
}
|
||||
|
||||
/// Returns an iterator over the
|
||||
@ -1280,7 +1280,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[unstable = "this functionality may only be provided by libunicode"]
|
||||
fn graphemes(&self, is_extended: bool) -> Graphemes {
|
||||
UnicodeStr::graphemes(self.index(&FullRange), is_extended)
|
||||
UnicodeStr::graphemes(&self[], is_extended)
|
||||
}
|
||||
|
||||
/// Returns an iterator over the grapheme clusters of self and their byte offsets.
|
||||
@ -1295,7 +1295,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[unstable = "this functionality may only be provided by libunicode"]
|
||||
fn grapheme_indices(&self, is_extended: bool) -> GraphemeIndices {
|
||||
UnicodeStr::grapheme_indices(self.index(&FullRange), is_extended)
|
||||
UnicodeStr::grapheme_indices(&self[], is_extended)
|
||||
}
|
||||
|
||||
/// An iterator over the words of a string (subsequences separated
|
||||
@ -1311,7 +1311,7 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// ```
|
||||
#[stable]
|
||||
fn words(&self) -> Words {
|
||||
UnicodeStr::words(self.index(&FullRange))
|
||||
UnicodeStr::words(&self[])
|
||||
}
|
||||
|
||||
/// Returns a string's displayed width in columns, treating control
|
||||
@ -1325,25 +1325,25 @@ pub trait StrExt: Index<FullRange, Output = str> {
|
||||
/// `is_cjk` = `false`) if the locale is unknown.
|
||||
#[unstable = "this functionality may only be provided by libunicode"]
|
||||
fn width(&self, is_cjk: bool) -> uint {
|
||||
UnicodeStr::width(self.index(&FullRange), is_cjk)
|
||||
UnicodeStr::width(&self[], is_cjk)
|
||||
}
|
||||
|
||||
/// Returns a string with leading and trailing whitespace removed.
|
||||
#[stable]
|
||||
fn trim(&self) -> &str {
|
||||
UnicodeStr::trim(self.index(&FullRange))
|
||||
UnicodeStr::trim(&self[])
|
||||
}
|
||||
|
||||
/// Returns a string with leading whitespace removed.
|
||||
#[stable]
|
||||
fn trim_left(&self) -> &str {
|
||||
UnicodeStr::trim_left(self.index(&FullRange))
|
||||
UnicodeStr::trim_left(&self[])
|
||||
}
|
||||
|
||||
/// Returns a string with trailing whitespace removed.
|
||||
#[stable]
|
||||
fn trim_right(&self) -> &str {
|
||||
UnicodeStr::trim_right(self.index(&FullRange))
|
||||
UnicodeStr::trim_right(&self[])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -168,7 +168,7 @@ impl String {
|
||||
|
||||
if i > 0 {
|
||||
unsafe {
|
||||
res.as_mut_vec().push_all(v.index(&(0..i)))
|
||||
res.as_mut_vec().push_all(&v[0..i])
|
||||
};
|
||||
}
|
||||
|
||||
@ -185,7 +185,7 @@ impl String {
|
||||
macro_rules! error { () => ({
|
||||
unsafe {
|
||||
if subseqidx != i_ {
|
||||
res.as_mut_vec().push_all(v.index(&(subseqidx..i_)));
|
||||
res.as_mut_vec().push_all(&v[subseqidx..i_]);
|
||||
}
|
||||
subseqidx = i;
|
||||
res.as_mut_vec().push_all(REPLACEMENT);
|
||||
@ -254,7 +254,7 @@ impl String {
|
||||
}
|
||||
if subseqidx < total {
|
||||
unsafe {
|
||||
res.as_mut_vec().push_all(v.index(&(subseqidx..total)))
|
||||
res.as_mut_vec().push_all(&v[subseqidx..total])
|
||||
};
|
||||
}
|
||||
Cow::Owned(res)
|
||||
@ -852,21 +852,21 @@ impl ops::Index<ops::Range<uint>> for String {
|
||||
type Output = str;
|
||||
#[inline]
|
||||
fn index(&self, index: &ops::Range<uint>) -> &str {
|
||||
&self.index(&FullRange)[*index]
|
||||
&self[][*index]
|
||||
}
|
||||
}
|
||||
impl ops::Index<ops::RangeTo<uint>> for String {
|
||||
type Output = str;
|
||||
#[inline]
|
||||
fn index(&self, index: &ops::RangeTo<uint>) -> &str {
|
||||
&self.index(&FullRange)[*index]
|
||||
&self[][*index]
|
||||
}
|
||||
}
|
||||
impl ops::Index<ops::RangeFrom<uint>> for String {
|
||||
type Output = str;
|
||||
#[inline]
|
||||
fn index(&self, index: &ops::RangeFrom<uint>) -> &str {
|
||||
&self.index(&FullRange)[*index]
|
||||
&self[][*index]
|
||||
}
|
||||
}
|
||||
impl ops::Index<ops::FullRange> for String {
|
||||
@ -882,7 +882,7 @@ impl ops::Deref for String {
|
||||
type Target = str;
|
||||
|
||||
fn deref<'a>(&'a self) -> &'a str {
|
||||
unsafe { mem::transmute(self.vec.index(&FullRange)) }
|
||||
unsafe { mem::transmute(&self.vec[]) }
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -1178,7 +1178,7 @@ impl<T:Clone> Clone for Vec<T> {

         // self.len <= other.len due to the truncate above, so the
         // slice here is always in-bounds.
-        let slice = other.index(&(self.len()..));
+        let slice = &other[self.len()..];
         self.push_all(slice);
     }
 }
@@ -2031,7 +2031,7 @@ mod tests {
         v.push(());
         assert_eq!(v.iter_mut().count(), 4);

-        for &() in v.iter_mut() {}
+        for &mut () in v.iter_mut() {}
         unsafe { v.set_len(0); }
         assert_eq!(v.iter_mut().count(), 0);
     }
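Besides the slicing rewrite, several hunks (the `vec.rs` test above, the `Unfold` and `Option`/`Result` doc examples further down, and the `any` tests) add an explicit `&mut` to patterns that match through mutable references. A minimal sketch of that pattern form in current Rust, not taken from the patch; the names are illustrative only:

    fn main() {
        let mut v: Vec<()> = vec![(), (), ()];
        // `iter_mut` yields `&mut ()`, so the loop pattern spells out `&mut`.
        for &mut () in v.iter_mut() {}

        // `iter_mut().next()` yields `Option<&mut u32>`; the pattern mirrors that shape.
        let mut x = Some(4u32);
        match x.iter_mut().next() {
            Some(&mut ref mut val) => *val = 42,
            None => {}
        }
        assert_eq!(x, Some(42));
    }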
@ -18,7 +18,7 @@ use clone::Clone;
|
||||
use cmp::{PartialEq, Eq, PartialOrd, Ord, Ordering};
|
||||
use fmt;
|
||||
use marker::Copy;
|
||||
use ops::{Deref, FullRange, Index};
|
||||
use ops::{Deref, FullRange};
|
||||
use option::Option;
|
||||
|
||||
// macro for implementing n-ary tuple functions and operations
|
||||
@ -35,7 +35,7 @@ macro_rules! array_impls {
|
||||
#[unstable = "waiting for Show to stabilize"]
|
||||
impl<T:fmt::Show> fmt::Show for [T; $N] {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Show::fmt(&self.index(&FullRange), f)
|
||||
fmt::Show::fmt(&&self[], f)
|
||||
}
|
||||
}
|
||||
|
||||
@ -43,11 +43,11 @@ macro_rules! array_impls {
|
||||
impl<A, B> PartialEq<[B; $N]> for [A; $N] where A: PartialEq<B> {
|
||||
#[inline]
|
||||
fn eq(&self, other: &[B; $N]) -> bool {
|
||||
self.index(&FullRange) == other.index(&FullRange)
|
||||
&self[] == &other[]
|
||||
}
|
||||
#[inline]
|
||||
fn ne(&self, other: &[B; $N]) -> bool {
|
||||
self.index(&FullRange) != other.index(&FullRange)
|
||||
&self[] != &other[]
|
||||
}
|
||||
}
|
||||
|
||||
@ -58,11 +58,11 @@ macro_rules! array_impls {
|
||||
{
|
||||
#[inline(always)]
|
||||
fn eq(&self, other: &Rhs) -> bool {
|
||||
PartialEq::eq(self.index(&FullRange), &**other)
|
||||
PartialEq::eq(&self[], &**other)
|
||||
}
|
||||
#[inline(always)]
|
||||
fn ne(&self, other: &Rhs) -> bool {
|
||||
PartialEq::ne(self.index(&FullRange), &**other)
|
||||
PartialEq::ne(&self[], &**other)
|
||||
}
|
||||
}
|
||||
|
||||
@ -73,11 +73,11 @@ macro_rules! array_impls {
|
||||
{
|
||||
#[inline(always)]
|
||||
fn eq(&self, other: &[B; $N]) -> bool {
|
||||
PartialEq::eq(&**self, other.index(&FullRange))
|
||||
PartialEq::eq(&**self, &other[])
|
||||
}
|
||||
#[inline(always)]
|
||||
fn ne(&self, other: &[B; $N]) -> bool {
|
||||
PartialEq::ne(&**self, other.index(&FullRange))
|
||||
PartialEq::ne(&**self, &other[])
|
||||
}
|
||||
}
|
||||
|
||||
@ -88,23 +88,23 @@ macro_rules! array_impls {
|
||||
impl<T:PartialOrd> PartialOrd for [T; $N] {
|
||||
#[inline]
|
||||
fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> {
|
||||
PartialOrd::partial_cmp(&self.index(&FullRange), &other.index(&FullRange))
|
||||
PartialOrd::partial_cmp(&&self[], &&other[])
|
||||
}
|
||||
#[inline]
|
||||
fn lt(&self, other: &[T; $N]) -> bool {
|
||||
PartialOrd::lt(&self.index(&FullRange), &other.index(&FullRange))
|
||||
PartialOrd::lt(&&self[], &&other[])
|
||||
}
|
||||
#[inline]
|
||||
fn le(&self, other: &[T; $N]) -> bool {
|
||||
PartialOrd::le(&self.index(&FullRange), &other.index(&FullRange))
|
||||
PartialOrd::le(&&self[], &&other[])
|
||||
}
|
||||
#[inline]
|
||||
fn ge(&self, other: &[T; $N]) -> bool {
|
||||
PartialOrd::ge(&self.index(&FullRange), &other.index(&FullRange))
|
||||
PartialOrd::ge(&&self[], &&other[])
|
||||
}
|
||||
#[inline]
|
||||
fn gt(&self, other: &[T; $N]) -> bool {
|
||||
PartialOrd::gt(&self.index(&FullRange), &other.index(&FullRange))
|
||||
PartialOrd::gt(&&self[], &&other[])
|
||||
}
|
||||
}
|
||||
|
||||
@ -112,7 +112,7 @@ macro_rules! array_impls {
|
||||
impl<T:Ord> Ord for [T; $N] {
|
||||
#[inline]
|
||||
fn cmp(&self, other: &[T; $N]) -> Ordering {
|
||||
Ord::cmp(&self.index(&FullRange), &other.index(&FullRange))
|
||||
Ord::cmp(&&self[], &&other[])
|
||||
}
|
||||
}
|
||||
)+
|
||||
|
@ -20,7 +20,7 @@ use fmt;
|
||||
use iter::{IteratorExt, range};
|
||||
use num::{cast, Float, ToPrimitive};
|
||||
use num::FpCategory as Fp;
|
||||
use ops::{FnOnce, Index};
|
||||
use ops::FnOnce;
|
||||
use result::Result::Ok;
|
||||
use slice::{self, SliceExt};
|
||||
use str::{self, StrExt};
|
||||
@ -332,5 +332,5 @@ pub fn float_to_str_bytes_common<T: Float, U, F>(
|
||||
}
|
||||
}
|
||||
|
||||
f(unsafe { str::from_utf8_unchecked(buf.index(&(0..end))) })
|
||||
f(unsafe { str::from_utf8_unchecked(&buf[0..end]) })
|
||||
}
|
||||
|
@ -21,7 +21,7 @@ use mem;
|
||||
use option::Option;
|
||||
use option::Option::{Some, None};
|
||||
use result::Result::Ok;
|
||||
use ops::{Deref, FnOnce, Index};
|
||||
use ops::{Deref, FnOnce};
|
||||
use result;
|
||||
use slice::SliceExt;
|
||||
use slice;
|
||||
@ -425,7 +425,7 @@ impl<'a> Formatter<'a> {
|
||||
for c in sign.into_iter() {
|
||||
let mut b = [0; 4];
|
||||
let n = c.encode_utf8(&mut b).unwrap_or(0);
|
||||
let b = unsafe { str::from_utf8_unchecked(b.index(&(0..n))) };
|
||||
let b = unsafe { str::from_utf8_unchecked(&b[0..n]) };
|
||||
try!(f.buf.write_str(b));
|
||||
}
|
||||
if prefixed { f.buf.write_str(prefix) }
|
||||
@ -533,7 +533,7 @@ impl<'a> Formatter<'a> {
|
||||
|
||||
let mut fill = [0u8; 4];
|
||||
let len = self.fill.encode_utf8(&mut fill).unwrap_or(0);
|
||||
let fill = unsafe { str::from_utf8_unchecked(fill.index(&(..len))) };
|
||||
let fill = unsafe { str::from_utf8_unchecked(&fill[..len]) };
|
||||
|
||||
for _ in range(0, pre_pad) {
|
||||
try!(self.buf.write_str(fill));
|
||||
@ -668,7 +668,7 @@ impl String for char {
|
||||
fn fmt(&self, f: &mut Formatter) -> Result {
|
||||
let mut utf8 = [0u8; 4];
|
||||
let amt = self.encode_utf8(&mut utf8).unwrap_or(0);
|
||||
let s: &str = unsafe { mem::transmute(utf8.index(&(0..amt))) };
|
||||
let s: &str = unsafe { mem::transmute(&utf8[0..amt]) };
|
||||
String::fmt(s, f)
|
||||
}
|
||||
}
|
||||
|
@ -16,7 +16,6 @@
|
||||
|
||||
use fmt;
|
||||
use iter::IteratorExt;
|
||||
use ops::Index;
|
||||
use num::{Int, cast};
|
||||
use slice::SliceExt;
|
||||
use str;
|
||||
@ -62,7 +61,7 @@ trait GenericRadix {
|
||||
if x == zero { break }; // No more digits left to accumulate.
|
||||
}
|
||||
}
|
||||
let buf = unsafe { str::from_utf8_unchecked(buf.index(&(curr..))) };
|
||||
let buf = unsafe { str::from_utf8_unchecked(&buf[curr..]) };
|
||||
f.pad_integral(is_positive, self.prefix(), buf)
|
||||
}
|
||||
}
|
||||
|
@ -2344,7 +2344,7 @@ impl<A, I, F> RandomAccessIterator for Inspect<A, I, F> where
|
||||
///
|
||||
/// // This iterator will yield up to the last Fibonacci number before the max value of `u32`.
|
||||
/// // You can simply change `u32` to `u64` in this line if you want higher values than that.
|
||||
/// let mut fibonacci = Unfold::new((Some(0u32), Some(1u32)), |&(ref mut x2, ref mut x1)| {
|
||||
/// let mut fibonacci = Unfold::new((Some(0u32), Some(1u32)), |&mut (ref mut x2, ref mut x1)| {
|
||||
/// // Attempt to get the next Fibonacci number
|
||||
/// // `x1` will be `None` if previously overflowed.
|
||||
/// let next = match (*x2, *x1) {
|
||||
|
@ -24,7 +24,7 @@ use iter::IteratorExt;
|
||||
use marker::Copy;
|
||||
use mem::size_of;
|
||||
use ops::{Add, Sub, Mul, Div, Rem, Neg};
|
||||
use ops::{Not, BitAnd, BitOr, BitXor, Shl, Shr, Index};
|
||||
use ops::{Not, BitAnd, BitOr, BitXor, Shl, Shr};
|
||||
use option::Option;
|
||||
use option::Option::{Some, None};
|
||||
use str::{FromStr, StrExt};
|
||||
@ -1577,7 +1577,7 @@ macro_rules! from_str_radix_float_impl {
|
||||
};
|
||||
|
||||
// Parse the exponent as decimal integer
|
||||
let src = src.index(&(offset..));
|
||||
let src = &src[offset..];
|
||||
let (is_positive, exp) = match src.slice_shift_char() {
|
||||
Some(('-', src)) => (false, src.parse::<uint>()),
|
||||
Some(('+', src)) => (true, src.parse::<uint>()),
|
||||
|
@ -534,7 +534,7 @@ impl<T> Option<T> {
|
||||
/// ```
|
||||
/// let mut x = Some(4u);
|
||||
/// match x.iter_mut().next() {
|
||||
/// Some(&ref mut v) => *v = 42u,
|
||||
/// Some(&mut ref mut v) => *v = 42u,
|
||||
/// None => {},
|
||||
/// }
|
||||
/// assert_eq!(x, Some(42));
|
||||
|
@ -383,8 +383,8 @@ impl<T, E> Result<T, E> {
|
||||
/// ```
|
||||
/// fn mutate(r: &mut Result<int, int>) {
|
||||
/// match r.as_mut() {
|
||||
/// Ok(&ref mut v) => *v = 42,
|
||||
/// Err(&ref mut e) => *e = 0,
|
||||
/// Ok(&mut ref mut v) => *v = 42,
|
||||
/// Err(&mut ref mut e) => *e = 0,
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
@ -529,7 +529,7 @@ impl<T, E> Result<T, E> {
|
||||
/// ```
|
||||
/// let mut x: Result<uint, &str> = Ok(7);
|
||||
/// match x.iter_mut().next() {
|
||||
/// Some(&ref mut x) => *x = 40,
|
||||
/// Some(&mut ref mut x) => *x = 40,
|
||||
/// None => {},
|
||||
/// }
|
||||
/// assert_eq!(x, Ok(40));
|
||||
|
@ -159,7 +159,7 @@ impl<T> SliceExt for [T] {
|
||||
|
||||
#[inline]
|
||||
fn split_at(&self, mid: uint) -> (&[T], &[T]) {
|
||||
(self.index(&(0..mid)), self.index(&(mid..)))
|
||||
(&self[0..mid], &self[mid..])
|
||||
}
|
||||
|
||||
#[inline]
|
||||
@ -236,11 +236,11 @@ impl<T> SliceExt for [T] {
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn tail(&self) -> &[T] { self.index(&(1..)) }
|
||||
fn tail(&self) -> &[T] { &self[1..] }
|
||||
|
||||
#[inline]
|
||||
fn init(&self) -> &[T] {
|
||||
self.index(&(0..(self.len() - 1)))
|
||||
&self[0..(self.len() - 1)]
|
||||
}
|
||||
|
||||
#[inline]
|
||||
@ -443,13 +443,13 @@ impl<T> SliceExt for [T] {
|
||||
#[inline]
|
||||
fn starts_with(&self, needle: &[T]) -> bool where T: PartialEq {
|
||||
let n = needle.len();
|
||||
self.len() >= n && needle == self.index(&(0..n))
|
||||
self.len() >= n && needle == &self[0..n]
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq {
|
||||
let (m, n) = (self.len(), needle.len());
|
||||
m >= n && needle == self.index(&((m-n)..))
|
||||
m >= n && needle == &self[(m-n)..]
|
||||
}
|
||||
|
||||
#[unstable]
|
||||
@ -972,8 +972,8 @@ impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
|
||||
match self.v.iter().position(|x| (self.pred)(x)) {
|
||||
None => self.finish(),
|
||||
Some(idx) => {
|
||||
let ret = Some(self.v.index(&(0..idx)));
|
||||
self.v = self.v.index(&((idx + 1)..));
|
||||
let ret = Some(&self.v[0..idx]);
|
||||
self.v = &self.v[(idx + 1)..];
|
||||
ret
|
||||
}
|
||||
}
|
||||
@ -998,8 +998,8 @@ impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> boo
|
||||
match self.v.iter().rposition(|x| (self.pred)(x)) {
|
||||
None => self.finish(),
|
||||
Some(idx) => {
|
||||
let ret = Some(self.v.index(&((idx + 1)..)));
|
||||
self.v = self.v.index(&(0..idx));
|
||||
let ret = Some(&self.v[(idx + 1)..]);
|
||||
self.v = &self.v[0..idx];
|
||||
ret
|
||||
}
|
||||
}
|
||||
@ -1195,8 +1195,8 @@ impl<'a, T> Iterator for Windows<'a, T> {
|
||||
if self.size > self.v.len() {
|
||||
None
|
||||
} else {
|
||||
let ret = Some(self.v.index(&(0..self.size)));
|
||||
self.v = self.v.index(&(1..));
|
||||
let ret = Some(&self.v[0..self.size]);
|
||||
self.v = &self.v[1..];
|
||||
ret
|
||||
}
|
||||
}
|
||||
@ -1283,7 +1283,7 @@ impl<'a, T> RandomAccessIterator for Chunks<'a, T> {
|
||||
let mut hi = lo + self.size;
|
||||
if hi < lo || hi > self.v.len() { hi = self.v.len(); }
|
||||
|
||||
Some(self.v.index(&(lo..hi)))
|
||||
Some(&self.v[lo..hi])
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -26,7 +26,7 @@ use iter::{Map, Iterator, IteratorExt, DoubleEndedIterator};
|
||||
use marker::Sized;
|
||||
use mem;
|
||||
use num::Int;
|
||||
use ops::{Fn, FnMut, Index};
|
||||
use ops::{Fn, FnMut};
|
||||
use option::Option::{self, None, Some};
|
||||
use ptr::PtrExt;
|
||||
use raw::{Repr, Slice};
|
||||
@ -580,7 +580,7 @@ impl NaiveSearcher {
|
||||
|
||||
fn next(&mut self, haystack: &[u8], needle: &[u8]) -> Option<(uint, uint)> {
|
||||
while self.position + needle.len() <= haystack.len() {
|
||||
if haystack.index(&(self.position .. self.position + needle.len())) == needle {
|
||||
if &haystack[self.position .. self.position + needle.len()] == needle {
|
||||
let match_pos = self.position;
|
||||
self.position += needle.len(); // add 1 for all matches
|
||||
return Some((match_pos, match_pos + needle.len()));
|
||||
@ -701,10 +701,10 @@ impl TwoWaySearcher {
|
||||
//
|
||||
// What's going on is we have some critical factorization (u, v) of the
|
||||
// needle, and we want to determine whether u is a suffix of
|
||||
// v.index(&(0..period)). If it is, we use "Algorithm CP1". Otherwise we use
|
||||
// &v[0..period]. If it is, we use "Algorithm CP1". Otherwise we use
|
||||
// "Algorithm CP2", which is optimized for when the period of the needle
|
||||
// is large.
|
||||
if needle.index(&(0..crit_pos)) == needle.index(&(period.. period + crit_pos)) {
|
||||
if &needle[0..crit_pos] == &needle[period.. period + crit_pos] {
|
||||
TwoWaySearcher {
|
||||
crit_pos: crit_pos,
|
||||
period: period,
|
||||
@ -1412,13 +1412,13 @@ impl StrExt for str {
|
||||
#[inline]
|
||||
fn starts_with(&self, needle: &str) -> bool {
|
||||
let n = needle.len();
|
||||
self.len() >= n && needle.as_bytes() == self.as_bytes().index(&(0..n))
|
||||
self.len() >= n && needle.as_bytes() == &self.as_bytes()[0..n]
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn ends_with(&self, needle: &str) -> bool {
|
||||
let (m, n) = (self.len(), needle.len());
|
||||
m >= n && needle.as_bytes() == self.as_bytes().index(&((m-n)..))
|
||||
m >= n && needle.as_bytes() == &self.as_bytes()[(m-n)..]
|
||||
}
|
||||
|
||||
#[inline]
|
||||
|
@ -101,12 +101,12 @@ fn any_downcast_mut() {
|
||||
}
|
||||
|
||||
match a_r.downcast_mut::<uint>() {
|
||||
Some(&612) => {}
|
||||
Some(&mut 612) => {}
|
||||
x => panic!("Unexpected value {:?}", x)
|
||||
}
|
||||
|
||||
match b_r.downcast_mut::<uint>() {
|
||||
Some(&413) => {}
|
||||
Some(&mut 413) => {}
|
||||
x => panic!("Unexpected value {:?}", x)
|
||||
}
|
||||
}
|
||||
|
@ -167,7 +167,7 @@ fn test_encode_utf8() {
|
||||
fn check(input: char, expect: &[u8]) {
|
||||
let mut buf = [0u8; 4];
|
||||
let n = input.encode_utf8(buf.as_mut_slice()).unwrap_or(0);
|
||||
assert_eq!(buf.index(&(0..n)), expect);
|
||||
assert_eq!(&buf[0..n], expect);
|
||||
}
|
||||
|
||||
check('x', &[0x78]);
|
||||
@ -181,7 +181,7 @@ fn test_encode_utf16() {
|
||||
fn check(input: char, expect: &[u16]) {
|
||||
let mut buf = [0u16; 2];
|
||||
let n = input.encode_utf16(buf.as_mut_slice()).unwrap_or(0);
|
||||
assert_eq!(buf.index(&(0..n)), expect);
|
||||
assert_eq!(&buf[0..n], expect);
|
||||
}
|
||||
|
||||
check('x', &[0x0078]);
|
||||
|
@ -57,17 +57,17 @@ fn iterator_to_slice() {
|
||||
}
|
||||
{
|
||||
let mut iter = data.iter_mut();
|
||||
assert_eq!(iter.index(&FullRange), other_data.index(&FullRange));
|
||||
assert_eq!(&iter[], &other_data[]);
|
||||
// mutability:
|
||||
assert!(&mut iter[] == other_data);
|
||||
|
||||
iter.next();
|
||||
assert_eq!(iter.index(&FullRange), other_data.index(&(1..)));
|
||||
assert_eq!(&iter[], &other_data[1..]);
|
||||
assert!(&mut iter[] == &mut other_data[1..]);
|
||||
|
||||
iter.next_back();
|
||||
|
||||
assert_eq!(iter.index(&FullRange), other_data.index(&(1..2)));
|
||||
assert_eq!(&iter[], &other_data[1..2]);
|
||||
assert!(&mut iter[] == &mut other_data[1..2]);
|
||||
|
||||
let s = iter.into_slice();
|
||||
|
@ -212,12 +212,12 @@ impl<'a> Parser<'a> {
|
||||
self.cur.next();
|
||||
}
|
||||
Some((_, other)) => {
|
||||
self.err(format!("expected `{:?}`, found `{:?}`", c,
|
||||
other).index(&FullRange));
|
||||
self.err(&format!("expected `{:?}`, found `{:?}`", c,
|
||||
other)[]);
|
||||
}
|
||||
None => {
|
||||
self.err(format!("expected `{:?}` but string was terminated",
|
||||
c).index(&FullRange));
|
||||
self.err(&format!("expected `{:?}` but string was terminated",
|
||||
c)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -240,12 +240,12 @@ impl<'a> Parser<'a> {
|
||||
// we may not consume the character, so clone the iterator
|
||||
match self.cur.clone().next() {
|
||||
Some((pos, '}')) | Some((pos, '{')) => {
|
||||
return self.input.index(&(start..pos));
|
||||
return &self.input[start..pos];
|
||||
}
|
||||
Some(..) => { self.cur.next(); }
|
||||
None => {
|
||||
self.cur.next();
|
||||
return self.input.index(&(start..self.input.len()));
|
||||
return &self.input[start..self.input.len()];
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -285,7 +285,7 @@ impl<'a> Parser<'a> {
|
||||
flags: 0,
|
||||
precision: CountImplied,
|
||||
width: CountImplied,
|
||||
ty: self.input.index(&(0..0)),
|
||||
ty: &self.input[0..0],
|
||||
};
|
||||
if !self.consume(':') { return spec }
|
||||
|
||||
@ -394,7 +394,7 @@ impl<'a> Parser<'a> {
|
||||
self.cur.next();
|
||||
pos
|
||||
}
|
||||
Some(..) | None => { return self.input.index(&(0..0)); }
|
||||
Some(..) | None => { return &self.input[0..0]; }
|
||||
};
|
||||
let mut end;
|
||||
loop {
|
||||
@ -406,7 +406,7 @@ impl<'a> Parser<'a> {
|
||||
None => { end = self.input.len(); break }
|
||||
}
|
||||
}
|
||||
self.input.index(&(start..end))
|
||||
&self.input[start..end]
|
||||
}
|
||||
|
||||
/// Optionally parses an integer at the current position. This doesn't deal
|
||||
|
@ -281,7 +281,7 @@ impl OptGroup {
|
||||
|
||||
impl Matches {
|
||||
fn opt_vals(&self, nm: &str) -> Vec<Optval> {
|
||||
match find_opt(self.opts.index(&FullRange), Name::from_str(nm)) {
|
||||
match find_opt(&self.opts[], Name::from_str(nm)) {
|
||||
Some(id) => self.vals[id].clone(),
|
||||
None => panic!("No option '{}' defined", nm)
|
||||
}
|
||||
@ -309,7 +309,7 @@ impl Matches {
|
||||
/// Returns true if any of several options were matched.
|
||||
pub fn opts_present(&self, names: &[String]) -> bool {
|
||||
for nm in names.iter() {
|
||||
match find_opt(self.opts.as_slice(), Name::from_str(nm.index(&FullRange))) {
|
||||
match find_opt(self.opts.as_slice(), Name::from_str(&nm[])) {
|
||||
Some(id) if !self.vals[id].is_empty() => return true,
|
||||
_ => (),
|
||||
};
|
||||
@ -320,7 +320,7 @@ impl Matches {
|
||||
/// Returns the string argument supplied to one of several matching options or `None`.
|
||||
pub fn opts_str(&self, names: &[String]) -> Option<String> {
|
||||
for nm in names.iter() {
|
||||
match self.opt_val(nm.index(&FullRange)) {
|
||||
match self.opt_val(&nm[]) {
|
||||
Some(Val(ref s)) => return Some(s.clone()),
|
||||
_ => ()
|
||||
}
|
||||
@ -585,7 +585,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
|
||||
while i < l {
|
||||
let cur = args[i].clone();
|
||||
let curlen = cur.len();
|
||||
if !is_arg(cur.index(&FullRange)) {
|
||||
if !is_arg(&cur[]) {
|
||||
free.push(cur);
|
||||
} else if cur == "--" {
|
||||
let mut j = i + 1;
|
||||
@ -595,7 +595,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
|
||||
let mut names;
|
||||
let mut i_arg = None;
|
||||
if cur.as_bytes()[1] == b'-' {
|
||||
let tail = cur.index(&(2..curlen));
|
||||
let tail = &cur[2..curlen];
|
||||
let tail_eq: Vec<&str> = tail.split('=').collect();
|
||||
if tail_eq.len() <= 1 {
|
||||
names = vec!(Long(tail.to_string()));
|
||||
@ -631,7 +631,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
|
||||
};
|
||||
|
||||
if arg_follows && range.next < curlen {
|
||||
i_arg = Some(cur.index(&(range.next..curlen)).to_string());
|
||||
i_arg = Some((&cur[range.next..curlen]).to_string());
|
||||
break;
|
||||
}
|
||||
|
||||
@ -659,7 +659,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
|
||||
v.push(Val((i_arg.clone())
|
||||
.unwrap()));
|
||||
} else if name_pos < names.len() || i + 1 == l ||
|
||||
is_arg(args[i + 1].index(&FullRange)) {
|
||||
is_arg(&args[i + 1][]) {
|
||||
let v = &mut vals[optid];
|
||||
v.push(Given);
|
||||
} else {
|
||||
@ -722,7 +722,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
|
||||
0 => {}
|
||||
1 => {
|
||||
row.push('-');
|
||||
row.push_str(short_name.index(&FullRange));
|
||||
row.push_str(&short_name[]);
|
||||
row.push(' ');
|
||||
}
|
||||
_ => panic!("the short name should only be 1 ascii char long"),
|
||||
@ -733,7 +733,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
|
||||
0 => {}
|
||||
_ => {
|
||||
row.push_str("--");
|
||||
row.push_str(long_name.index(&FullRange));
|
||||
row.push_str(&long_name[]);
|
||||
row.push(' ');
|
||||
}
|
||||
}
|
||||
@ -741,10 +741,10 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
|
||||
// arg
|
||||
match hasarg {
|
||||
No => {}
|
||||
Yes => row.push_str(hint.index(&FullRange)),
|
||||
Yes => row.push_str(&hint[]),
|
||||
Maybe => {
|
||||
row.push('[');
|
||||
row.push_str(hint.index(&FullRange));
|
||||
row.push_str(&hint[]);
|
||||
row.push(']');
|
||||
}
|
||||
}
|
||||
@ -757,7 +757,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
|
||||
row.push(' ');
|
||||
}
|
||||
} else {
|
||||
row.push_str(desc_sep.index(&FullRange));
|
||||
row.push_str(&desc_sep[]);
|
||||
}
|
||||
|
||||
// Normalize desc to contain words separated by one space character
|
||||
@ -769,14 +769,14 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
|
||||
|
||||
// FIXME: #5516 should be graphemes not codepoints
|
||||
let mut desc_rows = Vec::new();
|
||||
each_split_within(desc_normalized_whitespace.index(&FullRange), 54, |substr| {
|
||||
each_split_within(&desc_normalized_whitespace[], 54, |substr| {
|
||||
desc_rows.push(substr.to_string());
|
||||
true
|
||||
});
|
||||
|
||||
// FIXME: #5516 should be graphemes not codepoints
|
||||
// wrapped description
|
||||
row.push_str(desc_rows.connect(desc_sep.index(&FullRange)).index(&FullRange));
|
||||
row.push_str(&desc_rows.connect(&desc_sep[])[]);
|
||||
|
||||
row
|
||||
});
|
||||
@ -795,10 +795,10 @@ fn format_option(opt: &OptGroup) -> String {
|
||||
// Use short_name is possible, but fallback to long_name.
|
||||
if opt.short_name.len() > 0 {
|
||||
line.push('-');
|
||||
line.push_str(opt.short_name.index(&FullRange));
|
||||
line.push_str(&opt.short_name[]);
|
||||
} else {
|
||||
line.push_str("--");
|
||||
line.push_str(opt.long_name.index(&FullRange));
|
||||
line.push_str(&opt.long_name[]);
|
||||
}
|
||||
|
||||
if opt.hasarg != No {
|
||||
@ -806,7 +806,7 @@ fn format_option(opt: &OptGroup) -> String {
|
||||
if opt.hasarg == Maybe {
|
||||
line.push('[');
|
||||
}
|
||||
line.push_str(opt.hint.index(&FullRange));
|
||||
line.push_str(&opt.hint[]);
|
||||
if opt.hasarg == Maybe {
|
||||
line.push(']');
|
||||
}
|
||||
@ -825,10 +825,10 @@ fn format_option(opt: &OptGroup) -> String {
|
||||
/// Derive a short one-line usage summary from a set of long options.
|
||||
pub fn short_usage(program_name: &str, opts: &[OptGroup]) -> String {
|
||||
let mut line = format!("Usage: {} ", program_name);
|
||||
line.push_str(opts.iter()
|
||||
.map(format_option)
|
||||
.collect::<Vec<String>>()
|
||||
.connect(" ").index(&FullRange));
|
||||
line.push_str(&opts.iter()
|
||||
.map(format_option)
|
||||
.collect::<Vec<String>>()
|
||||
.connect(" ")[]);
|
||||
line
|
||||
}
|
||||
|
||||
@ -891,9 +891,9 @@ fn each_split_within<F>(ss: &str, lim: uint, mut it: F) -> bool where
|
||||
(B, Cr, UnderLim) => { B }
|
||||
(B, Cr, OverLim) if (i - last_start + 1) > lim
|
||||
=> panic!("word starting with {} longer than limit!",
|
||||
ss.index(&(last_start..(i + 1)))),
|
||||
&ss[last_start..(i + 1)]),
|
||||
(B, Cr, OverLim) => {
|
||||
*cont = it(ss.index(&(slice_start..last_end)));
|
||||
*cont = it(&ss[slice_start..last_end]);
|
||||
slice_start = last_start;
|
||||
B
|
||||
}
|
||||
@ -903,7 +903,7 @@ fn each_split_within<F>(ss: &str, lim: uint, mut it: F) -> bool where
|
||||
}
|
||||
(B, Ws, OverLim) => {
|
||||
last_end = i;
|
||||
*cont = it(ss.index(&(slice_start..last_end)));
|
||||
*cont = it(&ss[slice_start..last_end]);
|
||||
A
|
||||
}
|
||||
|
||||
@ -912,14 +912,14 @@ fn each_split_within<F>(ss: &str, lim: uint, mut it: F) -> bool where
|
||||
B
|
||||
}
|
||||
(C, Cr, OverLim) => {
|
||||
*cont = it(ss.index(&(slice_start..last_end)));
|
||||
*cont = it(&ss[slice_start..last_end]);
|
||||
slice_start = i;
|
||||
last_start = i;
|
||||
last_end = i;
|
||||
B
|
||||
}
|
||||
(C, Ws, OverLim) => {
|
||||
*cont = it(ss.index(&(slice_start..last_end)));
|
||||
*cont = it(&ss[slice_start..last_end]);
|
||||
A
|
||||
}
|
||||
(C, Ws, UnderLim) => {
|
||||
|
@ -453,7 +453,7 @@ impl<'a> LabelText<'a> {
|
||||
pub fn escape(&self) -> String {
|
||||
match self {
|
||||
&LabelStr(ref s) => s.escape_default(),
|
||||
&EscStr(ref s) => LabelText::escape_str(s.index(&FullRange)),
|
||||
&EscStr(ref s) => LabelText::escape_str(&s[]),
|
||||
}
|
||||
}
|
||||
|
||||
@ -482,7 +482,7 @@ impl<'a> LabelText<'a> {
|
||||
let mut prefix = self.pre_escaped_content().into_owned();
|
||||
let suffix = suffix.pre_escaped_content();
|
||||
prefix.push_str(r"\n\n");
|
||||
prefix.push_str(suffix.index(&FullRange));
|
||||
prefix.push_str(&suffix[]);
|
||||
EscStr(prefix.into_cow())
|
||||
}
|
||||
}
|
||||
@ -676,7 +676,7 @@ mod tests {
|
||||
|
||||
impl<'a> Labeller<'a, Node, &'a Edge> for LabelledGraph {
|
||||
fn graph_id(&'a self) -> Id<'a> {
|
||||
Id::new(self.name.index(&FullRange)).unwrap()
|
||||
Id::new(&self.name[]).unwrap()
|
||||
}
|
||||
fn node_id(&'a self, n: &Node) -> Id<'a> {
|
||||
id_name(n)
|
||||
|
@ -288,7 +288,7 @@ pub fn log(level: u32, loc: &'static LogLocation, args: fmt::Arguments) {
|
||||
// Test the literal string from args against the current filter, if there
|
||||
// is one.
|
||||
match unsafe { FILTER.as_ref() } {
|
||||
Some(filter) if !filter.is_match(args.to_string().index(&FullRange)) => return,
|
||||
Some(filter) if !filter.is_match(&args.to_string()[]) => return,
|
||||
_ => {}
|
||||
}
|
||||
|
||||
@ -383,7 +383,7 @@ fn enabled(level: u32,
|
||||
// Search for the longest match, the vector is assumed to be pre-sorted.
|
||||
for directive in iter.rev() {
|
||||
match directive.name {
|
||||
Some(ref name) if !module.starts_with(name.index(&FullRange)) => {},
|
||||
Some(ref name) if !module.starts_with(&name[]) => {},
|
||||
Some(..) | None => {
|
||||
return level <= directive.level
|
||||
}
|
||||
@ -398,7 +398,7 @@ fn enabled(level: u32,
|
||||
/// `Once` primitive (and this function is called from that primitive).
|
||||
fn init() {
|
||||
let (mut directives, filter) = match os::getenv("RUST_LOG") {
|
||||
Some(spec) => directive::parse_logging_spec(spec.index(&FullRange)),
|
||||
Some(spec) => directive::parse_logging_spec(&spec[]),
|
||||
None => (Vec::new(), None),
|
||||
};
|
||||
|
||||
|
@ -271,7 +271,7 @@ pub trait Rng : Sized {
|
||||
/// let mut rng = thread_rng();
|
||||
/// println!("{:?}", rng.choose(&choices));
|
||||
/// # // uncomment when slicing syntax is stable
|
||||
/// //assert_eq!(rng.choose(choices.index(&(0..0))), None);
|
||||
/// //assert_eq!(rng.choose(&choices[0..0]), None);
|
||||
/// ```
|
||||
fn choose<'a, T>(&mut self, values: &'a [T]) -> Option<&'a T> {
|
||||
if values.is_empty() {
|
||||
|
@ -95,7 +95,7 @@ impl Writer for SeekableMemWriter {
|
||||
// there (left), and what will be appended on the end (right)
|
||||
let cap = self.buf.len() - self.pos;
|
||||
let (left, right) = if cap <= buf.len() {
|
||||
(buf.index(&(0..cap)), buf.index(&(cap..)))
|
||||
(&buf[0..cap], &buf[cap..])
|
||||
} else {
|
||||
let result: (_, &[_]) = (buf, &[]);
|
||||
result
|
||||
|
@ -57,7 +57,7 @@ impl<'doc> Doc<'doc> {
|
||||
}
|
||||
|
||||
pub fn as_str_slice<'a>(&'a self) -> &'a str {
|
||||
str::from_utf8(self.data.index(&(self.start..self.end))).unwrap()
|
||||
str::from_utf8(&self.data[self.start..self.end]).unwrap()
|
||||
}
|
||||
|
||||
pub fn as_str(&self) -> String {
|
||||
@ -292,7 +292,7 @@ pub mod reader {
|
||||
pub fn with_doc_data<T, F>(d: Doc, f: F) -> T where
|
||||
F: FnOnce(&[u8]) -> T,
|
||||
{
|
||||
f(d.data.index(&(d.start..d.end)))
|
||||
f(&d.data[d.start..d.end])
|
||||
}
|
||||
|
||||
|
||||
|
@ -105,7 +105,7 @@ impl Program {
|
||||
// This is a bit hacky since we have to skip over the initial
|
||||
// 'Save' instruction.
|
||||
let mut pre = String::with_capacity(5);
for inst in c.insts.index(&(1..)).iter() {
for inst in c.insts[1..].iter() {
match *inst {
OneChar(c, FLAG_EMPTY) => pre.push(c),
_ => break

@ -18,7 +18,6 @@ use std::cmp;
use std::fmt;
use std::iter;
use std::num;
use std::ops::Index;

/// Static data containing Unicode ranges for general categories and scripts.
use unicode::regex::{UNICODE_CLASSES, PERLD, PERLS, PERLW};
@ -285,8 +284,8 @@ impl<'a> Parser<'a> {
match self.next_char() {
true => Ok(()),
false => {
self.err(format!("Expected {:?} but got EOF.",
expected).index(&FullRange))
self.err(&format!("Expected {:?} but got EOF.",
expected)[])
}
}
}
@ -294,11 +293,11 @@ impl<'a> Parser<'a> {
fn expect(&mut self, expected: char) -> Result<(), Error> {
match self.next_char() {
true if self.cur() == expected => Ok(()),
true => self.err(format!("Expected '{:?}' but got '{:?}'.",
expected, self.cur()).index(&FullRange)),
true => self.err(&format!("Expected '{:?}' but got '{:?}'.",
expected, self.cur())[]),
false => {
self.err(format!("Expected '{:?}' but got EOF.",
expected).index(&FullRange))
self.err(&format!("Expected '{:?}' but got EOF.",
expected)[])
}
}
}
@ -443,15 +442,15 @@ impl<'a> Parser<'a> {
match try!(self.parse_escape()) {
Literal(c3, _) => c2 = c3, // allow literal escapes below
ast =>
return self.err(format!("Expected a literal, but got {:?}.",
ast).index(&FullRange)),
return self.err(&format!("Expected a literal, but got {:?}.",
ast)[]),
}
}
if c2 < c {
return self.err(format!("Invalid character class \
range '{}-{}'",
c,
c2).index(&FullRange))
return self.err(&format!("Invalid character class \
range '{}-{}'",
c,
c2)[])
}
ranges.push((c, self.cur()))
} else {
@ -489,7 +488,7 @@ impl<'a> Parser<'a> {
FLAG_EMPTY
};
let name = self.slice(name_start, closer - 1);
match find_class(ASCII_CLASSES, name.index(&FullRange)) {
match find_class(ASCII_CLASSES, &name[]) {
None => None,
Some(ranges) => {
self.chari = closer;
@ -511,21 +510,21 @@ impl<'a> Parser<'a> {
match self.pos('}') {
Some(i) => i,
None => {
return self.err(format!("No closing brace for counted \
repetition starting at position \
{:?}.",
start).index(&FullRange))
return self.err(&format!("No closing brace for counted \
repetition starting at position \
{:?}.",
start)[])
}
};
self.chari = closer;
let greed = try!(self.get_next_greedy());
let inner = self.chars.index(&((start+1)..closer)).iter().cloned()
let inner = self.chars[(start+1)..closer].iter().cloned()
.collect::<String>();

// Parse the min and max values from the regex.
let (mut min, mut max): (uint, Option<uint>);
if !inner.contains(",") {
min = try!(self.parse_uint(inner.index(&FullRange)));
min = try!(self.parse_uint(&inner[]));
max = Some(min);
} else {
let pieces: Vec<&str> = inner.splitn(1, ',').collect();
@ -545,21 +544,21 @@ impl<'a> Parser<'a> {

// Do some bounds checking and make sure max >= min.
if min > MAX_REPEAT {
return self.err(format!(
return self.err(&format!(
"{} exceeds maximum allowed repetitions ({})",
min, MAX_REPEAT).index(&FullRange));
min, MAX_REPEAT)[]);
}
if max.is_some() {
let m = max.unwrap();
if m > MAX_REPEAT {
return self.err(format!(
return self.err(&format!(
"{} exceeds maximum allowed repetitions ({})",
m, MAX_REPEAT).index(&FullRange));
m, MAX_REPEAT)[]);
}
if m < min {
return self.err(format!(
return self.err(&format!(
"Max repetitions ({}) cannot be smaller than min \
repetitions ({}).", m, min).index(&FullRange));
repetitions ({}).", m, min)[]);
}
}

@ -623,7 +622,7 @@ impl<'a> Parser<'a> {
Ok(AstClass(ranges, flags))
}
_ => {
self.err(format!("Invalid escape sequence '\\\\{}'", c).index(&FullRange))
self.err(&format!("Invalid escape sequence '\\\\{}'", c)[])
}
}
}
@ -641,9 +640,9 @@ impl<'a> Parser<'a> {
let closer =
match self.pos('}') {
Some(i) => i,
None => return self.err(format!(
None => return self.err(&format!(
"Missing '}}' for unclosed '{{' at position {}",
self.chari).index(&FullRange)),
self.chari)[]),
};
if closer - self.chari + 1 == 0 {
return self.err("No Unicode class name found.")
@ -657,10 +656,10 @@ impl<'a> Parser<'a> {
name = self.slice(self.chari + 1, self.chari + 2);
self.chari += 1;
}
match find_class(UNICODE_CLASSES, name.index(&FullRange)) {
match find_class(UNICODE_CLASSES, &name[]) {
None => {
return self.err(format!("Could not find Unicode class '{}'",
name).index(&FullRange))
return self.err(&format!("Could not find Unicode class '{}'",
name)[])
}
Some(ranges) => {
Ok(AstClass(ranges, negated | (self.flags & FLAG_NOCASE)))
@ -683,11 +682,11 @@ impl<'a> Parser<'a> {
}
}
let s = self.slice(start, end);
match num::from_str_radix::<u32>(s.index(&FullRange), 8) {
match num::from_str_radix::<u32>(&s[], 8) {
Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)),
None => {
self.err(format!("Could not parse '{:?}' as octal number.",
s).index(&FullRange))
self.err(&format!("Could not parse '{:?}' as octal number.",
s)[])
}
}
}
@ -703,14 +702,14 @@ impl<'a> Parser<'a> {
let closer =
match self.pos('}') {
None => {
return self.err(format!("Missing '}}' for unclosed \
return self.err(&format!("Missing '}}' for unclosed \
'{{' at position {}",
start).index(&FullRange))
start)[])
}
Some(i) => i,
};
self.chari = closer;
self.parse_hex_digits(self.slice(start, closer).index(&FullRange))
self.parse_hex_digits(&self.slice(start, closer)[])
}

// Parses a two-digit hex number.
@ -730,7 +729,7 @@ impl<'a> Parser<'a> {
match num::from_str_radix::<u32>(s, 16) {
Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)),
None => {
self.err(format!("Could not parse '{}' as hex number.", s).index(&FullRange))
self.err(&format!("Could not parse '{}' as hex number.", s)[])
}
}
}
@ -755,8 +754,8 @@ impl<'a> Parser<'a> {
"Capture names can only have underscores, letters and digits.")
}
if self.names.contains(&name) {
return self.err(format!("Duplicate capture group name '{}'.",
name).index(&FullRange))
return self.err(&format!("Duplicate capture group name '{}'.",
name)[])
}
self.names.push(name.clone());
self.chari = closer;
@ -788,9 +787,9 @@ impl<'a> Parser<'a> {
'U' => { flags = flags | FLAG_SWAP_GREED; saw_flag = true},
'-' => {
if sign < 0 {
return self.err(format!(
return self.err(&format!(
"Cannot negate flags twice in '{}'.",
self.slice(start, self.chari + 1)).index(&FullRange))
self.slice(start, self.chari + 1))[])
}
sign = -1;
saw_flag = false;
@ -799,9 +798,9 @@ impl<'a> Parser<'a> {
':' | ')' => {
if sign < 0 {
if !saw_flag {
return self.err(format!(
return self.err(&format!(
"A valid flag does not follow negation in '{}'",
self.slice(start, self.chari + 1)).index(&FullRange))
self.slice(start, self.chari + 1))[])
}
flags = flags ^ flags;
}
@ -812,8 +811,8 @@ impl<'a> Parser<'a> {
self.flags = flags;
return Ok(())
}
_ => return self.err(format!(
"Unrecognized flag '{}'.", self.cur()).index(&FullRange)),
_ => return self.err(&format!(
"Unrecognized flag '{}'.", self.cur())[]),
}
}
}
@ -910,8 +909,8 @@ impl<'a> Parser<'a> {
match s.parse::<uint>() {
Some(i) => Ok(i),
None => {
self.err(format!("Expected an unsigned integer but got '{}'.",
s).index(&FullRange))
self.err(&format!("Expected an unsigned integer but got '{}'.",
s)[])
}
}
}
@ -920,8 +919,8 @@ impl<'a> Parser<'a> {
match char::from_u32(n) {
Some(c) => Ok(c),
None => {
self.err(format!("Could not decode '{}' to unicode \
character.", n).index(&FullRange))
self.err(&format!("Could not decode '{}' to unicode \
character.", n)[])
}
}
}
@ -954,7 +953,7 @@ impl<'a> Parser<'a> {
}

fn slice(&self, start: uint, end: uint) -> String {
self.chars.index(&(start..end)).iter().cloned().collect()
self.chars[start..end].iter().cloned().collect()
}
}

@ -238,19 +238,19 @@ impl Regex {
}

let (s, e) = cap.pos(0).unwrap(); // captures only reports matches
new.push_str(text.index(&(last_match..s)));
new.push_str(rep.reg_replace(&cap).index(&FullRange));
new.push_str(&text[last_match..s]);
new.push_str(&rep.reg_replace(&cap)[]);
last_match = e;
}
new.push_str(text.index(&(last_match..text.len())));
new.push_str(&text[last_match..text.len()]);
return new;
}

/// Returns the original string of this regex.
pub fn as_str<'a>(&'a self) -> &'a str {
match *self {
Dynamic(ExDynamic { ref original, .. }) => original.index(&FullRange),
Native(ExNative { ref original, .. }) => original.index(&FullRange),
Dynamic(ExDynamic { ref original, .. }) => &original[],
Native(ExNative { ref original, .. }) => &original[],
}
}

@ -347,13 +347,13 @@ impl<'r, 't> Iterator for RegexSplits<'r, 't> {
if self.last >= text.len() {
None
} else {
let s = text.index(&(self.last..text.len()));
let s = &text[self.last..text.len()];
self.last = text.len();
Some(s)
}
}
Some((s, e)) => {
let matched = text.index(&(self.last..s));
let matched = &text[self.last..s];
self.last = e;
Some(matched)
}
@ -384,7 +384,7 @@ impl<'r, 't> Iterator for RegexSplitsN<'r, 't> {
} else {
self.cur += 1;
if self.cur >= self.limit {
Some(text.index(&(self.splits.last..text.len())))
Some(&text[self.splits.last..text.len()])
} else {
self.splits.next()
}
@ -517,7 +517,7 @@ impl<'t> Captures<'t> {
})
});
let re = Regex::new(r"\$\$").unwrap();
re.replace_all(text.index(&FullRange), NoExpand("$"))
re.replace_all(&text[], NoExpand("$"))
}

/// Returns the number of captured groups.

@ -152,7 +152,7 @@ impl<'r, 't> Nfa<'r, 't> {
// out early.
if self.prog.prefix.len() > 0 && clist.size == 0 {
let needle = self.prog.prefix.as_bytes();
let haystack = self.input.as_bytes().index(&(self.ic..));
let haystack = &self.input.as_bytes()[self.ic..];
match find_prefix(needle, haystack) {
None => break,
Some(i) => {

@ -506,7 +506,7 @@ impl BoxPointers {
if n_uniq > 0 {
let s = ty_to_string(cx.tcx, ty);
let m = format!("type uses owned (Box type) pointers: {}", s);
cx.span_lint(BOX_POINTERS, span, m.index(&FullRange));
cx.span_lint(BOX_POINTERS, span, &m[]);
}
}
}
@ -586,7 +586,7 @@ impl LintPass for RawPointerDerive {
}

fn check_item(&mut self, cx: &Context, item: &ast::Item) {
if !attr::contains_name(item.attrs.index(&FullRange), "automatically_derived") {
if !attr::contains_name(&item.attrs[], "automatically_derived") {
return
}
let did = match item.node {
@ -770,11 +770,11 @@ impl LintPass for UnusedResults {
ty::ty_enum(did, _) => {
if ast_util::is_local(did) {
if let ast_map::NodeItem(it) = cx.tcx.map.get(did.node) {
warned |= check_must_use(cx, it.attrs.index(&FullRange), s.span);
warned |= check_must_use(cx, &it.attrs[], s.span);
}
} else {
csearch::get_item_attrs(&cx.sess().cstore, did, |attrs| {
warned |= check_must_use(cx, attrs.index(&FullRange), s.span);
warned |= check_must_use(cx, &attrs[], s.span);
});
}
}
@ -796,7 +796,7 @@ impl LintPass for UnusedResults {
msg.push_str(s.get());
}
}
cx.span_lint(UNUSED_MUST_USE, sp, msg.index(&FullRange));
cx.span_lint(UNUSED_MUST_USE, sp, &msg[]);
return true;
}
}
@ -842,7 +842,7 @@ impl NonCamelCaseTypes {
} else {
format!("{} `{}` should have a camel case name such as `{}`", sort, s, c)
};
cx.span_lint(NON_CAMEL_CASE_TYPES, span, m.index(&FullRange));
cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[]);
}
}
}
@ -981,8 +981,8 @@ impl NonSnakeCase {

if !is_snake_case(ident) {
cx.span_lint(NON_SNAKE_CASE, span,
format!("{} `{}` should have a snake case name such as `{}`",
sort, s, to_snake_case(s.get())).index(&FullRange));
&format!("{} `{}` should have a snake case name such as `{}`",
sort, s, to_snake_case(s.get()))[]);
}
}
}
@ -1066,10 +1066,10 @@ impl LintPass for NonUpperCaseGlobals {
// upper/lowercase)
if s.get().chars().any(|c| c.is_lowercase()) {
cx.span_lint(NON_UPPER_CASE_GLOBALS, it.span,
format!("static constant `{}` should have an uppercase name \
&format!("static constant `{}` should have an uppercase name \
such as `{}`",
s.get(), s.get().chars().map(|c| c.to_uppercase())
.collect::<String>().index(&FullRange)).index(&FullRange));
s.get(), &s.get().chars().map(|c| c.to_uppercase())
.collect::<String>()[])[]);
}
}
_ => {}
@ -1083,10 +1083,10 @@ impl LintPass for NonUpperCaseGlobals {
let s = token::get_ident(path1.node);
if s.get().chars().any(|c| c.is_lowercase()) {
cx.span_lint(NON_UPPER_CASE_GLOBALS, path1.span,
format!("static constant in pattern `{}` should have an uppercase \
&format!("static constant in pattern `{}` should have an uppercase \
name such as `{}`",
s.get(), s.get().chars().map(|c| c.to_uppercase())
.collect::<String>().index(&FullRange)).index(&FullRange));
s.get(), &s.get().chars().map(|c| c.to_uppercase())
.collect::<String>()[])[]);
}
}
_ => {}
@ -1110,8 +1110,8 @@ impl UnusedParens {
let necessary = struct_lit_needs_parens && contains_exterior_struct_lit(&**inner);
if !necessary {
cx.span_lint(UNUSED_PARENS, value.span,
format!("unnecessary parentheses around {}",
msg).index(&FullRange))
&format!("unnecessary parentheses around {}",
msg)[])
}
}

@ -1213,7 +1213,7 @@ impl LintPass for UnusedImportBraces {
let m = format!("braces around {} is unnecessary",
token::get_ident(*name).get());
cx.span_lint(UNUSED_IMPORT_BRACES, view_item.span,
m.index(&FullRange));
&m[]);
},
_ => ()
}
@ -1251,8 +1251,8 @@ impl LintPass for NonShorthandFieldPatterns {
if let ast::PatIdent(_, ident, None) = fieldpat.node.pat.node {
if ident.node.as_str() == fieldpat.node.ident.as_str() {
cx.span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span,
format!("the `{}:` in this pattern is redundant and can \
be removed", ident.node.as_str()).index(&FullRange))
&format!("the `{}:` in this pattern is redundant and can \
be removed", ident.node.as_str())[])
}
}
}
@ -1356,7 +1356,7 @@ impl LintPass for UnusedMut {
fn check_expr(&mut self, cx: &Context, e: &ast::Expr) {
if let ast::ExprMatch(_, ref arms, _) = e.node {
for a in arms.iter() {
self.check_unused_mut_pat(cx, a.pats.index(&FullRange))
self.check_unused_mut_pat(cx, &a.pats[])
}
}
}
@ -1477,7 +1477,7 @@ impl MissingDoc {
});
if !has_doc {
cx.span_lint(MISSING_DOCS, sp,
format!("missing documentation for {}", desc).index(&FullRange));
&format!("missing documentation for {}", desc)[]);
}
}
}
@ -1491,7 +1491,7 @@ impl LintPass for MissingDoc {
let doc_hidden = self.doc_hidden() || attrs.iter().any(|attr| {
attr.check_name("doc") && match attr.meta_item_list() {
None => false,
Some(l) => attr::contains_name(l.index(&FullRange), "hidden"),
Some(l) => attr::contains_name(&l[], "hidden"),
}
});
self.doc_hidden_stack.push(doc_hidden);
@ -1513,7 +1513,7 @@ impl LintPass for MissingDoc {
}

fn check_crate(&mut self, cx: &Context, krate: &ast::Crate) {
self.check_missing_docs_attrs(cx, None, krate.attrs.index(&FullRange),
self.check_missing_docs_attrs(cx, None, &krate.attrs[],
krate.span, "crate");
}

@ -1527,7 +1527,7 @@ impl LintPass for MissingDoc {
ast::ItemTy(..) => "a type alias",
_ => return
};
self.check_missing_docs_attrs(cx, Some(it.id), it.attrs.index(&FullRange),
self.check_missing_docs_attrs(cx, Some(it.id), &it.attrs[],
it.span, desc);
}

@ -1540,13 +1540,13 @@ impl LintPass for MissingDoc {

// Otherwise, doc according to privacy. This will also check
// doc for default methods defined on traits.
self.check_missing_docs_attrs(cx, Some(m.id), m.attrs.index(&FullRange),
self.check_missing_docs_attrs(cx, Some(m.id), &m.attrs[],
m.span, "a method");
}
}

fn check_ty_method(&mut self, cx: &Context, tm: &ast::TypeMethod) {
self.check_missing_docs_attrs(cx, Some(tm.id), tm.attrs.index(&FullRange),
self.check_missing_docs_attrs(cx, Some(tm.id), &tm.attrs[],
tm.span, "a type method");
}

@ -1556,14 +1556,14 @@ impl LintPass for MissingDoc {
let cur_struct_def = *self.struct_def_stack.last()
.expect("empty struct_def_stack");
self.check_missing_docs_attrs(cx, Some(cur_struct_def),
sf.node.attrs.index(&FullRange), sf.span,
&sf.node.attrs[], sf.span,
"a struct field")
}
}
}

fn check_variant(&mut self, cx: &Context, v: &ast::Variant, _: &ast::Generics) {
self.check_missing_docs_attrs(cx, Some(v.node.id), v.node.attrs.index(&FullRange),
self.check_missing_docs_attrs(cx, Some(v.node.id), &v.node.attrs[],
v.span, "a variant");
assert!(!self.in_variant);
self.in_variant = true;
@ -1693,7 +1693,7 @@ impl Stability {
_ => format!("use of {} item", label)
};

cx.span_lint(lint, span, msg.index(&FullRange));
cx.span_lint(lint, span, &msg[]);
}
}

@ -105,7 +105,7 @@ impl LintStore {
}

pub fn get_lints<'t>(&'t self) -> &'t [(&'static Lint, bool)] {
self.lints.index(&FullRange)
&self.lints[]
}

pub fn get_lint_groups<'t>(&'t self) -> Vec<(&'static str, Vec<LintId>, bool)> {
@ -125,11 +125,11 @@ impl LintStore {
match (sess, from_plugin) {
// We load builtin lints first, so a duplicate is a compiler bug.
// Use early_error when handling -W help with no crate.
(None, _) => early_error(msg.index(&FullRange)),
(Some(sess), false) => sess.bug(msg.index(&FullRange)),
(None, _) => early_error(&msg[]),
(Some(sess), false) => sess.bug(&msg[]),

// A duplicate name from a plugin is a user error.
(Some(sess), true) => sess.err(msg.index(&FullRange)),
(Some(sess), true) => sess.err(&msg[]),
}
}

@ -150,11 +150,11 @@ impl LintStore {
match (sess, from_plugin) {
// We load builtin lints first, so a duplicate is a compiler bug.
// Use early_error when handling -W help with no crate.
(None, _) => early_error(msg.index(&FullRange)),
(Some(sess), false) => sess.bug(msg.index(&FullRange)),
(None, _) => early_error(&msg[]),
(Some(sess), false) => sess.bug(&msg[]),

// A duplicate name from a plugin is a user error.
(Some(sess), true) => sess.err(msg.index(&FullRange)),
(Some(sess), true) => sess.err(&msg[]),
}
}
}
@ -269,8 +269,8 @@ impl LintStore {
let warning = format!("lint {} has been renamed to {}",
lint_name, new_name);
match span {
Some(span) => sess.span_warn(span, warning.index(&FullRange)),
None => sess.warn(warning.index(&FullRange)),
Some(span) => sess.span_warn(span, &warning[]),
None => sess.warn(&warning[]),
};
Some(lint_id)
}
@ -280,21 +280,21 @@ impl LintStore {

pub fn process_command_line(&mut self, sess: &Session) {
for &(ref lint_name, level) in sess.opts.lint_opts.iter() {
match self.find_lint(lint_name.index(&FullRange), sess, None) {
match self.find_lint(&lint_name[], sess, None) {
Some(lint_id) => self.set_level(lint_id, (level, CommandLine)),
None => {
match self.lint_groups.iter().map(|(&x, pair)| (x, pair.0.clone()))
.collect::<FnvHashMap<&'static str,
Vec<LintId>>>()
.get(lint_name.index(&FullRange)) {
.get(&lint_name[]) {
Some(v) => {
v.iter()
.map(|lint_id: &LintId|
self.set_level(*lint_id, (level, CommandLine)))
.collect::<Vec<()>>();
}
None => sess.err(format!("unknown {} flag: {}",
level.as_str(), lint_name).index(&FullRange)),
None => sess.err(&format!("unknown {} flag: {}",
level.as_str(), lint_name)[]),
}
}
}
@ -435,10 +435,10 @@ pub fn raw_emit_lint(sess: &Session, lint: &'static Lint,
if level == Forbid { level = Deny; }

match (level, span) {
(Warn, Some(sp)) => sess.span_warn(sp, msg.index(&FullRange)),
(Warn, None) => sess.warn(msg.index(&FullRange)),
(Deny, Some(sp)) => sess.span_err(sp, msg.index(&FullRange)),
(Deny, None) => sess.err(msg.index(&FullRange)),
(Warn, Some(sp)) => sess.span_warn(sp, &msg[]),
(Warn, None) => sess.warn(&msg[]),
(Deny, Some(sp)) => sess.span_err(sp, &msg[]),
(Deny, None) => sess.err(&msg[]),
_ => sess.bug("impossible level in raw_emit_lint"),
}

@ -549,9 +549,9 @@ impl<'a, 'tcx> Context<'a, 'tcx> {
if now == Forbid && level != Forbid {
let lint_name = lint_id.as_str();
self.tcx.sess.span_err(span,
format!("{}({}) overruled by outer forbid({})",
&format!("{}({}) overruled by outer forbid({})",
level.as_str(), lint_name,
lint_name).index(&FullRange));
lint_name)[]);
} else if now != level {
let src = self.lints.get_level_source(lint_id).1;
self.level_stack.push((lint_id, (now, src)));
@ -586,7 +586,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> {

impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
fn visit_item(&mut self, it: &ast::Item) {
self.with_lint_attrs(it.attrs.index(&FullRange), |cx| {
self.with_lint_attrs(&it.attrs[], |cx| {
run_lints!(cx, check_item, it);
cx.visit_ids(|v| v.visit_item(it));
visit::walk_item(cx, it);
@ -594,14 +594,14 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
}

fn visit_foreign_item(&mut self, it: &ast::ForeignItem) {
self.with_lint_attrs(it.attrs.index(&FullRange), |cx| {
self.with_lint_attrs(&it.attrs[], |cx| {
run_lints!(cx, check_foreign_item, it);
visit::walk_foreign_item(cx, it);
})
}

fn visit_view_item(&mut self, i: &ast::ViewItem) {
self.with_lint_attrs(i.attrs.index(&FullRange), |cx| {
self.with_lint_attrs(&i.attrs[], |cx| {
run_lints!(cx, check_view_item, i);
cx.visit_ids(|v| v.visit_view_item(i));
visit::walk_view_item(cx, i);
@ -627,7 +627,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
body: &'v ast::Block, span: Span, id: ast::NodeId) {
match fk {
visit::FkMethod(_, _, m) => {
self.with_lint_attrs(m.attrs.index(&FullRange), |cx| {
self.with_lint_attrs(&m.attrs[], |cx| {
run_lints!(cx, check_fn, fk, decl, body, span, id);
cx.visit_ids(|v| {
v.visit_fn(fk, decl, body, span, id);
@ -643,7 +643,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
}

fn visit_ty_method(&mut self, t: &ast::TypeMethod) {
self.with_lint_attrs(t.attrs.index(&FullRange), |cx| {
self.with_lint_attrs(&t.attrs[], |cx| {
run_lints!(cx, check_ty_method, t);
visit::walk_ty_method(cx, t);
})
@ -660,14 +660,14 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> {
}

fn visit_struct_field(&mut self, s: &ast::StructField) {
self.with_lint_attrs(s.node.attrs.index(&FullRange), |cx| {
self.with_lint_attrs(&s.node.attrs[], |cx| {
run_lints!(cx, check_struct_field, s);
visit::walk_struct_field(cx, s);
})
}

fn visit_variant(&mut self, v: &ast::Variant, g: &ast::Generics) {
self.with_lint_attrs(v.node.attrs.index(&FullRange), |cx| {
self.with_lint_attrs(&v.node.attrs[], |cx| {
run_lints!(cx, check_variant, v, g);
visit::walk_variant(cx, v, g);
run_lints!(cx, check_variant_post, v, g);
@ -761,7 +761,7 @@ impl<'a, 'tcx> IdVisitingOperation for Context<'a, 'tcx> {
None => {}
Some(lints) => {
for (lint_id, span, msg) in lints.into_iter() {
self.span_lint(lint_id.lint, span, msg.index(&FullRange))
self.span_lint(lint_id.lint, span, &msg[])
}
}
}
@ -811,7 +811,7 @@ pub fn check_crate(tcx: &ty::ctxt,
let mut cx = Context::new(tcx, krate, exported_items);

// Visit the whole crate.
cx.with_lint_attrs(krate.attrs.index(&FullRange), |cx| {
cx.with_lint_attrs(&krate.attrs[], |cx| {
cx.visit_id(ast::CRATE_NODE_ID);
cx.visit_ids(|v| {
v.visited_outermost = true;

@ -65,7 +65,7 @@ fn dump_crates(cstore: &CStore) {
}

fn should_link(i: &ast::ViewItem) -> bool {
!attr::contains_name(i.attrs.index(&FullRange), "no_link")
!attr::contains_name(&i.attrs[], "no_link")

}

@ -90,7 +90,7 @@ pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option<Span>) {
for c in s.chars() {
if c.is_alphanumeric() { continue }
if c == '_' || c == '-' { continue }
err(format!("invalid character `{}` in crate name: `{}`", c, s).index(&FullRange));
err(&format!("invalid character `{}` in crate name: `{}`", c, s)[]);
}
match sess {
Some(sess) => sess.abort_if_errors(),
@ -189,8 +189,8 @@ impl<'a> CrateReader<'a> {
match self.extract_crate_info(i) {
Some(info) => {
let (cnum, _, _) = self.resolve_crate(&None,
info.ident.index(&FullRange),
info.name.index(&FullRange),
&info.ident[],
&info.name[],
None,
i.span,
PathKind::Crate);
@ -209,7 +209,7 @@ impl<'a> CrateReader<'a> {
let name = match *path_opt {
Some((ref path_str, _)) => {
let name = path_str.get().to_string();
validate_crate_name(Some(self.sess), name.index(&FullRange),
validate_crate_name(Some(self.sess), &name[],
Some(i.span));
name
}
@ -275,8 +275,8 @@ impl<'a> CrateReader<'a> {
cstore::NativeUnknown
} else {
self.sess.span_err(m.span,
format!("unknown kind: `{}`",
k).index(&FullRange));
&format!("unknown kind: `{}`",
k)[]);
cstore::NativeUnknown
}
}
@ -330,7 +330,7 @@ impl<'a> CrateReader<'a> {
match self.sess.opts.externs.get(name) {
Some(locs) => {
let found = locs.iter().any(|l| {
let l = fs::realpath(&Path::new(l.index(&FullRange))).ok();
let l = fs::realpath(&Path::new(&l[])).ok();
l == source.dylib || l == source.rlib
});
if found {
@ -409,7 +409,7 @@ impl<'a> CrateReader<'a> {
crate_name: name,
hash: hash.map(|a| &*a),
filesearch: self.sess.target_filesearch(kind),
triple: self.sess.opts.target_triple.index(&FullRange),
triple: &self.sess.opts.target_triple[],
root: root,
rejected_via_hash: vec!(),
rejected_via_triple: vec!(),
@ -435,8 +435,8 @@ impl<'a> CrateReader<'a> {
decoder::get_crate_deps(cdata).iter().map(|dep| {
debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
let (local_cnum, _, _) = self.resolve_crate(root,
dep.name.index(&FullRange),
dep.name.index(&FullRange),
&dep.name[],
&dep.name[],
Some(&dep.hash),
span,
PathKind::Dependency);
@ -447,7 +447,7 @@ impl<'a> CrateReader<'a> {
pub fn read_plugin_metadata<'b>(&'b mut self,
vi: &'b ast::ViewItem) -> PluginMetadata<'b> {
let info = self.extract_crate_info(vi).unwrap();
let target_triple = self.sess.opts.target_triple.index(&FullRange);
let target_triple = &self.sess.opts.target_triple[];
let is_cross = target_triple != config::host_triple();
let mut should_link = info.should_link && !is_cross;
let mut target_only = false;
@ -456,8 +456,8 @@ impl<'a> CrateReader<'a> {
let mut load_ctxt = loader::Context {
sess: self.sess,
span: vi.span,
ident: ident.index(&FullRange),
crate_name: name.index(&FullRange),
ident: &ident[],
crate_name: &name[],
hash: None,
filesearch: self.sess.host_filesearch(PathKind::Crate),
triple: config::host_triple(),
@ -485,8 +485,8 @@ impl<'a> CrateReader<'a> {
let register = should_link && self.existing_match(info.name.as_slice(), None).is_none();
let metadata = if register {
// Register crate now to avoid double-reading metadata
let (_, cmd, _) = self.register_crate(&None, info.ident.index(&FullRange),
info.name.index(&FullRange), vi.span, library);
let (_, cmd, _) = self.register_crate(&None, &info.ident[],
&info.name[], vi.span, library);
PMDSource::Registered(cmd)
} else {
// Not registering the crate; just hold on to the metadata
@ -507,8 +507,8 @@ impl<'a> CrateReader<'a> {
impl<'a> PluginMetadata<'a> {
/// Read exported macros
pub fn exported_macros(&self) -> Vec<ast::MacroDef> {
let imported_from = Some(token::intern(self.info.ident.index(&FullRange)).ident());
let source_name = format!("<{} macros>", self.info.ident.index(&FullRange));
let imported_from = Some(token::intern(&self.info.ident[]).ident());
let source_name = format!("<{} macros>", &self.info.ident[]);
let mut macros = vec![];
decoder::each_exported_macro(self.metadata.as_slice(),
&*self.sess.cstore.intr,
@ -550,7 +550,7 @@ impl<'a> PluginMetadata<'a> {
self.info.ident,
config::host_triple(),
self.sess.opts.target_triple);
self.sess.span_err(self.vi_span, message.index(&FullRange));
self.sess.span_err(self.vi_span, &message[]);
self.sess.abort_if_errors();
}

@ -563,7 +563,7 @@ impl<'a> PluginMetadata<'a> {
let message = format!("plugin crate `{}` only found in rlib format, \
but must be available in dylib format",
self.info.ident);
self.sess.span_err(self.vi_span, message.index(&FullRange));
self.sess.span_err(self.vi_span, &message[]);
// No need to abort because the loading code will just ignore this
// empty dylib.
None

@ -96,7 +96,7 @@ pub fn get_item_path(tcx: &ty::ctxt, def: ast::DefId) -> Vec<ast_map::PathElem>

// FIXME #1920: This path is not always correct if the crate is not linked
// into the root namespace.
let mut r = vec![ast_map::PathMod(token::intern(cdata.name.index(&FullRange)))];
let mut r = vec![ast_map::PathMod(token::intern(&cdata.name[]))];
r.push_all(path.as_slice());
r
}

@ -74,7 +74,7 @@ fn lookup_hash<'a, F>(d: rbml::Doc<'a>, mut eq_fn: F, hash: u64) -> Option<rbml:
let mut ret = None;
reader::tagged_docs(tagged_doc.doc, belt, |elt| {
let pos = u64_from_be_bytes(elt.data, elt.start, 4) as uint;
if eq_fn(elt.data.index(&((elt.start + 4) .. elt.end))) {
if eq_fn(&elt.data[(elt.start + 4) .. elt.end]) {
ret = Some(reader::doc_at(d.data, pos).unwrap().doc);
false
} else {
@ -88,7 +88,7 @@ pub fn maybe_find_item<'a>(item_id: ast::NodeId,
items: rbml::Doc<'a>) -> Option<rbml::Doc<'a>> {
fn eq_item(bytes: &[u8], item_id: ast::NodeId) -> bool {
return u64_from_be_bytes(
bytes.index(&(0u..4u)), 0u, 4u) as ast::NodeId
&bytes[0u..4u], 0u, 4u) as ast::NodeId
== item_id;
}
lookup_hash(items,
@ -1190,7 +1190,7 @@ pub fn get_crate_deps(data: &[u8]) -> Vec<CrateDep> {
}
reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| {
let name = docstr(depdoc, tag_crate_dep_crate_name);
let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash).index(&FullRange));
let hash = Svh::new(&docstr(depdoc, tag_crate_dep_hash)[]);
deps.push(CrateDep {
cnum: crate_num,
name: name,

@ -94,7 +94,7 @@ fn encode_impl_type_basename(rbml_w: &mut Encoder, name: ast::Ident) {
}

pub fn encode_def_id(rbml_w: &mut Encoder, id: DefId) {
rbml_w.wr_tagged_str(tag_def_id, def_to_string(id).index(&FullRange));
rbml_w.wr_tagged_str(tag_def_id, &def_to_string(id)[]);
}

#[derive(Clone)]
@ -153,7 +153,7 @@ fn encode_variant_id(rbml_w: &mut Encoder, vid: DefId) {
rbml_w.end_tag();

rbml_w.start_tag(tag_mod_child);
rbml_w.wr_str(s.index(&FullRange));
rbml_w.wr_str(&s[]);
rbml_w.end_tag();
}

@ -263,7 +263,7 @@ fn encode_symbol(ecx: &EncodeContext,
}
None => {
ecx.diag.handler().bug(
format!("encode_symbol: id not found {}", id).index(&FullRange));
&format!("encode_symbol: id not found {}", id)[]);
}
}
rbml_w.end_tag();
@ -331,8 +331,8 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
encode_name(rbml_w, variant.node.name.name);
encode_parent_item(rbml_w, local_def(id));
encode_visibility(rbml_w, variant.node.vis);
encode_attributes(rbml_w, variant.node.attrs.index(&FullRange));
encode_repr_attrs(rbml_w, ecx, variant.node.attrs.index(&FullRange));
encode_attributes(rbml_w, &variant.node.attrs[]);
encode_repr_attrs(rbml_w, ecx, &variant.node.attrs[]);

let stab = stability::lookup(ecx.tcx, ast_util::local_def(variant.node.id));
encode_stability(rbml_w, stab);
@ -343,9 +343,9 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
let fields = ty::lookup_struct_fields(ecx.tcx, def_id);
let idx = encode_info_for_struct(ecx,
rbml_w,
fields.index(&FullRange),
&fields[],
index);
encode_struct_fields(rbml_w, fields.index(&FullRange), def_id);
encode_struct_fields(rbml_w, &fields[], def_id);
encode_index(rbml_w, idx, write_i64);
}
}
@ -385,12 +385,12 @@ fn encode_reexported_static_method(rbml_w: &mut Encoder,
exp.name, token::get_name(method_name));
rbml_w.start_tag(tag_items_data_item_reexport);
rbml_w.start_tag(tag_items_data_item_reexport_def_id);
rbml_w.wr_str(def_to_string(method_def_id).index(&FullRange));
rbml_w.wr_str(&def_to_string(method_def_id)[]);
rbml_w.end_tag();
rbml_w.start_tag(tag_items_data_item_reexport_name);
rbml_w.wr_str(format!("{}::{}",
rbml_w.wr_str(&format!("{}::{}",
exp.name,
token::get_name(method_name)).index(&FullRange));
token::get_name(method_name))[]);
rbml_w.end_tag();
rbml_w.end_tag();
}
@ -528,7 +528,7 @@ fn encode_reexports(ecx: &EncodeContext,
id);
rbml_w.start_tag(tag_items_data_item_reexport);
rbml_w.start_tag(tag_items_data_item_reexport_def_id);
rbml_w.wr_str(def_to_string(exp.def_id).index(&FullRange));
rbml_w.wr_str(&def_to_string(exp.def_id)[]);
rbml_w.end_tag();
rbml_w.start_tag(tag_items_data_item_reexport_name);
rbml_w.wr_str(exp.name.as_str());
@ -561,13 +561,13 @@ fn encode_info_for_mod(ecx: &EncodeContext,
// Encode info about all the module children.
for item in md.items.iter() {
rbml_w.start_tag(tag_mod_child);
rbml_w.wr_str(def_to_string(local_def(item.id)).index(&FullRange));
rbml_w.wr_str(&def_to_string(local_def(item.id))[]);
rbml_w.end_tag();

each_auxiliary_node_id(&**item, |auxiliary_node_id| {
rbml_w.start_tag(tag_mod_child);
rbml_w.wr_str(def_to_string(local_def(
auxiliary_node_id)).index(&FullRange));
rbml_w.wr_str(&def_to_string(local_def(
auxiliary_node_id))[]);
rbml_w.end_tag();
true
});
@ -579,7 +579,7 @@ fn encode_info_for_mod(ecx: &EncodeContext,
did, ecx.tcx.map.node_to_string(did));

rbml_w.start_tag(tag_mod_impl);
rbml_w.wr_str(def_to_string(local_def(did)).index(&FullRange));
rbml_w.wr_str(&def_to_string(local_def(did))[]);
rbml_w.end_tag();
}
}
@ -614,7 +614,7 @@ fn encode_visibility(rbml_w: &mut Encoder, visibility: ast::Visibility) {
ast::Public => 'y',
ast::Inherited => 'i',
};
rbml_w.wr_str(ch.to_string().index(&FullRange));
rbml_w.wr_str(&ch.to_string()[]);
rbml_w.end_tag();
}

@ -626,7 +626,7 @@ fn encode_unboxed_closure_kind(rbml_w: &mut Encoder,
ty::FnMutUnboxedClosureKind => 'm',
ty::FnOnceUnboxedClosureKind => 'o',
};
rbml_w.wr_str(ch.to_string().index(&FullRange));
rbml_w.wr_str(&ch.to_string()[]);
rbml_w.end_tag();
}

@ -787,7 +787,7 @@ fn encode_generics<'a, 'tcx>(rbml_w: &mut Encoder,
rbml_w.end_tag();

rbml_w.wr_tagged_str(tag_region_param_def_def_id,
def_to_string(param.def_id).index(&FullRange));
&def_to_string(param.def_id)[]);

rbml_w.wr_tagged_u64(tag_region_param_def_space,
param.space.to_uint() as u64);
@ -863,9 +863,9 @@ fn encode_info_for_method<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
encode_path(rbml_w, impl_path.chain(Some(elem).into_iter()));
match ast_item_opt {
Some(&ast::MethodImplItem(ref ast_method)) => {
encode_attributes(rbml_w, ast_method.attrs.index(&FullRange));
encode_attributes(rbml_w, &ast_method.attrs[]);
let any_types = !pty.generics.types.is_empty();
if any_types || is_default_impl || should_inline(ast_method.attrs.index(&FullRange)) {
if any_types || is_default_impl || should_inline(&ast_method.attrs[]) {
encode_inlined_item(ecx, rbml_w, IIImplItemRef(local_def(parent_id),
ast_item_opt.unwrap()));
}
@ -911,7 +911,7 @@ fn encode_info_for_associated_type(ecx: &EncodeContext,
match typedef_opt {
None => {}
Some(typedef) => {
encode_attributes(rbml_w, typedef.attrs.index(&FullRange));
encode_attributes(rbml_w, &typedef.attrs[]);
encode_type(ecx, rbml_w, ty::node_id_to_type(ecx.tcx,
typedef.id));
}
@ -1045,7 +1045,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_path(rbml_w, path);
encode_visibility(rbml_w, vis);
encode_stability(rbml_w, stab);
encode_attributes(rbml_w, item.attrs.index(&FullRange));
encode_attributes(rbml_w, &item.attrs[]);
rbml_w.end_tag();
}
ast::ItemConst(_, _) => {
@ -1071,8 +1071,8 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id));
encode_name(rbml_w, item.ident.name);
encode_path(rbml_w, path);
encode_attributes(rbml_w, item.attrs.index(&FullRange));
if tps_len > 0u || should_inline(item.attrs.index(&FullRange)) {
encode_attributes(rbml_w, &item.attrs[]);
if tps_len > 0u || should_inline(&item.attrs[]) {
encode_inlined_item(ecx, rbml_w, IIItemRef(item));
}
if tps_len == 0 {
@ -1088,7 +1088,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_info_for_mod(ecx,
rbml_w,
m,
item.attrs.index(&FullRange),
&item.attrs[],
item.id,
path,
item.ident,
@ -1105,7 +1105,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
// Encode all the items in this module.
for foreign_item in fm.items.iter() {
rbml_w.start_tag(tag_mod_child);
rbml_w.wr_str(def_to_string(local_def(foreign_item.id)).index(&FullRange));
rbml_w.wr_str(&def_to_string(local_def(foreign_item.id))[]);
rbml_w.end_tag();
}
encode_visibility(rbml_w, vis);
@ -1133,8 +1133,8 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_item_variances(rbml_w, ecx, item.id);
encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id));
encode_name(rbml_w, item.ident.name);
encode_attributes(rbml_w, item.attrs.index(&FullRange));
encode_repr_attrs(rbml_w, ecx, item.attrs.index(&FullRange));
encode_attributes(rbml_w, &item.attrs[]);
encode_repr_attrs(rbml_w, ecx, &item.attrs[]);
for v in (*enum_definition).variants.iter() {
encode_variant_id(rbml_w, local_def(v.node.id));
}
@ -1151,7 +1151,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_enum_variant_info(ecx,
rbml_w,
item.id,
(*enum_definition).variants.index(&FullRange),
&(*enum_definition).variants[],
index);
}
ast::ItemStruct(ref struct_def, _) => {
@ -1163,7 +1163,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
class itself */
let idx = encode_info_for_struct(ecx,
rbml_w,
fields.index(&FullRange),
&fields[],
index);

/* Index the class*/
@ -1177,16 +1177,16 @@ fn encode_info_for_item(ecx: &EncodeContext,

encode_item_variances(rbml_w, ecx, item.id);
encode_name(rbml_w, item.ident.name);
encode_attributes(rbml_w, item.attrs.index(&FullRange));
encode_attributes(rbml_w, &item.attrs[]);
encode_path(rbml_w, path.clone());
encode_stability(rbml_w, stab);
encode_visibility(rbml_w, vis);
encode_repr_attrs(rbml_w, ecx, item.attrs.index(&FullRange));
encode_repr_attrs(rbml_w, ecx, &item.attrs[]);

/* Encode def_ids for each field and method
for methods, write all the stuff get_trait_method
needs to know*/
encode_struct_fields(rbml_w, fields.index(&FullRange), def_id);
encode_struct_fields(rbml_w, &fields[], def_id);

encode_inlined_item(ecx, rbml_w, IIItemRef(item));

@ -1218,7 +1218,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_family(rbml_w, 'i');
encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id));
encode_name(rbml_w, item.ident.name);
encode_attributes(rbml_w, item.attrs.index(&FullRange));
encode_attributes(rbml_w, &item.attrs[]);
encode_unsafety(rbml_w, unsafety);
encode_polarity(rbml_w, polarity);
match ty.node {
@ -1322,7 +1322,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_generics(rbml_w, ecx, &trait_def.generics, tag_item_generics);
encode_trait_ref(rbml_w, ecx, &*trait_def.trait_ref, tag_item_trait_ref);
encode_name(rbml_w, item.ident.name);
encode_attributes(rbml_w, item.attrs.index(&FullRange));
encode_attributes(rbml_w, &item.attrs[]);
encode_visibility(rbml_w, vis);
encode_stability(rbml_w, stab);
for &method_def_id in ty::trait_item_def_ids(tcx, def_id).iter() {
@ -1340,7 +1340,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
rbml_w.end_tag();

rbml_w.start_tag(tag_mod_child);
rbml_w.wr_str(def_to_string(method_def_id.def_id()).index(&FullRange));
rbml_w.wr_str(&def_to_string(method_def_id.def_id())[]);
rbml_w.end_tag();
}
encode_path(rbml_w, path.clone());
@ -1432,14 +1432,14 @@ fn encode_info_for_item(ecx: &EncodeContext,
};
match trait_item {
&ast::RequiredMethod(ref m) => {
encode_attributes(rbml_w, m.attrs.index(&FullRange));
encode_attributes(rbml_w, &m.attrs[]);
encode_trait_item(rbml_w);
encode_item_sort(rbml_w, 'r');
encode_method_argument_names(rbml_w, &*m.decl);
}

&ast::ProvidedMethod(ref m) => {
encode_attributes(rbml_w, m.attrs.index(&FullRange));
encode_attributes(rbml_w, &m.attrs[]);
encode_trait_item(rbml_w);
encode_item_sort(rbml_w, 'p');
encode_inlined_item(ecx, rbml_w, IITraitItemRef(def_id, trait_item));
@ -1448,7 +1448,7 @@ fn encode_info_for_item(ecx: &EncodeContext,

&ast::TypeTraitItem(ref associated_type) => {
encode_attributes(rbml_w,
associated_type.attrs.index(&FullRange));
&associated_type.attrs[]);
encode_item_sort(rbml_w, 't');
}
}
@ -1827,10 +1827,10 @@ fn encode_macro_defs(rbml_w: &mut Encoder,
rbml_w.start_tag(tag_macro_def);

encode_name(rbml_w, def.ident.name);
encode_attributes(rbml_w, def.attrs.index(&FullRange));
encode_attributes(rbml_w, &def.attrs[]);

rbml_w.start_tag(tag_macro_def_body);
rbml_w.wr_str(pprust::tts_to_string(def.body.index(&FullRange)).index(&FullRange));
rbml_w.wr_str(&pprust::tts_to_string(&def.body[])[]);
rbml_w.end_tag();

rbml_w.end_tag();
@ -1870,7 +1870,7 @@ fn encode_struct_field_attrs(rbml_w: &mut Encoder, krate: &ast::Crate) {
fn visit_struct_field(&mut self, field: &ast::StructField) {
self.rbml_w.start_tag(tag_struct_field);
self.rbml_w.wr_tagged_u32(tag_struct_field_id, field.node.id);
encode_attributes(self.rbml_w, field.node.attrs.index(&FullRange));
encode_attributes(self.rbml_w, &field.node.attrs[]);
self.rbml_w.end_tag();
}
}
@ -1942,13 +1942,13 @@ fn encode_misc_info(ecx: &EncodeContext,
rbml_w.start_tag(tag_misc_info_crate_items);
for item in krate.module.items.iter() {
rbml_w.start_tag(tag_mod_child);
rbml_w.wr_str(def_to_string(local_def(item.id)).index(&FullRange));
rbml_w.wr_str(&def_to_string(local_def(item.id))[]);
rbml_w.end_tag();

each_auxiliary_node_id(&**item, |auxiliary_node_id| {
rbml_w.start_tag(tag_mod_child);
rbml_w.wr_str(def_to_string(local_def(
auxiliary_node_id)).index(&FullRange));
rbml_w.wr_str(&def_to_string(local_def(
auxiliary_node_id))[]);
rbml_w.end_tag();
true
});
@ -2117,17 +2117,17 @@ fn encode_metadata_inner(wr: &mut SeekableMemWriter,

let mut rbml_w = writer::Encoder::new(wr);

encode_crate_name(&mut rbml_w, ecx.link_meta.crate_name.index(&FullRange));
encode_crate_name(&mut rbml_w, &ecx.link_meta.crate_name[]);
encode_crate_triple(&mut rbml_w,
tcx.sess
&tcx.sess
.opts
.target_triple
.index(&FullRange));
[]);
encode_hash(&mut rbml_w, &ecx.link_meta.crate_hash);
encode_dylib_dependency_formats(&mut rbml_w, &ecx);

let mut i = rbml_w.writer.tell().unwrap();
encode_attributes(&mut rbml_w, krate.attrs.index(&FullRange));
encode_attributes(&mut rbml_w, &krate.attrs[]);
stats.attr_bytes = rbml_w.writer.tell().unwrap() - i;

i = rbml_w.writer.tell().unwrap();

@ -315,14 +315,14 @@ impl<'a> Context<'a> {
&Some(ref r) => format!("{} which `{}` depends on",
message, r.ident)
};
self.sess.span_err(self.span, message.index(&FullRange));
self.sess.span_err(self.span, &message[]);

if self.rejected_via_triple.len() > 0 {
let mismatches = self.rejected_via_triple.iter();
for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}`, path #{}, triple {}: {}",
self.ident, i+1, got, path.display()).index(&FullRange));
&format!("crate `{}`, path #{}, triple {}: {}",
self.ident, i+1, got, path.display())[]);
}
}
if self.rejected_via_hash.len() > 0 {
@ -331,16 +331,16 @@ impl<'a> Context<'a> {
let mismatches = self.rejected_via_hash.iter();
for (i, &CrateMismatch{ ref path, .. }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path {}{}: {}",
self.ident, "#", i+1, path.display()).index(&FullRange));
&format!("crate `{}` path {}{}: {}",
self.ident, "#", i+1, path.display())[]);
}
match self.root {
&None => {}
&Some(ref r) => {
for (i, path) in r.paths().iter().enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path #{}: {}",
r.ident, i+1, path.display()).index(&FullRange));
&format!("crate `{}` path #{}: {}",
r.ident, i+1, path.display())[]);
}
}
}
@ -386,7 +386,7 @@ impl<'a> Context<'a> {
None => return FileDoesntMatch,
Some(file) => file,
};
let (hash, rlib) = if file.starts_with(rlib_prefix.index(&FullRange)) &&
let (hash, rlib) = if file.starts_with(&rlib_prefix[]) &&
file.ends_with(".rlib") {
(file.slice(rlib_prefix.len(), file.len() - ".rlib".len()),
true)
@ -445,27 +445,27 @@ impl<'a> Context<'a> {
1 => Some(libraries.into_iter().next().unwrap()),
_ => {
self.sess.span_err(self.span,
format!("multiple matching crates for `{}`",
self.crate_name).index(&FullRange));
&format!("multiple matching crates for `{}`",
self.crate_name)[]);
self.sess.note("candidates:");
for lib in libraries.iter() {
match lib.dylib {
Some(ref p) => {
self.sess.note(format!("path: {}",
p.display()).index(&FullRange));
self.sess.note(&format!("path: {}",
p.display())[]);
}
None => {}
}
match lib.rlib {
Some(ref p) => {
self.sess.note(format!("path: {}",
p.display()).index(&FullRange));
self.sess.note(&format!("path: {}",
p.display())[]);
}
None => {}
}
let data = lib.metadata.as_slice();
let name = decoder::get_crate_name(data);
note_crate_name(self.sess.diagnostic(), name.index(&FullRange));
note_crate_name(self.sess.diagnostic(), &name[]);
}
None
}
@ -516,22 +516,22 @@ impl<'a> Context<'a> {
};
if ret.is_some() {
self.sess.span_err(self.span,
format!("multiple {} candidates for `{}` \
&format!("multiple {} candidates for `{}` \
found",
flavor,
self.crate_name).index(&FullRange));
self.crate_name)[]);
self.sess.span_note(self.span,
format!(r"candidate #1: {}",
&format!(r"candidate #1: {}",
ret.as_ref().unwrap()
.display()).index(&FullRange));
.display())[]);
error = 1;
ret = None;
}
if error > 0 {
error += 1;
self.sess.span_note(self.span,
format!(r"candidate #{}: {}", error,
lib.display()).index(&FullRange));
&format!(r"candidate #{}: {}", error,
lib.display())[]);
continue
}
*slot = Some(metadata);
@ -606,17 +606,17 @@ impl<'a> Context<'a> {
let mut rlibs = HashSet::new();
let mut dylibs = HashSet::new();
{
let mut locs = locs.iter().map(|l| Path::new(l.index(&FullRange))).filter(|loc| {
let mut locs = locs.iter().map(|l| Path::new(&l[])).filter(|loc| {
if !loc.exists() {
sess.err(format!("extern location for {} does not exist: {}",
self.crate_name, loc.display()).index(&FullRange));
sess.err(&format!("extern location for {} does not exist: {}",
self.crate_name, loc.display())[]);
return false;
}
let file = match loc.filename_str() {
Some(file) => file,
None => {
sess.err(format!("extern location for {} is not a file: {}",
self.crate_name, loc.display()).index(&FullRange));
sess.err(&format!("extern location for {} is not a file: {}",
self.crate_name, loc.display())[]);
return false;
}
};
@ -624,13 +624,13 @@ impl<'a> Context<'a> {
return true
} else {
let (ref prefix, ref suffix) = dylibname;
if file.starts_with(prefix.index(&FullRange)) &&
file.ends_with(suffix.index(&FullRange)) {
if file.starts_with(&prefix[]) &&
file.ends_with(&suffix[]) {
return true
}
}
sess.err(format!("extern location for {} is of an unknown type: {}",
self.crate_name, loc.display()).index(&FullRange));
sess.err(&format!("extern location for {} is of an unknown type: {}",
self.crate_name, loc.display())[]);
false
});

@ -663,7 +663,7 @@ impl<'a> Context<'a> {
}

pub fn note_crate_name(diag: &SpanHandler, name: &str) {
diag.handler().note(format!("crate name: {}", name).index(&FullRange));
diag.handler().note(&format!("crate name: {}", name)[]);
}

impl ArchiveMetadata {

@ -98,7 +98,7 @@ fn scan<R, F, G>(st: &mut PState, mut is_last: F, op: G) -> R where
}
let end_pos = st.pos;
st.pos += 1;
return op(st.data.index(&(start_pos..end_pos)));
return op(&st.data[start_pos..end_pos]);
}

pub fn parse_ident(st: &mut PState, last: char) -> ast::Ident {
@ -250,8 +250,8 @@ fn parse_trait_store_<F>(st: &mut PState, conv: &mut F) -> ty::TraitStore where
'~' => ty::UniqTraitStore,
'&' => ty::RegionTraitStore(parse_region_(st, conv), parse_mutability(st)),
c => {
st.tcx.sess.bug(format!("parse_trait_store(): bad input '{}'",
c).index(&FullRange))
st.tcx.sess.bug(&format!("parse_trait_store(): bad input '{}'",
c)[])
}
}
}
@ -318,7 +318,7 @@ fn parse_bound_region_<F>(st: &mut PState, conv: &mut F) -> ty::BoundRegion wher
}
'[' => {
let def = parse_def_(st, RegionParameter, conv);
let ident = token::str_to_ident(parse_str(st, ']').index(&FullRange));
let ident = token::str_to_ident(&parse_str(st, ']')[]);
ty::BrNamed(def, ident.name)
}
'f' => {
@ -357,7 +357,7 @@ fn parse_region_<F>(st: &mut PState, conv: &mut F) -> ty::Region where
assert_eq!(next(st), '|');
let index = parse_u32(st);
assert_eq!(next(st), '|');
let nm = token::str_to_ident(parse_str(st, ']').index(&FullRange));
let nm = token::str_to_ident(&parse_str(st, ']')[]);
ty::ReEarlyBound(node_id, space, index, nm.name)
}
'f' => {
@ -481,7 +481,7 @@ fn parse_ty_<'a, 'tcx, F>(st: &mut PState<'a, 'tcx>, conv: &mut F) -> Ty<'tcx> w
assert_eq!(next(st), '|');
let space = parse_param_space(st);
assert_eq!(next(st), '|');
let name = token::intern(parse_str(st, ']').index(&FullRange));
let name = token::intern(&parse_str(st, ']')[]);
return ty::mk_param(tcx, space, index, name);
}
'~' => return ty::mk_uniq(tcx, parse_ty_(st, conv)),
@ -637,7 +637,7 @@ fn parse_abi_set(st: &mut PState) -> abi::Abi {
assert_eq!(next(st), '[');
scan(st, |c| c == ']', |bytes| {
let abi_str = str::from_utf8(bytes).unwrap();
abi::lookup(abi_str.index(&FullRange)).expect(abi_str)
abi::lookup(&abi_str[]).expect(abi_str)
})
}

@ -733,8 +733,8 @@ pub fn parse_def_id(buf: &[u8]) -> ast::DefId {
panic!();
}

let crate_part = buf.index(&(0u..colon_idx));
let def_part = buf.index(&((colon_idx + 1u)..len));
let crate_part = &buf[0u..colon_idx];
let def_part = &buf[(colon_idx + 1u)..len];

let crate_num = match str::from_utf8(crate_part).ok().and_then(|s| s.parse::<uint>()) {
Some(cn) => cn as ast::CrateNum,

@ -47,8 +47,8 @@ pub fn ast_ty_to_prim_ty<'tcx>(tcx: &ty::ctxt<'tcx>, ast_ty: &ast::Ty)
let a_def = match tcx.def_map.borrow().get(&id) {
None => {
tcx.sess.span_bug(ast_ty.span,
format!("unbound path {}",
path.repr(tcx)).index(&FullRange))
&format!("unbound path {}",
path.repr(tcx))[])
}
Some(&d) => d
};

@ -132,7 +132,7 @@ pub fn decode_inlined_item<'tcx>(cdata: &cstore::crate_metadata,
// Do an Option dance to use the path after it is moved below.
let s = ast_map::path_to_string(ast_map::Values(path.iter()));
path_as_str = Some(s);
path_as_str.as_ref().map(|x| x.index(&FullRange))
path_as_str.as_ref().map(|x| &x[])
});
let mut ast_dsr = reader::Decoder::new(ast_doc);
let from_id_range = Decodable::decode(&mut ast_dsr).unwrap();
@ -1876,8 +1876,8 @@ fn decode_side_tables(dcx: &DecodeContext,
match c::astencode_tag::from_uint(tag) {
None => {
dcx.tcx.sess.bug(
format!("unknown tag found in side tables: {:x}",
tag).index(&FullRange));
&format!("unknown tag found in side tables: {:x}",
tag)[]);
}
Some(value) => {
let val_doc = entry_doc.get(c::tag_table_val as uint);
@ -1961,8 +1961,8 @@ fn decode_side_tables(dcx: &DecodeContext,
}
_ => {
dcx.tcx.sess.bug(
format!("unknown tag found in side tables: {:x}",
tag).index(&FullRange));
&format!("unknown tag found in side tables: {:x}",
tag)[]);
}
}
}

@ -362,7 +362,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
let mut cond_exit = discr_exit;
for arm in arms.iter() {
cond_exit = self.add_dummy_node(&[cond_exit]); // 2
let pats_exit = self.pats_any(arm.pats.index(&FullRange),
let pats_exit = self.pats_any(&arm.pats[],
cond_exit); // 3
let guard_exit = self.opt_expr(&arm.guard,
pats_exit); // 4
@ -615,15 +615,15 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
}
self.tcx.sess.span_bug(
expr.span,
format!("no loop scope for id {}",
loop_id).index(&FullRange));
&format!("no loop scope for id {}",
loop_id)[]);
}

r => {
self.tcx.sess.span_bug(
expr.span,
format!("bad entry `{:?}` in def_map for label",
r).index(&FullRange));
&format!("bad entry `{:?}` in def_map for label",
r)[]);
}
}
}

@ -52,7 +52,7 @@ fn replace_newline_with_backslash_l(s: String) -> String {
|
||||
}
|
||||
|
||||
impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> {
|
||||
fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(self.name.index(&FullRange)).unwrap() }
|
||||
fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(&self.name[]).unwrap() }
|
||||
|
||||
fn node_id(&'a self, &(i,_): &Node<'a>) -> dot::Id<'a> {
|
||||
dot::Id::new(format!("N{}", i.node_id())).unwrap()
|
||||
@ -85,9 +85,9 @@ impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> {
|
||||
let s = self.ast_map.node_to_string(node_id);
|
||||
// left-aligns the lines
|
||||
let s = replace_newline_with_backslash_l(s);
|
||||
label.push_str(format!("exiting scope_{} {}",
|
||||
label.push_str(&format!("exiting scope_{} {}",
|
||||
i,
|
||||
s.index(&FullRange)).index(&FullRange));
|
||||
&s[])[]);
|
||||
}
|
||||
dot::LabelText::EscStr(label.into_cow())
|
||||
}
|
||||
|
@ -74,11 +74,11 @@ impl<'a> CheckLoopVisitor<'a> {
|
||||
Loop => {}
|
||||
Closure => {
|
||||
self.sess.span_err(span,
|
||||
format!("`{}` inside of a closure", name).index(&FullRange));
|
||||
&format!("`{}` inside of a closure", name)[]);
|
||||
}
|
||||
Normal => {
|
||||
self.sess.span_err(span,
|
||||
format!("`{}` outside of loop", name).index(&FullRange));
|
||||
&format!("`{}` outside of loop", name)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -47,7 +47,7 @@ struct Matrix<'a>(Vec<Vec<&'a Pat>>);
|
||||
|
||||
/// Pretty-printer for matrices of patterns, example:
|
||||
/// ++++++++++++++++++++++++++
|
||||
/// + _ + .index(&FullRange) +
|
||||
/// + _ + [] +
|
||||
/// ++++++++++++++++++++++++++
|
||||
/// + true + [First] +
|
||||
/// ++++++++++++++++++++++++++
|
||||
@ -161,7 +161,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) {
|
||||
// First, check legality of move bindings.
|
||||
check_legality_of_move_bindings(cx,
|
||||
arm.guard.is_some(),
|
||||
arm.pats.index(&FullRange));
|
||||
&arm.pats[]);
|
||||
|
||||
// Second, if there is a guard on each arm, make sure it isn't
|
||||
// assigning or borrowing anything mutably.
|
||||
@ -198,7 +198,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) {
|
||||
}
|
||||
|
||||
// Fourth, check for unreachable arms.
|
||||
check_arms(cx, inlined_arms.index(&FullRange), source);
|
||||
check_arms(cx, &inlined_arms[], source);
|
||||
|
||||
// Finally, check if the whole match expression is exhaustive.
|
||||
// Check for empty enum, because is_useful only works on inhabited types.
|
||||
@ -228,9 +228,9 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) {
|
||||
is_refutable(cx, &*static_inliner.fold_pat((*pat).clone()), |uncovered_pat| {
|
||||
cx.tcx.sess.span_err(
|
||||
pat.span,
|
||||
format!("refutable pattern in `for` loop binding: \
|
||||
&format!("refutable pattern in `for` loop binding: \
|
||||
`{}` not covered",
|
||||
pat_to_string(uncovered_pat)).index(&FullRange));
|
||||
pat_to_string(uncovered_pat))[]);
|
||||
});
|
||||
|
||||
// Check legality of move bindings.
|
||||
@ -303,7 +303,7 @@ fn check_arms(cx: &MatchCheckCtxt,
|
||||
for pat in pats.iter() {
|
||||
let v = vec![&**pat];
|
||||
|
||||
match is_useful(cx, &seen, v.index(&FullRange), LeaveOutWitness) {
|
||||
match is_useful(cx, &seen, &v[], LeaveOutWitness) {
|
||||
NotUseful => {
|
||||
match source {
|
||||
ast::MatchSource::IfLetDesugar { .. } => {
|
||||
@ -355,7 +355,7 @@ fn raw_pat<'a>(p: &'a Pat) -> &'a Pat {
|
||||
fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, matrix: &Matrix) {
|
||||
match is_useful(cx, matrix, &[DUMMY_WILD_PAT], ConstructWitness) {
|
||||
UsefulWithWitness(pats) => {
|
||||
let witness = match pats.index(&FullRange) {
|
||||
let witness = match &pats[] {
|
||||
[ref witness] => &**witness,
|
||||
[] => DUMMY_WILD_PAT,
|
||||
_ => unreachable!()
|
||||
@ -609,7 +609,7 @@ fn is_useful(cx: &MatchCheckCtxt,
|
||||
UsefulWithWitness(pats) => UsefulWithWitness({
|
||||
let arity = constructor_arity(cx, &c, left_ty);
|
||||
let mut result = {
|
||||
let pat_slice = pats.index(&FullRange);
|
||||
let pat_slice = &pats[];
|
||||
let subpats: Vec<_> = range(0, arity).map(|i| {
|
||||
pat_slice.get(i).map_or(DUMMY_WILD_PAT, |p| &**p)
|
||||
}).collect();
|
||||
@ -656,10 +656,10 @@ fn is_useful_specialized(cx: &MatchCheckCtxt, &Matrix(ref m): &Matrix,
|
||||
witness: WitnessPreference) -> Usefulness {
|
||||
let arity = constructor_arity(cx, &ctor, lty);
|
||||
let matrix = Matrix(m.iter().filter_map(|r| {
|
||||
specialize(cx, r.index(&FullRange), &ctor, 0u, arity)
|
||||
specialize(cx, &r[], &ctor, 0u, arity)
|
||||
}).collect());
|
||||
match specialize(cx, v, &ctor, 0u, arity) {
|
||||
Some(v) => is_useful(cx, &matrix, v.index(&FullRange), witness),
|
||||
Some(v) => is_useful(cx, &matrix, &v[], witness),
|
||||
None => NotUseful
|
||||
}
|
||||
}
|
||||
@ -729,7 +729,7 @@ fn pat_constructors(cx: &MatchCheckCtxt, p: &Pat,
|
||||
/// This computes the arity of a constructor. The arity of a constructor
|
||||
/// is how many subpattern patterns of that constructor should be expanded to.
|
||||
///
|
||||
/// For instance, a tuple pattern (_, 42u, Some(.index(&FullRange))) has the arity of 3.
|
||||
/// For instance, a tuple pattern (_, 42u, Some([])) has the arity of 3.
|
||||
/// A struct pattern's arity is the number of fields it contains, etc.
|
||||
pub fn constructor_arity(cx: &MatchCheckCtxt, ctor: &Constructor, ty: Ty) -> uint {
|
||||
match ty.sty {
|
||||
@ -926,8 +926,8 @@ pub fn specialize<'a>(cx: &MatchCheckCtxt, r: &[&'a Pat],
|
||||
}
|
||||
};
|
||||
head.map(|mut head| {
|
||||
head.push_all(r.index(&(0..col)));
|
||||
head.push_all(r.index(&((col + 1)..)));
|
||||
head.push_all(&r[0..col]);
|
||||
head.push_all(&r[(col + 1)..]);
|
||||
head
|
||||
})
|
||||
}
|
||||
@ -1041,10 +1041,10 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
|
||||
_ => {
|
||||
cx.tcx.sess.span_bug(
|
||||
p.span,
|
||||
format!("binding pattern {} is not an \
|
||||
&format!("binding pattern {} is not an \
|
||||
identifier: {:?}",
|
||||
p.id,
|
||||
p.node).index(&FullRange));
|
||||
p.node)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -111,8 +111,8 @@ impl<'a, 'tcx> CheckStaticVisitor<'a, 'tcx> {
|
||||
return
|
||||
};
|
||||
|
||||
self.tcx.sess.span_err(e.span, format!("mutable statics are not allowed \
|
||||
to have {}", suffix).index(&FullRange));
|
||||
self.tcx.sess.span_err(e.span, &format!("mutable statics are not allowed \
|
||||
to have {}", suffix)[]);
|
||||
}
|
||||
|
||||
fn check_static_type(&self, e: &ast::Expr) {
|
||||
@ -169,8 +169,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckStaticVisitor<'a, 'tcx> {
|
||||
ty::ty_struct(did, _) |
|
||||
ty::ty_enum(did, _) if ty::has_dtor(self.tcx, did) => {
|
||||
self.tcx.sess.span_err(e.span,
|
||||
format!("{} are not allowed to have \
|
||||
destructors", self.msg()).index(&FullRange))
|
||||
&format!("{} are not allowed to have \
|
||||
destructors", self.msg())[])
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@ -234,7 +234,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckStaticVisitor<'a, 'tcx> {
|
||||
let msg = "constants cannot refer to other statics, \
|
||||
insert an intermediate constant \
|
||||
instead";
|
||||
self.tcx.sess.span_err(e.span, msg.index(&FullRange));
|
||||
self.tcx.sess.span_err(e.span, &msg[]);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
@ -104,8 +104,8 @@ impl<'a, 'ast, 'v> Visitor<'v> for CheckItemRecursionVisitor<'a, 'ast> {
|
||||
ast_map::NodeForeignItem(_) => {},
|
||||
_ => {
|
||||
self.sess.span_err(e.span,
|
||||
format!("expected item, found {}",
|
||||
self.ast_map.node_to_string(def_id.node)).index(&FullRange));
|
||||
&format!("expected item, found {}",
|
||||
self.ast_map.node_to_string(def_id.node))[]);
|
||||
return;
|
||||
},
|
||||
}
|
||||
|
@ -48,7 +48,7 @@ use std::rc::Rc;
|
||||
// target uses". This _includes_ integer-constants, plus the following
|
||||
// constructors:
|
||||
//
|
||||
// fixed-size vectors and strings: .index(&FullRange) and ""/_
|
||||
// fixed-size vectors and strings: [] and ""/_
|
||||
// vector and string slices: &[] and &""
|
||||
// tuples: (,)
|
||||
// enums: foo(...)
|
||||
@ -117,7 +117,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt,
|
||||
None => None,
|
||||
Some(ast_map::NodeItem(it)) => match it.node {
|
||||
ast::ItemEnum(ast::EnumDef { ref variants }, _) => {
|
||||
variant_expr(variants.index(&FullRange), variant_def.node)
|
||||
variant_expr(&variants[], variant_def.node)
|
||||
}
|
||||
_ => None
|
||||
},
|
||||
@ -138,7 +138,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt,
|
||||
// NOTE this doesn't do the right thing, it compares inlined
|
||||
// NodeId's to the original variant_def's NodeId, but they
|
||||
// come from different crates, so they will likely never match.
|
||||
variant_expr(variants.index(&FullRange), variant_def.node).map(|e| e.id)
|
||||
variant_expr(&variants[], variant_def.node).map(|e| e.id)
|
||||
}
|
||||
_ => None
|
||||
},
|
||||
@ -364,7 +364,7 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: &Expr) -> P<ast::Pat> {
|
||||
pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val {
|
||||
match eval_const_expr_partial(tcx, e) {
|
||||
Ok(r) => r,
|
||||
Err(s) => tcx.sess.span_fatal(e.span, s.index(&FullRange))
|
||||
Err(s) => tcx.sess.span_fatal(e.span, &s[])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -312,7 +312,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> {
|
||||
let mut t = on_entry.to_vec();
|
||||
self.apply_gen_kill(cfgidx, t.as_mut_slice());
|
||||
temp_bits = t;
|
||||
temp_bits.index(&FullRange)
|
||||
&temp_bits[]
|
||||
}
|
||||
};
|
||||
debug!("{} each_bit_for_node({:?}, cfgidx={:?}) bits={}",
|
||||
@ -421,7 +421,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> {
|
||||
let bits = self.kills.slice_mut(start, end);
|
||||
debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [before]",
|
||||
self.analysis_name, flow_exit, mut_bits_to_string(bits));
|
||||
bits.clone_from_slice(orig_kills.index(&FullRange));
|
||||
bits.clone_from_slice(&orig_kills[]);
|
||||
debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [after]",
|
||||
self.analysis_name, flow_exit, mut_bits_to_string(bits));
|
||||
}
|
||||
@ -554,7 +554,7 @@ fn bits_to_string(words: &[uint]) -> String {
|
||||
let mut v = word;
|
||||
for _ in range(0u, uint::BYTES) {
|
||||
result.push(sep);
|
||||
result.push_str(format!("{:02x}", v & 0xFF).index(&FullRange));
|
||||
result.push_str(&format!("{:02x}", v & 0xFF)[]);
|
||||
v >>= 8;
|
||||
sep = '-';
|
||||
}
|
||||
|
@ -117,8 +117,8 @@ fn calculate_type(sess: &session::Session,
|
||||
sess.cstore.iter_crate_data(|cnum, data| {
|
||||
let src = sess.cstore.get_used_crate_source(cnum).unwrap();
|
||||
if src.rlib.is_some() { return }
|
||||
sess.err(format!("dependency `{}` not found in rlib format",
|
||||
data.name).index(&FullRange));
|
||||
sess.err(&format!("dependency `{}` not found in rlib format",
|
||||
data.name)[]);
|
||||
});
|
||||
return Vec::new();
|
||||
}
|
||||
@ -191,13 +191,13 @@ fn calculate_type(sess: &session::Session,
|
||||
Some(cstore::RequireDynamic) if src.dylib.is_some() => continue,
|
||||
Some(kind) => {
|
||||
let data = sess.cstore.get_crate_data(cnum + 1);
|
||||
sess.err(format!("crate `{}` required to be available in {}, \
|
||||
sess.err(&format!("crate `{}` required to be available in {}, \
|
||||
but it was not available in this form",
|
||||
data.name,
|
||||
match kind {
|
||||
cstore::RequireStatic => "rlib",
|
||||
cstore::RequireDynamic => "dylib",
|
||||
}).index(&FullRange));
|
||||
})[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -220,9 +220,9 @@ fn add_library(sess: &session::Session,
|
||||
// can be refined over time.
|
||||
if link2 != link || link == cstore::RequireStatic {
|
||||
let data = sess.cstore.get_crate_data(cnum);
|
||||
sess.err(format!("cannot satisfy dependencies so `{}` only \
|
||||
sess.err(&format!("cannot satisfy dependencies so `{}` only \
|
||||
shows up once",
|
||||
data.name).index(&FullRange));
|
||||
data.name)[]);
|
||||
sess.help("having upstream crates all available in one format \
|
||||
will likely make this go away");
|
||||
}
|
||||
|
@ -857,8 +857,8 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> {
|
||||
let (m, r) = match self_ty.sty {
|
||||
ty::ty_rptr(r, ref m) => (m.mutbl, r),
|
||||
_ => self.tcx().sess.span_bug(expr.span,
|
||||
format!("bad overloaded deref type {}",
|
||||
method_ty.repr(self.tcx())).index(&FullRange))
|
||||
&format!("bad overloaded deref type {}",
|
||||
method_ty.repr(self.tcx()))[])
|
||||
};
|
||||
let bk = ty::BorrowKind::from_mutbl(m);
|
||||
self.delegate.borrow(expr.id, expr.span, cmt,
|
||||
@ -1180,7 +1180,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> {
|
||||
let msg = format!("Pattern has unexpected def: {:?} and type {}",
|
||||
def,
|
||||
cmt_pat.ty.repr(tcx));
|
||||
tcx.sess.span_bug(pat.span, msg.index(&FullRange))
|
||||
tcx.sess.span_bug(pat.span, &msg[])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -142,7 +142,7 @@ pub trait Combine<'tcx> : Sized {
|
||||
for _ in a_regions.iter() {
|
||||
invariance.push(ty::Invariant);
|
||||
}
|
||||
invariance.index(&FullRange)
|
||||
&invariance[]
|
||||
}
|
||||
};
|
||||
|
||||
@ -477,10 +477,10 @@ pub fn super_tys<'tcx, C: Combine<'tcx>>(this: &C,
|
||||
(&ty::ty_infer(TyVar(_)), _) |
|
||||
(_, &ty::ty_infer(TyVar(_))) => {
|
||||
tcx.sess.bug(
|
||||
format!("{}: bot and var types should have been handled ({},{})",
|
||||
&format!("{}: bot and var types should have been handled ({},{})",
|
||||
this.tag(),
|
||||
a.repr(this.infcx().tcx),
|
||||
b.repr(this.infcx().tcx)).index(&FullRange));
|
||||
b.repr(this.infcx().tcx))[]);
|
||||
}
|
||||
|
||||
(&ty::ty_err, _) | (_, &ty::ty_err) => {
|
||||
@ -855,8 +855,8 @@ impl<'cx, 'tcx> ty_fold::TypeFolder<'tcx> for Generalizer<'cx, 'tcx> {
|
||||
ty::ReEarlyBound(..) => {
|
||||
self.tcx().sess.span_bug(
|
||||
self.span,
|
||||
format!("Encountered early bound region when generalizing: {}",
|
||||
r.repr(self.tcx())).index(&FullRange));
|
||||
&format!("Encountered early bound region when generalizing: {}",
|
||||
r.repr(self.tcx()))[]);
|
||||
}
|
||||
|
||||
// Always make a fresh region variable for skolemized regions;
|
||||
|
@ -200,9 +200,9 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
ref trace_origins,
|
||||
ref same_regions) => {
|
||||
if !same_regions.is_empty() {
|
||||
self.report_processed_errors(var_origins.index(&FullRange),
|
||||
trace_origins.index(&FullRange),
|
||||
same_regions.index(&FullRange));
|
||||
self.report_processed_errors(&var_origins[],
|
||||
&trace_origins[],
|
||||
&same_regions[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -373,10 +373,10 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
|
||||
self.tcx.sess.span_err(
|
||||
trace.origin.span(),
|
||||
format!("{}: {} ({})",
|
||||
&format!("{}: {} ({})",
|
||||
message_root_str,
|
||||
expected_found_str,
|
||||
ty::type_err_to_str(self.tcx, terr)).index(&FullRange));
|
||||
ty::type_err_to_str(self.tcx, terr))[]);
|
||||
|
||||
match trace.origin {
|
||||
infer::MatchExpressionArm(_, arm_span) =>
|
||||
@ -445,42 +445,42 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
// Does the required lifetime have a nice name we can print?
|
||||
self.tcx.sess.span_err(
|
||||
origin.span(),
|
||||
format!("{} may not live long enough", labeled_user_string).index(&FullRange));
|
||||
&format!("{} may not live long enough", labeled_user_string)[]);
|
||||
self.tcx.sess.span_help(
|
||||
origin.span(),
|
||||
format!(
|
||||
&format!(
|
||||
"consider adding an explicit lifetime bound `{}: {}`...",
|
||||
bound_kind.user_string(self.tcx),
|
||||
sub.user_string(self.tcx)).index(&FullRange));
|
||||
sub.user_string(self.tcx))[]);
|
||||
}
|
||||
|
||||
ty::ReStatic => {
|
||||
// Does the required lifetime have a nice name we can print?
|
||||
self.tcx.sess.span_err(
|
||||
origin.span(),
|
||||
format!("{} may not live long enough", labeled_user_string).index(&FullRange));
|
||||
&format!("{} may not live long enough", labeled_user_string)[]);
|
||||
self.tcx.sess.span_help(
|
||||
origin.span(),
|
||||
format!(
|
||||
&format!(
|
||||
"consider adding an explicit lifetime bound `{}: 'static`...",
|
||||
bound_kind.user_string(self.tcx)).index(&FullRange));
|
||||
bound_kind.user_string(self.tcx))[]);
|
||||
}
|
||||
|
||||
_ => {
|
||||
// If not, be less specific.
|
||||
self.tcx.sess.span_err(
|
||||
origin.span(),
|
||||
format!(
|
||||
&format!(
|
||||
"{} may not live long enough",
|
||||
labeled_user_string).index(&FullRange));
|
||||
labeled_user_string)[]);
|
||||
self.tcx.sess.span_help(
|
||||
origin.span(),
|
||||
format!(
|
||||
&format!(
|
||||
"consider adding an explicit lifetime bound for `{}`",
|
||||
bound_kind.user_string(self.tcx)).index(&FullRange));
|
||||
bound_kind.user_string(self.tcx))[]);
|
||||
note_and_explain_region(
|
||||
self.tcx,
|
||||
format!("{} must be valid for ", labeled_user_string).index(&FullRange),
|
||||
&format!("{} must be valid for ", labeled_user_string)[],
|
||||
sub,
|
||||
"...");
|
||||
}
|
||||
@ -517,12 +517,12 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
infer::ReborrowUpvar(span, ref upvar_id) => {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
format!("lifetime of borrowed pointer outlives \
|
||||
&format!("lifetime of borrowed pointer outlives \
|
||||
lifetime of captured variable `{}`...",
|
||||
ty::local_var_name_str(self.tcx,
|
||||
upvar_id.var_id)
|
||||
.get()
|
||||
.to_string()).index(&FullRange));
|
||||
.to_string())[]);
|
||||
note_and_explain_region(
|
||||
self.tcx,
|
||||
"...the borrowed pointer is valid for ",
|
||||
@ -530,11 +530,11 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
"...");
|
||||
note_and_explain_region(
|
||||
self.tcx,
|
||||
format!("...but `{}` is only valid for ",
|
||||
&format!("...but `{}` is only valid for ",
|
||||
ty::local_var_name_str(self.tcx,
|
||||
upvar_id.var_id)
|
||||
.get()
|
||||
.to_string()).index(&FullRange),
|
||||
.to_string())[],
|
||||
sup,
|
||||
"");
|
||||
}
|
||||
@ -576,11 +576,11 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
infer::FreeVariable(span, id) => {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
format!("captured variable `{}` does not \
|
||||
&format!("captured variable `{}` does not \
|
||||
outlive the enclosing closure",
|
||||
ty::local_var_name_str(self.tcx,
|
||||
id).get()
|
||||
.to_string()).index(&FullRange));
|
||||
.to_string())[]);
|
||||
note_and_explain_region(
|
||||
self.tcx,
|
||||
"captured variable is valid for ",
|
||||
@ -620,9 +620,9 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
infer::RelateParamBound(span, ty) => {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
format!("the type `{}` does not fulfill the \
|
||||
&format!("the type `{}` does not fulfill the \
|
||||
required lifetime",
|
||||
self.ty_to_string(ty)).index(&FullRange));
|
||||
self.ty_to_string(ty))[]);
|
||||
note_and_explain_region(self.tcx,
|
||||
"type must outlive ",
|
||||
sub,
|
||||
@ -646,9 +646,9 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
infer::RelateDefaultParamBound(span, ty) => {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
format!("the type `{}` (provided as the value of \
|
||||
&format!("the type `{}` (provided as the value of \
|
||||
a type parameter) is not valid at this point",
|
||||
self.ty_to_string(ty)).index(&FullRange));
|
||||
self.ty_to_string(ty))[]);
|
||||
note_and_explain_region(self.tcx,
|
||||
"type must outlive ",
|
||||
sub,
|
||||
@ -712,9 +712,9 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
infer::ExprTypeIsNotInScope(t, span) => {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
format!("type of expression contains references \
|
||||
&format!("type of expression contains references \
|
||||
that are not valid during the expression: `{}`",
|
||||
self.ty_to_string(t)).index(&FullRange));
|
||||
self.ty_to_string(t))[]);
|
||||
note_and_explain_region(
|
||||
self.tcx,
|
||||
"type is only valid for ",
|
||||
@ -734,9 +734,9 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
infer::ReferenceOutlivesReferent(ty, span) => {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
format!("in type `{}`, reference has a longer lifetime \
|
||||
&format!("in type `{}`, reference has a longer lifetime \
|
||||
than the data it references",
|
||||
self.ty_to_string(ty)).index(&FullRange));
|
||||
self.ty_to_string(ty))[]);
|
||||
note_and_explain_region(
|
||||
self.tcx,
|
||||
"the pointer is valid for ",
|
||||
@ -861,7 +861,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
let (fn_decl, generics, unsafety, ident, expl_self, span)
|
||||
= node_inner.expect("expect item fn");
|
||||
let taken = lifetimes_in_scope(self.tcx, scope_id);
|
||||
let life_giver = LifeGiver::with_taken(taken.index(&FullRange));
|
||||
let life_giver = LifeGiver::with_taken(&taken[]);
|
||||
let rebuilder = Rebuilder::new(self.tcx, fn_decl, expl_self,
|
||||
generics, same_regions, &life_giver);
|
||||
let (fn_decl, expl_self, generics) = rebuilder.rebuild();
|
||||
@ -937,7 +937,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
|
||||
}
|
||||
expl_self_opt = self.rebuild_expl_self(expl_self_opt, lifetime,
|
||||
&anon_nums, ®ion_names);
|
||||
inputs = self.rebuild_args_ty(inputs.index(&FullRange), lifetime,
|
||||
inputs = self.rebuild_args_ty(&inputs[], lifetime,
|
||||
&anon_nums, ®ion_names);
|
||||
output = self.rebuild_output(&output, lifetime, &anon_nums, ®ion_names);
|
||||
ty_params = self.rebuild_ty_params(ty_params, lifetime,
|
||||
@ -972,7 +972,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
|
||||
names.push(lt_name);
|
||||
}
|
||||
names.sort();
|
||||
let name = token::str_to_ident(names[0].index(&FullRange)).name;
|
||||
let name = token::str_to_ident(&names[0][]).name;
|
||||
return (name_to_dummy_lifetime(name), Kept);
|
||||
}
|
||||
return (self.life_giver.give_lifetime(), Fresh);
|
||||
@ -1220,9 +1220,9 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
|
||||
None => {
|
||||
self.tcx
|
||||
.sess
|
||||
.fatal(format!(
|
||||
.fatal(&format!(
|
||||
"unbound path {}",
|
||||
pprust::path_to_string(path)).index(&FullRange))
|
||||
pprust::path_to_string(path))[])
|
||||
}
|
||||
Some(&d) => d
|
||||
};
|
||||
@ -1420,7 +1420,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
opt_explicit_self, generics);
|
||||
let msg = format!("consider using an explicit lifetime \
|
||||
parameter as shown: {}", suggested_fn);
|
||||
self.tcx.sess.span_help(span, msg.index(&FullRange));
|
||||
self.tcx.sess.span_help(span, &msg[]);
|
||||
}
|
||||
|
||||
fn report_inference_failure(&self,
|
||||
@ -1461,9 +1461,9 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
|
||||
self.tcx.sess.span_err(
|
||||
var_origin.span(),
|
||||
format!("cannot infer an appropriate lifetime{} \
|
||||
&format!("cannot infer an appropriate lifetime{} \
|
||||
due to conflicting requirements",
|
||||
var_description).index(&FullRange));
|
||||
var_description)[]);
|
||||
}
|
||||
|
||||
fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) {
|
||||
@ -1510,8 +1510,8 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
Some(values_str) => {
|
||||
self.tcx.sess.span_note(
|
||||
trace.origin.span(),
|
||||
format!("...so that {} ({})",
|
||||
desc, values_str).index(&FullRange));
|
||||
&format!("...so that {} ({})",
|
||||
desc, values_str)[]);
|
||||
}
|
||||
None => {
|
||||
// Really should avoid printing this error at
|
||||
@ -1520,7 +1520,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
// doing right now. - nmatsakis
|
||||
self.tcx.sess.span_note(
|
||||
trace.origin.span(),
|
||||
format!("...so that {}", desc).index(&FullRange));
|
||||
&format!("...so that {}", desc)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1533,11 +1533,11 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
infer::ReborrowUpvar(span, ref upvar_id) => {
|
||||
self.tcx.sess.span_note(
|
||||
span,
|
||||
format!(
|
||||
&format!(
|
||||
"...so that closure can access `{}`",
|
||||
ty::local_var_name_str(self.tcx, upvar_id.var_id)
|
||||
.get()
|
||||
.to_string()).index(&FullRange))
|
||||
.to_string())[])
|
||||
}
|
||||
infer::InfStackClosure(span) => {
|
||||
self.tcx.sess.span_note(
|
||||
@ -1558,11 +1558,11 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
infer::FreeVariable(span, id) => {
|
||||
self.tcx.sess.span_note(
|
||||
span,
|
||||
format!("...so that captured variable `{}` \
|
||||
&format!("...so that captured variable `{}` \
|
||||
does not outlive the enclosing closure",
|
||||
ty::local_var_name_str(
|
||||
self.tcx,
|
||||
id).get().to_string()).index(&FullRange));
|
||||
id).get().to_string())[]);
|
||||
}
|
||||
infer::IndexSlice(span) => {
|
||||
self.tcx.sess.span_note(
|
||||
@ -1604,9 +1604,9 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
infer::ExprTypeIsNotInScope(t, span) => {
|
||||
self.tcx.sess.span_note(
|
||||
span,
|
||||
format!("...so type `{}` of expression is valid during the \
|
||||
&format!("...so type `{}` of expression is valid during the \
|
||||
expression",
|
||||
self.ty_to_string(t)).index(&FullRange));
|
||||
self.ty_to_string(t))[]);
|
||||
}
|
||||
infer::BindingTypeIsNotValidAtDecl(span) => {
|
||||
self.tcx.sess.span_note(
|
||||
@ -1616,30 +1616,30 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
infer::ReferenceOutlivesReferent(ty, span) => {
|
||||
self.tcx.sess.span_note(
|
||||
span,
|
||||
format!("...so that the reference type `{}` \
|
||||
&format!("...so that the reference type `{}` \
|
||||
does not outlive the data it points at",
|
||||
self.ty_to_string(ty)).index(&FullRange));
|
||||
self.ty_to_string(ty))[]);
|
||||
}
|
||||
infer::RelateParamBound(span, t) => {
|
||||
self.tcx.sess.span_note(
|
||||
span,
|
||||
format!("...so that the type `{}` \
|
||||
&format!("...so that the type `{}` \
|
||||
will meet the declared lifetime bounds",
|
||||
self.ty_to_string(t)).index(&FullRange));
|
||||
self.ty_to_string(t))[]);
|
||||
}
|
||||
infer::RelateDefaultParamBound(span, t) => {
|
||||
self.tcx.sess.span_note(
|
||||
span,
|
||||
format!("...so that type parameter \
|
||||
&format!("...so that type parameter \
|
||||
instantiated with `{}`, \
|
||||
will meet its declared lifetime bounds",
|
||||
self.ty_to_string(t)).index(&FullRange));
|
||||
self.ty_to_string(t))[]);
|
||||
}
|
||||
infer::RelateRegionParamBound(span) => {
|
||||
self.tcx.sess.span_note(
|
||||
span,
|
||||
format!("...so that the declared lifetime parameter bounds \
|
||||
are satisfied").index(&FullRange));
|
||||
&format!("...so that the declared lifetime parameter bounds \
|
||||
are satisfied")[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1691,7 +1691,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt,
|
||||
Some(node) => match node {
|
||||
ast_map::NodeItem(item) => match item.node {
|
||||
ast::ItemFn(_, _, _, ref gen, _) => {
|
||||
taken.push_all(gen.lifetimes.index(&FullRange));
|
||||
taken.push_all(&gen.lifetimes[]);
|
||||
None
|
||||
},
|
||||
_ => None
|
||||
@ -1699,7 +1699,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt,
|
||||
ast_map::NodeImplItem(ii) => {
|
||||
match *ii {
|
||||
ast::MethodImplItem(ref m) => {
|
||||
taken.push_all(m.pe_generics().lifetimes.index(&FullRange));
|
||||
taken.push_all(&m.pe_generics().lifetimes[]);
|
||||
Some(m.id)
|
||||
}
|
||||
ast::TypeImplItem(_) => None,
|
||||
@ -1758,10 +1758,10 @@ impl LifeGiver {
|
||||
let mut lifetime;
|
||||
loop {
|
||||
let mut s = String::from_str("'");
|
||||
s.push_str(num_to_string(self.counter.get()).index(&FullRange));
|
||||
s.push_str(&num_to_string(self.counter.get())[]);
|
||||
if !self.taken.contains(&s) {
|
||||
lifetime = name_to_dummy_lifetime(
|
||||
token::str_to_ident(s.index(&FullRange)).name);
|
||||
token::str_to_ident(&s[]).name);
|
||||
self.generated.borrow_mut().push(lifetime);
|
||||
break;
|
||||
}
|
||||
|
@ -187,9 +187,9 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C
|
||||
|
||||
infcx.tcx.sess.span_bug(
|
||||
span,
|
||||
format!("region {:?} is not associated with \
|
||||
&format!("region {:?} is not associated with \
|
||||
any bound region from A!",
|
||||
r0).index(&FullRange))
|
||||
r0)[])
|
||||
}
|
||||
}
|
||||
|
||||
@ -322,7 +322,7 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C
|
||||
}
|
||||
infcx.tcx.sess.span_bug(
|
||||
span,
|
||||
format!("could not find original bound region for {:?}", r).index(&FullRange));
|
||||
&format!("could not find original bound region for {:?}", r)[]);
|
||||
}
|
||||
|
||||
fn fresh_bound_variable(infcx: &InferCtxt, debruijn: ty::DebruijnIndex) -> ty::Region {
|
||||
@ -339,7 +339,7 @@ fn var_ids<'tcx, T: Combine<'tcx>>(combiner: &T,
|
||||
r => {
|
||||
combiner.infcx().tcx.sess.span_bug(
|
||||
combiner.trace().origin.span(),
|
||||
format!("found non-region-vid: {:?}", r).index(&FullRange));
|
||||
&format!("found non-region-vid: {:?}", r)[]);
|
||||
}
|
||||
}).collect()
|
||||
}
|
||||
|
@ -1000,9 +1000,9 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
||||
format!(" ({})", ty::type_err_to_str(self.tcx, t_err))
|
||||
});
|
||||
|
||||
self.tcx.sess.span_err(sp, format!("{}{}",
|
||||
self.tcx.sess.span_err(sp, &format!("{}{}",
|
||||
mk_msg(resolved_expected.map(|t| self.ty_to_string(t)), actual_ty),
|
||||
error_str).index(&FullRange));
|
||||
error_str)[]);
|
||||
|
||||
for err in err.iter() {
|
||||
ty::note_and_explain_type_err(self.tcx, *err)
|
||||
|
@ -473,9 +473,9 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
|
||||
(_, ReLateBound(..)) => {
|
||||
self.tcx.sess.span_bug(
|
||||
origin.span(),
|
||||
format!("cannot relate bound region: {} <= {}",
|
||||
&format!("cannot relate bound region: {} <= {}",
|
||||
sub.repr(self.tcx),
|
||||
sup.repr(self.tcx)).index(&FullRange));
|
||||
sup.repr(self.tcx))[]);
|
||||
}
|
||||
(_, ReStatic) => {
|
||||
// all regions are subregions of static, so we can ignore this
|
||||
@ -734,9 +734,9 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
|
||||
(ReEarlyBound(..), _) |
|
||||
(_, ReEarlyBound(..)) => {
|
||||
self.tcx.sess.bug(
|
||||
format!("cannot relate bound region: LUB({}, {})",
|
||||
&format!("cannot relate bound region: LUB({}, {})",
|
||||
a.repr(self.tcx),
|
||||
b.repr(self.tcx)).index(&FullRange));
|
||||
b.repr(self.tcx))[]);
|
||||
}
|
||||
|
||||
(ReStatic, _) | (_, ReStatic) => {
|
||||
@ -750,10 +750,10 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
|
||||
(ReInfer(ReVar(v_id)), _) | (_, ReInfer(ReVar(v_id))) => {
|
||||
self.tcx.sess.span_bug(
|
||||
(*self.var_origins.borrow())[v_id.index as uint].span(),
|
||||
format!("lub_concrete_regions invoked with \
|
||||
&format!("lub_concrete_regions invoked with \
|
||||
non-concrete regions: {:?}, {:?}",
|
||||
a,
|
||||
b).index(&FullRange));
|
||||
b)[]);
|
||||
}
|
||||
|
||||
(ReFree(ref fr), ReScope(s_id)) |
|
||||
@ -834,9 +834,9 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
|
||||
(ReEarlyBound(..), _) |
|
||||
(_, ReEarlyBound(..)) => {
|
||||
self.tcx.sess.bug(
|
||||
format!("cannot relate bound region: GLB({}, {})",
|
||||
&format!("cannot relate bound region: GLB({}, {})",
|
||||
a.repr(self.tcx),
|
||||
b.repr(self.tcx)).index(&FullRange));
|
||||
b.repr(self.tcx))[]);
|
||||
}
|
||||
|
||||
(ReStatic, r) | (r, ReStatic) => {
|
||||
@ -853,10 +853,10 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
|
||||
(_, ReInfer(ReVar(v_id))) => {
|
||||
self.tcx.sess.span_bug(
|
||||
(*self.var_origins.borrow())[v_id.index as uint].span(),
|
||||
format!("glb_concrete_regions invoked with \
|
||||
&format!("glb_concrete_regions invoked with \
|
||||
non-concrete regions: {:?}, {:?}",
|
||||
a,
|
||||
b).index(&FullRange));
|
||||
b)[]);
|
||||
}
|
||||
|
||||
(ReFree(ref fr), ReScope(s_id)) |
|
||||
@ -977,7 +977,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
|
||||
self.expansion(var_data.as_mut_slice());
|
||||
self.contraction(var_data.as_mut_slice());
|
||||
let values =
|
||||
self.extract_values_and_collect_conflicts(var_data.index(&FullRange),
|
||||
self.extract_values_and_collect_conflicts(&var_data[],
|
||||
errors);
|
||||
self.collect_concrete_region_errors(&values, errors);
|
||||
values
|
||||
@ -1411,11 +1411,11 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
|
||||
|
||||
self.tcx.sess.span_bug(
|
||||
(*self.var_origins.borrow())[node_idx.index as uint].span(),
|
||||
format!("collect_error_for_expanding_node() could not find error \
|
||||
&format!("collect_error_for_expanding_node() could not find error \
|
||||
for var {:?}, lower_bounds={}, upper_bounds={}",
|
||||
node_idx,
|
||||
lower_bounds.repr(self.tcx),
|
||||
upper_bounds.repr(self.tcx)).index(&FullRange));
|
||||
upper_bounds.repr(self.tcx))[]);
|
||||
}
|
||||
|
||||
fn collect_error_for_contracting_node(
|
||||
@ -1456,10 +1456,10 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
|
||||
|
||||
self.tcx.sess.span_bug(
|
||||
(*self.var_origins.borrow())[node_idx.index as uint].span(),
|
||||
format!("collect_error_for_contracting_node() could not find error \
|
||||
&format!("collect_error_for_contracting_node() could not find error \
|
||||
for var {:?}, upper_bounds={}",
|
||||
node_idx,
|
||||
upper_bounds.repr(self.tcx)).index(&FullRange));
|
||||
upper_bounds.repr(self.tcx))[]);
|
||||
}
|
||||
|
||||
fn collect_concrete_regions(&self,
|
||||
|
@ -95,8 +95,8 @@ impl<'a, 'tcx> ty_fold::TypeFolder<'tcx> for FullTypeResolver<'a, 'tcx> {
|
||||
}
|
||||
ty::ty_infer(_) => {
|
||||
self.infcx.tcx.sess.bug(
|
||||
format!("Unexpected type in full type resolver: {}",
|
||||
t.repr(self.infcx.tcx)).index(&FullRange));
|
||||
&format!("Unexpected type in full type resolver: {}",
|
||||
t.repr(self.infcx.tcx))[]);
|
||||
}
|
||||
_ => {
|
||||
ty_fold::super_fold_ty(self, t)
|
||||
|
@ -326,8 +326,8 @@ impl<'a, 'tcx> IrMaps<'a, 'tcx> {
|
||||
None => {
|
||||
self.tcx
|
||||
.sess
|
||||
.span_bug(span, format!("no variable registered for id {}",
|
||||
node_id).index(&FullRange));
|
||||
.span_bug(span, &format!("no variable registered for id {}",
|
||||
node_id)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -597,8 +597,8 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
|
||||
// creating liveness nodes for.
|
||||
self.ir.tcx.sess.span_bug(
|
||||
span,
|
||||
format!("no live node registered for node {}",
|
||||
node_id).index(&FullRange));
|
||||
&format!("no live node registered for node {}",
|
||||
node_id)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1133,7 +1133,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
|
||||
// Uninteresting cases: just propagate in rev exec order
|
||||
|
||||
ast::ExprVec(ref exprs) => {
|
||||
self.propagate_through_exprs(exprs.index(&FullRange), succ)
|
||||
self.propagate_through_exprs(&exprs[], succ)
|
||||
}
|
||||
|
||||
ast::ExprRepeat(ref element, ref count) => {
|
||||
@ -1157,7 +1157,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
|
||||
} else {
|
||||
succ
|
||||
};
|
||||
let succ = self.propagate_through_exprs(args.index(&FullRange), succ);
|
||||
let succ = self.propagate_through_exprs(&args[], succ);
|
||||
self.propagate_through_expr(&**f, succ)
|
||||
}
|
||||
|
||||
@ -1170,11 +1170,11 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
|
||||
} else {
|
||||
succ
|
||||
};
|
||||
self.propagate_through_exprs(args.index(&FullRange), succ)
|
||||
self.propagate_through_exprs(&args[], succ)
|
||||
}
|
||||
|
||||
ast::ExprTup(ref exprs) => {
|
||||
self.propagate_through_exprs(exprs.index(&FullRange), succ)
|
||||
self.propagate_through_exprs(&exprs[], succ)
|
||||
}
|
||||
|
||||
ast::ExprBinary(op, ref l, ref r) if ast_util::lazy_binop(op) => {
|
||||
|
@ -584,9 +584,9 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
|
||||
_ => {
|
||||
self.tcx().sess.span_bug(
|
||||
span,
|
||||
format!("Upvar of non-closure {} - {}",
|
||||
&format!("Upvar of non-closure {} - {}",
|
||||
fn_node_id,
|
||||
ty.repr(self.tcx())).index(&FullRange));
|
||||
ty.repr(self.tcx()))[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -615,10 +615,10 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> {
|
||||
match result {
|
||||
None => true,
|
||||
Some((span, msg, note)) => {
|
||||
self.tcx.sess.span_err(span, msg.index(&FullRange));
|
||||
self.tcx.sess.span_err(span, &msg[]);
|
||||
match note {
|
||||
Some((span, msg)) => {
|
||||
self.tcx.sess.span_note(span, msg.index(&FullRange))
|
||||
self.tcx.sess.span_note(span, &msg[])
|
||||
}
|
||||
None => {},
|
||||
}
|
||||
@ -720,7 +720,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> {
|
||||
UnnamedField(idx) => format!("field #{} of {} is private",
|
||||
idx + 1, struct_desc),
|
||||
};
|
||||
self.tcx.sess.span_err(span, msg.index(&FullRange));
|
||||
self.tcx.sess.span_err(span, &msg[]);
|
||||
}
|
||||
|
||||
// Given the ID of a method, checks to ensure it's in scope.
|
||||
@ -741,8 +741,8 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> {
|
||||
self.report_error(self.ensure_public(span,
|
||||
method_id,
|
||||
None,
|
||||
format!("method `{}`",
|
||||
string).index(&FullRange)));
|
||||
&format!("method `{}`",
|
||||
string)[]));
|
||||
}
|
||||
|
||||
// Checks that a path is in scope.
|
||||
@ -756,7 +756,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> {
|
||||
self.ensure_public(span,
|
||||
def,
|
||||
Some(origdid),
|
||||
format!("{} `{}`", tyname, name).index(&FullRange))
|
||||
&format!("{} `{}`", tyname, name)[])
|
||||
};
|
||||
|
||||
match self.last_private_map[path_id] {
|
||||
|
@ -50,7 +50,7 @@ fn generics_require_inlining(generics: &ast::Generics) -> bool {
|
||||
// monomorphized or it was marked with `#[inline]`. This will only return
|
||||
// true for functions.
|
||||
fn item_might_be_inlined(item: &ast::Item) -> bool {
|
||||
if attributes_specify_inlining(item.attrs.index(&FullRange)) {
|
||||
if attributes_specify_inlining(&item.attrs[]) {
|
||||
return true
|
||||
}
|
||||
|
||||
@ -65,7 +65,7 @@ fn item_might_be_inlined(item: &ast::Item) -> bool {
|
||||
|
||||
fn method_might_be_inlined(tcx: &ty::ctxt, method: &ast::Method,
|
||||
impl_src: ast::DefId) -> bool {
|
||||
if attributes_specify_inlining(method.attrs.index(&FullRange)) ||
|
||||
if attributes_specify_inlining(&method.attrs[]) ||
|
||||
generics_require_inlining(method.pe_generics()) {
|
||||
return true
|
||||
}
|
||||
@ -202,7 +202,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> {
|
||||
ast::MethodImplItem(ref method) => {
|
||||
if generics_require_inlining(method.pe_generics()) ||
|
||||
attributes_specify_inlining(
|
||||
method.attrs.index(&FullRange)) {
|
||||
&method.attrs[]) {
|
||||
true
|
||||
} else {
|
||||
let impl_did = self.tcx
|
||||
@ -247,9 +247,9 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> {
|
||||
Some(ref item) => self.propagate_node(item, search_item),
|
||||
None if search_item == ast::CRATE_NODE_ID => {}
|
||||
None => {
|
||||
self.tcx.sess.bug(format!("found unmapped ID in worklist: \
|
||||
self.tcx.sess.bug(&format!("found unmapped ID in worklist: \
|
||||
{}",
|
||||
search_item).index(&FullRange))
|
||||
search_item)[])
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -338,10 +338,10 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> {
|
||||
_ => {
|
||||
self.tcx
|
||||
.sess
|
||||
.bug(format!("found unexpected thingy in worklist: {}",
|
||||
.bug(&format!("found unexpected thingy in worklist: {}",
|
||||
self.tcx
|
||||
.map
|
||||
.node_to_string(search_item)).index(&FullRange))
|
||||
.node_to_string(search_item))[])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -643,7 +643,7 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &ast::Local) {
|
||||
// A, but the inner rvalues `a()` and `b()` have an extended lifetime
|
||||
// due to rule C.
|
||||
//
|
||||
// FIXME(#6308) -- Note that `.index(&FullRange)` patterns work more smoothly post-DST.
|
||||
// FIXME(#6308) -- Note that `[]` patterns work more smoothly post-DST.
|
||||
|
||||
match local.init {
|
||||
Some(ref expr) => {
|
||||
|
@ -398,8 +398,8 @@ impl<'a> LifetimeContext<'a> {
|
||||
fn unresolved_lifetime_ref(&self, lifetime_ref: &ast::Lifetime) {
|
||||
self.sess.span_err(
|
||||
lifetime_ref.span,
|
||||
format!("use of undeclared lifetime name `{}`",
|
||||
token::get_name(lifetime_ref.name)).index(&FullRange));
|
||||
&format!("use of undeclared lifetime name `{}`",
|
||||
token::get_name(lifetime_ref.name))[]);
|
||||
}
|
||||
|
||||
fn check_lifetime_defs(&mut self, old_scope: Scope, lifetimes: &Vec<ast::LifetimeDef>) {
|
||||
@ -411,9 +411,9 @@ impl<'a> LifetimeContext<'a> {
|
||||
if special_idents.iter().any(|&i| i.name == lifetime.lifetime.name) {
|
||||
self.sess.span_err(
|
||||
lifetime.lifetime.span,
|
||||
format!("illegal lifetime parameter name: `{}`",
|
||||
&format!("illegal lifetime parameter name: `{}`",
|
||||
token::get_name(lifetime.lifetime.name))
|
||||
.index(&FullRange));
|
||||
[]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -424,10 +424,10 @@ impl<'a> LifetimeContext<'a> {
|
||||
if lifetime_i.lifetime.name == lifetime_j.lifetime.name {
|
||||
self.sess.span_err(
|
||||
lifetime_j.lifetime.span,
|
||||
format!("lifetime name `{}` declared twice in \
|
||||
&format!("lifetime name `{}` declared twice in \
|
||||
the same scope",
|
||||
token::get_name(lifetime_j.lifetime.name))
|
||||
.index(&FullRange));
|
||||
[]);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -599,12 +599,12 @@ impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> {
|
||||
let span = self.span.unwrap_or(DUMMY_SP);
|
||||
self.tcx().sess.span_bug(
|
||||
span,
|
||||
format!("Type parameter out of range \
|
||||
&format!("Type parameter out of range \
|
||||
when substituting in region {} (root type={}) \
|
||||
(space={:?}, index={})",
|
||||
region_name.as_str(),
|
||||
self.root_ty.repr(self.tcx()),
|
||||
space, i).index(&FullRange));
|
||||
space, i)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -654,14 +654,14 @@ impl<'a,'tcx> SubstFolder<'a,'tcx> {
|
||||
let span = self.span.unwrap_or(DUMMY_SP);
|
||||
self.tcx().sess.span_bug(
|
||||
span,
|
||||
format!("Type parameter `{}` ({}/{:?}/{}) out of range \
|
||||
&format!("Type parameter `{}` ({}/{:?}/{}) out of range \
|
||||
when substituting (root type={}) substs={}",
|
||||
p.repr(self.tcx()),
|
||||
source_ty.repr(self.tcx()),
|
||||
p.space,
|
||||
p.idx,
|
||||
self.root_ty.repr(self.tcx()),
|
||||
self.substs.repr(self.tcx())).index(&FullRange));
|
||||
self.substs.repr(self.tcx()))[]);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -136,8 +136,8 @@ fn ty_is_local_constructor<'tcx>(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
|
||||
ty::ty_open(..) |
|
||||
ty::ty_err => {
|
||||
tcx.sess.bug(
|
||||
format!("ty_is_local invoked on unexpected type: {}",
|
||||
ty.repr(tcx)).index(&FullRange))
|
||||
&format!("ty_is_local invoked on unexpected type: {}",
|
||||
ty.repr(tcx))[])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -337,7 +337,7 @@ pub fn suggest_new_overflow_limit(tcx: &ty::ctxt, span: Span) {
|
||||
let suggested_limit = current_limit * 2;
|
||||
tcx.sess.span_note(
|
||||
span,
|
||||
format!(
|
||||
&format!(
|
||||
"consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate",
|
||||
suggested_limit).index(&FullRange));
|
||||
suggested_limit)[]);
|
||||
}
|
||||
|
@ -227,7 +227,7 @@ impl<'tcx> FulfillmentContext<'tcx> {
|
||||
}
|
||||
|
||||
pub fn pending_obligations(&self) -> &[PredicateObligation<'tcx>] {
|
||||
self.predicates.index(&FullRange)
|
||||
&self.predicates[]
|
||||
}
|
||||
|
||||
/// Attempts to select obligations using `selcx`. If `only_new_obligations` is true, then it
|
||||
|
@ -178,7 +178,7 @@ fn object_safety_violations_for_method<'tcx>(tcx: &ty::ctxt<'tcx>,
|
||||
// The `Self` type is erased, so it should not appear in list of
|
||||
// arguments or return type apart from the receiver.
|
||||
let ref sig = method.fty.sig;
|
||||
for &input_ty in sig.0.inputs.index(&(1..)).iter() {
|
||||
for &input_ty in sig.0.inputs[1..].iter() {
|
||||
if contains_illegal_self_type_reference(tcx, trait_def_id, input_ty) {
|
||||
return Some(MethodViolationCode::ReferencesSelf);
|
||||
}
|
||||
|
@ -903,7 +903,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
|
||||
let all_bounds =
|
||||
util::transitive_bounds(
|
||||
self.tcx(), caller_trait_refs.index(&FullRange));
|
||||
self.tcx(), &caller_trait_refs[]);
|
||||
|
||||
let matching_bounds =
|
||||
all_bounds.filter(
|
||||
@ -1465,9 +1465,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
ty::ty_infer(ty::FreshTy(_)) |
|
||||
ty::ty_infer(ty::FreshIntTy(_)) => {
|
||||
self.tcx().sess.bug(
|
||||
format!(
|
||||
&format!(
|
||||
"asked to assemble builtin bounds of unexpected type: {}",
|
||||
self_ty.repr(self.tcx())).index(&FullRange));
|
||||
self_ty.repr(self.tcx()))[]);
|
||||
}
|
||||
};
|
||||
|
||||
@ -1636,8 +1636,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
AmbiguousBuiltin | ParameterBuiltin => {
|
||||
self.tcx().sess.span_bug(
|
||||
obligation.cause.span,
|
||||
format!("builtin bound for {} was ambig",
|
||||
obligation.repr(self.tcx())).index(&FullRange));
|
||||
&format!("builtin bound for {} was ambig",
|
||||
obligation.repr(self.tcx()))[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1815,8 +1815,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
_ => {
|
||||
self.tcx().sess.span_bug(
|
||||
obligation.cause.span,
|
||||
format!("Fn pointer candidate for inappropriate self type: {}",
|
||||
self_ty.repr(self.tcx())).index(&FullRange));
|
||||
&format!("Fn pointer candidate for inappropriate self type: {}",
|
||||
self_ty.repr(self.tcx()))[]);
|
||||
}
|
||||
};
|
||||
|
||||
@ -1944,9 +1944,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
}
|
||||
Err(()) => {
|
||||
self.tcx().sess.bug(
|
||||
format!("Impl {} was matchable against {} but now is not",
|
||||
&format!("Impl {} was matchable against {} but now is not",
|
||||
impl_def_id.repr(self.tcx()),
|
||||
obligation.repr(self.tcx())).index(&FullRange));
|
||||
obligation.repr(self.tcx()))[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2043,8 +2043,8 @@ impl<'tcx> Predicate<'tcx> {
|
||||
/// struct Foo<T,U:Bar<T>> { ... }
|
||||
///
|
||||
/// Here, the `Generics` for `Foo` would contain a list of bounds like
|
||||
/// `[.index(&FullRange), [U:Bar<T>]]`. Now if there were some particular reference
|
||||
/// like `Foo<int,uint>`, then the `GenericBounds` would be `[.index(&FullRange),
|
||||
/// `[[], [U:Bar<T>]]`. Now if there were some particular reference
|
||||
/// like `Foo<int,uint>`, then the `GenericBounds` would be `[[],
|
||||
/// [uint:Bar<int>]]`.
|
||||
#[derive(Clone, Show)]
|
||||
pub struct GenericBounds<'tcx> {
|
||||
@ -2219,9 +2219,9 @@ impl<'a, 'tcx> ParameterEnvironment<'a, 'tcx> {
|
||||
ParameterEnvironment::for_item(cx, cx.map.get_parent(id))
|
||||
}
|
||||
_ => {
|
||||
cx.sess.bug(format!("ParameterEnvironment::from_item(): \
|
||||
cx.sess.bug(&format!("ParameterEnvironment::from_item(): \
|
||||
`{}` is not an item",
|
||||
cx.map.node_to_string(id)).index(&FullRange))
|
||||
cx.map.node_to_string(id))[])
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -2306,7 +2306,7 @@ impl UnboxedClosureKind {
|
||||
};
|
||||
match result {
|
||||
Ok(trait_did) => trait_did,
|
||||
Err(err) => cx.sess.fatal(err.index(&FullRange)),
|
||||
Err(err) => cx.sess.fatal(&err[]),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -2627,7 +2627,7 @@ impl FlagComputation {
|
||||
}
|
||||
|
||||
&ty_tup(ref ts) => {
|
||||
self.add_tys(ts.index(&FullRange));
|
||||
self.add_tys(&ts[]);
|
||||
}
|
||||
|
||||
&ty_bare_fn(_, ref f) => {
|
||||
@ -2650,7 +2650,7 @@ impl FlagComputation {
|
||||
fn add_fn_sig(&mut self, fn_sig: &PolyFnSig) {
|
||||
let mut computation = FlagComputation::new();
|
||||
|
||||
computation.add_tys(fn_sig.0.inputs.index(&FullRange));
|
||||
computation.add_tys(&fn_sig.0.inputs[]);
|
||||
|
||||
if let ty::FnConverging(output) = fn_sig.0.output {
|
||||
computation.add_ty(output);
|
||||
@ -2819,7 +2819,7 @@ pub fn mk_trait<'tcx>(cx: &ctxt<'tcx>,
|
||||
|
||||
fn bound_list_is_sorted(bounds: &[ty::PolyProjectionPredicate]) -> bool {
|
||||
bounds.len() == 0 ||
|
||||
bounds.index(&(1..)).iter().enumerate().all(
|
||||
bounds[1..].iter().enumerate().all(
|
||||
|(index, bound)| bounds[index].sort_key() <= bound.sort_key())
|
||||
}
|
||||
|
||||
@ -3073,8 +3073,8 @@ pub fn sequence_element_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
|
||||
ty_vec(ty, _) => ty,
|
||||
ty_str => mk_mach_uint(cx, ast::TyU8),
|
||||
ty_open(ty) => sequence_element_type(cx, ty),
|
||||
_ => cx.sess.bug(format!("sequence_element_type called on non-sequence value: {}",
|
||||
ty_to_string(cx, ty)).index(&FullRange)),
|
||||
_ => cx.sess.bug(&format!("sequence_element_type called on non-sequence value: {}",
|
||||
ty_to_string(cx, ty))[]),
|
||||
}
|
||||
}
|
||||
|
||||
@ -3408,7 +3408,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents {
|
||||
ty_struct(did, substs) => {
|
||||
let flds = struct_fields(cx, did, substs);
|
||||
let mut res =
|
||||
TypeContents::union(flds.index(&FullRange),
|
||||
TypeContents::union(&flds[],
|
||||
|f| tc_mt(cx, f.mt, cache));
|
||||
|
||||
if !lookup_repr_hints(cx, did).contains(&attr::ReprExtern) {
|
||||
@ -3432,15 +3432,15 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents {
|
||||
}
|
||||
|
||||
ty_tup(ref tys) => {
|
||||
TypeContents::union(tys.index(&FullRange),
|
||||
TypeContents::union(&tys[],
|
||||
|ty| tc_ty(cx, *ty, cache))
|
||||
}
|
||||
|
||||
ty_enum(did, substs) => {
|
||||
let variants = substd_enum_variants(cx, did, substs);
|
||||
let mut res =
|
||||
TypeContents::union(variants.index(&FullRange), |variant| {
|
||||
TypeContents::union(variant.args.index(&FullRange),
|
||||
TypeContents::union(&variants[], |variant| {
|
||||
TypeContents::union(&variant.args[],
|
||||
|arg_ty| {
|
||||
tc_ty(cx, *arg_ty, cache)
|
||||
})
|
||||
@ -4024,8 +4024,8 @@ pub fn deref<'tcx>(ty: Ty<'tcx>, explicit: bool) -> Option<mt<'tcx>> {
|
||||
pub fn close_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
|
||||
match ty.sty {
|
||||
ty_open(ty) => mk_rptr(cx, cx.mk_region(ReStatic), mt {ty: ty, mutbl:ast::MutImmutable}),
|
||||
_ => cx.sess.bug(format!("Trying to close a non-open type {}",
|
||||
ty_to_string(cx, ty)).index(&FullRange))
|
||||
_ => cx.sess.bug(&format!("Trying to close a non-open type {}",
|
||||
ty_to_string(cx, ty))[])
|
||||
}
|
||||
}
|
||||
|
||||
@ -4125,8 +4125,8 @@ pub fn node_id_to_trait_ref<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId)
|
||||
match cx.trait_refs.borrow().get(&id) {
|
||||
Some(ty) => ty.clone(),
|
||||
None => cx.sess.bug(
|
||||
format!("node_id_to_trait_ref: no trait ref for node `{}`",
|
||||
cx.map.node_to_string(id)).index(&FullRange))
|
||||
&format!("node_id_to_trait_ref: no trait ref for node `{}`",
|
||||
cx.map.node_to_string(id))[])
|
||||
}
|
||||
}
|
||||
|
||||
@ -4138,8 +4138,8 @@ pub fn node_id_to_type<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId) -> Ty<'tcx> {
|
||||
match try_node_id_to_type(cx, id) {
|
||||
Some(ty) => ty,
|
||||
None => cx.sess.bug(
|
||||
format!("node_id_to_type: no type for node `{}`",
|
||||
cx.map.node_to_string(id)).index(&FullRange))
|
||||
&format!("node_id_to_type: no type for node `{}`",
|
||||
cx.map.node_to_string(id))[])
|
||||
}
|
||||
}
|
||||
|
||||
@ -4225,8 +4225,8 @@ pub fn ty_region(tcx: &ctxt,
|
||||
ref s => {
|
||||
tcx.sess.span_bug(
|
||||
span,
|
||||
format!("ty_region() invoked on an inappropriate ty: {:?}",
|
||||
s).index(&FullRange));
|
||||
&format!("ty_region() invoked on an inappropriate ty: {:?}",
|
||||
s)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -4285,13 +4285,13 @@ pub fn expr_span(cx: &ctxt, id: NodeId) -> Span {
|
||||
e.span
|
||||
}
|
||||
Some(f) => {
|
||||
cx.sess.bug(format!("Node id {} is not an expr: {:?}",
|
||||
cx.sess.bug(&format!("Node id {} is not an expr: {:?}",
|
||||
id,
|
||||
f).index(&FullRange));
|
||||
f)[]);
|
||||
}
|
||||
None => {
|
||||
cx.sess.bug(format!("Node id {} is not present \
|
||||
in the node map", id).index(&FullRange));
|
||||
cx.sess.bug(&format!("Node id {} is not present \
|
||||
in the node map", id)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -4305,16 +4305,16 @@ pub fn local_var_name_str(cx: &ctxt, id: NodeId) -> InternedString {
|
||||
}
|
||||
_ => {
|
||||
cx.sess.bug(
|
||||
format!("Variable id {} maps to {:?}, not local",
|
||||
&format!("Variable id {} maps to {:?}, not local",
|
||||
id,
|
||||
pat).index(&FullRange));
|
||||
pat)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
r => {
|
||||
cx.sess.bug(format!("Variable id {} maps to {:?}, not local",
|
||||
cx.sess.bug(&format!("Variable id {} maps to {:?}, not local",
|
||||
id,
|
||||
r).index(&FullRange));
|
||||
r)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -4343,9 +4343,9 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>,
|
||||
}
|
||||
ref b => {
|
||||
cx.sess.bug(
|
||||
format!("AdjustReifyFnPointer adjustment on non-fn-item: \
|
||||
&format!("AdjustReifyFnPointer adjustment on non-fn-item: \
|
||||
{:?}",
|
||||
b).index(&FullRange));
|
||||
b)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -4372,11 +4372,11 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>,
|
||||
None => {
|
||||
cx.sess.span_bug(
|
||||
span,
|
||||
format!("the {}th autoderef failed: \
|
||||
&format!("the {}th autoderef failed: \
|
||||
{}",
|
||||
i,
|
||||
ty_to_string(cx, adjusted_ty))
|
||||
.index(&FullRange));
|
||||
[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -4438,8 +4438,8 @@ pub fn unsize_ty<'tcx>(cx: &ctxt<'tcx>,
|
||||
mk_vec(cx, ty, None)
|
||||
}
|
||||
_ => cx.sess.span_bug(span,
|
||||
format!("UnsizeLength with bad sty: {:?}",
|
||||
ty_to_string(cx, ty)).index(&FullRange))
|
||||
&format!("UnsizeLength with bad sty: {:?}",
|
||||
ty_to_string(cx, ty))[])
|
||||
},
|
||||
&UnsizeStruct(box ref k, tp_index) => match ty.sty {
|
||||
ty_struct(did, substs) => {
|
||||
@ -4450,8 +4450,8 @@ pub fn unsize_ty<'tcx>(cx: &ctxt<'tcx>,
|
||||
mk_struct(cx, did, cx.mk_substs(unsized_substs))
|
||||
}
|
||||
_ => cx.sess.span_bug(span,
|
||||
format!("UnsizeStruct with bad sty: {:?}",
|
||||
ty_to_string(cx, ty)).index(&FullRange))
|
||||
&format!("UnsizeStruct with bad sty: {:?}",
|
||||
ty_to_string(cx, ty))[])
|
||||
},
|
||||
&UnsizeVtable(TyTrait { ref principal, ref bounds }, _) => {
|
||||
mk_trait(cx, principal.clone(), bounds.clone())
|
||||
@ -4463,8 +4463,8 @@ pub fn resolve_expr(tcx: &ctxt, expr: &ast::Expr) -> def::Def {
|
||||
match tcx.def_map.borrow().get(&expr.id) {
|
||||
Some(&def) => def,
|
||||
None => {
|
||||
tcx.sess.span_bug(expr.span, format!(
|
||||
"no def-map entry for expr {}", expr.id).index(&FullRange));
|
||||
tcx.sess.span_bug(expr.span, &format!(
|
||||
"no def-map entry for expr {}", expr.id)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -4557,9 +4557,9 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind {
|
||||
def => {
|
||||
tcx.sess.span_bug(
|
||||
expr.span,
|
||||
format!("uncategorized def for expr {}: {:?}",
|
||||
&format!("uncategorized def for expr {}: {:?}",
|
||||
expr.id,
|
||||
def).index(&FullRange));
|
||||
def)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -4679,12 +4679,12 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field])
|
||||
-> uint {
|
||||
let mut i = 0u;
|
||||
for f in fields.iter() { if f.name == name { return i; } i += 1u; }
|
||||
tcx.sess.bug(format!(
|
||||
tcx.sess.bug(&format!(
|
||||
"no field named `{}` found in the list of fields `{:?}`",
|
||||
token::get_name(name),
|
||||
fields.iter()
|
||||
.map(|f| token::get_name(f.name).get().to_string())
|
||||
.collect::<Vec<String>>()).index(&FullRange));
|
||||
.collect::<Vec<String>>())[]);
|
||||
}
|
||||
|
||||
pub fn impl_or_trait_item_idx(id: ast::Name, trait_items: &[ImplOrTraitItem])
|
||||
@ -4939,7 +4939,7 @@ pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)
|
||||
match item.node {
|
||||
ItemTrait(_, _, _, ref ms) => {
|
||||
let (_, p) =
|
||||
ast_util::split_trait_methods(ms.index(&FullRange));
|
||||
ast_util::split_trait_methods(&ms[]);
|
||||
p.iter()
|
||||
.map(|m| {
|
||||
match impl_or_trait_item(
|
||||
@ -4956,16 +4956,16 @@ pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)
|
||||
}).collect()
|
||||
}
|
||||
_ => {
|
||||
cx.sess.bug(format!("provided_trait_methods: `{:?}` is \
|
||||
cx.sess.bug(&format!("provided_trait_methods: `{:?}` is \
|
||||
not a trait",
|
||||
id).index(&FullRange))
|
||||
id)[])
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
cx.sess.bug(format!("provided_trait_methods: `{:?}` is not a \
|
||||
cx.sess.bug(&format!("provided_trait_methods: `{:?}` is not a \
|
||||
trait",
|
||||
id).index(&FullRange))
|
||||
id)[])
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@ -5203,7 +5203,7 @@ impl<'tcx> VariantInfo<'tcx> {
|
||||
};
|
||||
},
|
||||
ast::StructVariantKind(ref struct_def) => {
|
||||
let fields: &[StructField] = struct_def.fields.index(&FullRange);
|
||||
let fields: &[StructField] = &struct_def.fields[];
|
||||
|
||||
assert!(fields.len() > 0);
|
||||
|
||||
@ -5353,8 +5353,8 @@ pub fn enum_variants<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)
|
||||
Err(ref err) => {
|
||||
cx.sess
|
||||
.span_err(e.span,
|
||||
format!("expected constant: {}",
|
||||
*err).index(&FullRange));
|
||||
&format!("expected constant: {}",
|
||||
*err)[]);
|
||||
}
|
||||
},
|
||||
None => {}
|
||||
@ -5643,8 +5643,8 @@ pub fn lookup_struct_fields(cx: &ctxt, did: ast::DefId) -> Vec<field_ty> {
|
||||
Some(fields) => (**fields).clone(),
|
||||
_ => {
|
||||
cx.sess.bug(
|
||||
format!("ID not mapped to struct fields: {}",
|
||||
cx.map.node_to_string(did.node)).index(&FullRange));
|
||||
&format!("ID not mapped to struct fields: {}",
|
||||
cx.map.node_to_string(did.node))[]);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@ -5677,7 +5677,7 @@ pub fn struct_fields<'tcx>(cx: &ctxt<'tcx>, did: ast::DefId, substs: &Substs<'tc
|
||||
pub fn tup_fields<'tcx>(v: &[Ty<'tcx>]) -> Vec<field<'tcx>> {
|
||||
v.iter().enumerate().map(|(i, &f)| {
|
||||
field {
|
||||
name: token::intern(i.to_string().index(&FullRange)),
|
||||
name: token::intern(&i.to_string()[]),
|
||||
mt: mt {
|
||||
ty: f,
|
||||
mutbl: MutImmutable
|
||||
@ -5852,9 +5852,9 @@ pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> uint {
|
||||
const_eval::const_binary(_) =>
|
||||
"binary array"
|
||||
};
|
||||
tcx.sess.span_err(count_expr.span, format!(
|
||||
tcx.sess.span_err(count_expr.span, &format!(
|
||||
"expected positive integer for repeat count, found {}",
|
||||
found).index(&FullRange));
|
||||
found)[]);
|
||||
}
|
||||
Err(_) => {
|
||||
let found = match count_expr.node {
|
||||
@ -5867,9 +5867,9 @@ pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> uint {
|
||||
_ =>
|
||||
"non-constant expression"
|
||||
};
|
||||
tcx.sess.span_err(count_expr.span, format!(
|
||||
tcx.sess.span_err(count_expr.span, &format!(
|
||||
"expected constant integer for repeat count, found {}",
|
||||
found).index(&FullRange));
|
||||
found)[]);
|
||||
}
|
||||
}
|
||||
0
|
||||
@ -6654,7 +6654,7 @@ pub fn with_freevars<T, F>(tcx: &ty::ctxt, fid: ast::NodeId, f: F) -> T where
|
||||
{
|
||||
match tcx.freevars.borrow().get(&fid) {
|
||||
None => f(&[]),
|
||||
Some(d) => f(d.index(&FullRange))
|
||||
Some(d) => f(&d[])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -223,17 +223,17 @@ impl<'a> PluginLoader<'a> {
|
||||
// this is fatal: there are almost certainly macros we need
|
||||
// inside this crate, so continue would spew "macro undefined"
|
||||
// errors
|
||||
Err(err) => self.sess.span_fatal(vi.span, err.index(&FullRange))
|
||||
Err(err) => self.sess.span_fatal(vi.span, &err[])
|
||||
};
|
||||
|
||||
unsafe {
|
||||
let registrar =
|
||||
match lib.symbol(symbol.index(&FullRange)) {
|
||||
match lib.symbol(&symbol[]) {
|
||||
Ok(registrar) => {
|
||||
mem::transmute::<*mut u8,PluginRegistrarFun>(registrar)
|
||||
}
|
||||
// again fatal if we can't register macros
|
||||
Err(err) => self.sess.span_fatal(vi.span, err.index(&FullRange))
|
||||
Err(err) => self.sess.span_fatal(vi.span, &err[])
|
||||
};
|
||||
|
||||
// Intentionally leak the dynamic library. We can't ever unload it
|
||||
|
@ -576,18 +576,18 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions
|
||||
if !setter(&mut cg, value) {
|
||||
match (value, opt_type_desc) {
|
||||
(Some(..), None) => {
|
||||
early_error(format!("codegen option `{}` takes no \
|
||||
value", key).index(&FullRange))
|
||||
early_error(&format!("codegen option `{}` takes no \
|
||||
value", key)[])
|
||||
}
|
||||
(None, Some(type_desc)) => {
|
||||
early_error(format!("codegen option `{0}` requires \
|
||||
early_error(&format!("codegen option `{0}` requires \
|
||||
{1} (-C {0}=<value>)",
|
||||
key, type_desc).index(&FullRange))
|
||||
key, type_desc)[])
|
||||
}
|
||||
(Some(value), Some(type_desc)) => {
|
||||
early_error(format!("incorrect value `{}` for codegen \
|
||||
early_error(&format!("incorrect value `{}` for codegen \
|
||||
option `{}` - {} was expected",
|
||||
value, key, type_desc).index(&FullRange))
|
||||
value, key, type_desc)[])
|
||||
}
|
||||
(None, None) => unreachable!()
|
||||
}
|
||||
@ -596,8 +596,8 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions
|
||||
break;
|
||||
}
|
||||
if !found {
|
||||
early_error(format!("unknown codegen option: `{}`",
|
||||
key).index(&FullRange));
|
||||
early_error(&format!("unknown codegen option: `{}`",
|
||||
key)[]);
|
||||
}
|
||||
}
|
||||
return cg;
|
||||
@ -610,10 +610,10 @@ pub fn default_lib_output() -> CrateType {
|
||||
pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
|
||||
use syntax::parse::token::intern_and_get_ident as intern;
|
||||
|
||||
let end = sess.target.target.target_endian.index(&FullRange);
|
||||
let arch = sess.target.target.arch.index(&FullRange);
|
||||
let wordsz = sess.target.target.target_pointer_width.index(&FullRange);
|
||||
let os = sess.target.target.target_os.index(&FullRange);
|
||||
let end = &sess.target.target.target_endian[];
|
||||
let arch = &sess.target.target.arch[];
|
||||
let wordsz = &sess.target.target.target_pointer_width[];
|
||||
let os = &sess.target.target.target_os[];
|
||||
|
||||
let fam = match sess.target.target.options.is_like_windows {
|
||||
true => InternedString::new("windows"),
|
||||
@ -649,23 +649,23 @@ pub fn build_configuration(sess: &Session) -> ast::CrateConfig {
|
||||
append_configuration(&mut user_cfg, InternedString::new("test"))
|
||||
}
|
||||
let mut v = user_cfg.into_iter().collect::<Vec<_>>();
|
||||
v.push_all(default_cfg.index(&FullRange));
|
||||
v.push_all(&default_cfg[]);
|
||||
v
|
||||
}
|
||||
|
||||
pub fn build_target_config(opts: &Options, sp: &SpanHandler) -> Config {
|
||||
let target = match Target::search(opts.target_triple.index(&FullRange)) {
|
||||
let target = match Target::search(&opts.target_triple[]) {
|
||||
Ok(t) => t,
|
||||
Err(e) => {
|
||||
sp.handler().fatal((format!("Error loading target specification: {}", e)).as_slice());
|
||||
}
|
||||
};
|
||||
|
||||
let (int_type, uint_type) = match target.target_pointer_width.index(&FullRange) {
|
||||
let (int_type, uint_type) = match &target.target_pointer_width[] {
|
||||
"32" => (ast::TyI32, ast::TyU32),
|
||||
"64" => (ast::TyI64, ast::TyU64),
|
||||
w => sp.handler().fatal((format!("target specification was invalid: unrecognized \
|
||||
target-word-size {}", w)).index(&FullRange))
|
||||
w => sp.handler().fatal(&format!("target specification was invalid: unrecognized \
|
||||
target-word-size {}", w)[])
|
||||
};
|
||||
|
||||
Config {
|
||||
@ -863,7 +863,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
||||
|
||||
let unparsed_crate_types = matches.opt_strs("crate-type");
|
||||
let crate_types = parse_crate_types_from_list(unparsed_crate_types)
|
||||
.unwrap_or_else(|e| early_error(e.index(&FullRange)));
|
||||
.unwrap_or_else(|e| early_error(&e[]));
|
||||
|
||||
let mut lint_opts = vec!();
|
||||
let mut describe_lints = false;
|
||||
@ -890,8 +890,8 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
||||
}
|
||||
}
|
||||
if this_bit == 0 {
|
||||
early_error(format!("unknown debug flag: {}",
|
||||
*debug_flag).index(&FullRange))
|
||||
early_error(&format!("unknown debug flag: {}",
|
||||
*debug_flag)[])
|
||||
}
|
||||
debugging_opts |= this_bit;
|
||||
}
|
||||
@ -935,8 +935,8 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
||||
"link" => OutputTypeExe,
|
||||
"dep-info" => OutputTypeDepInfo,
|
||||
_ => {
|
||||
early_error(format!("unknown emission type: `{}`",
|
||||
part).index(&FullRange))
|
||||
early_error(&format!("unknown emission type: `{}`",
|
||||
part)[])
|
||||
}
|
||||
};
|
||||
output_types.push(output_type)
|
||||
@ -973,9 +973,9 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
||||
Some("2") => Default,
|
||||
Some("3") => Aggressive,
|
||||
Some(arg) => {
|
||||
early_error(format!("optimization level needs to be \
|
||||
early_error(&format!("optimization level needs to be \
|
||||
between 0-3 (instead was `{}`)",
|
||||
arg).index(&FullRange));
|
||||
arg)[]);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@ -1011,9 +1011,9 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
||||
None |
|
||||
Some("2") => FullDebugInfo,
|
||||
Some(arg) => {
|
||||
early_error(format!("debug info level needs to be between \
|
||||
early_error(&format!("debug info level needs to be between \
|
||||
0-2 (instead was `{}`)",
|
||||
arg).index(&FullRange));
|
||||
arg)[]);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@ -1031,7 +1031,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
||||
|
||||
let mut search_paths = SearchPaths::new();
|
||||
for s in matches.opt_strs("L").iter() {
|
||||
search_paths.add_path(s.index(&FullRange));
|
||||
search_paths.add_path(&s[]);
|
||||
}
|
||||
|
||||
let libs = matches.opt_strs("l").into_iter().map(|s| {
|
||||
@ -1061,9 +1061,9 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
||||
(Some(name), "framework") => (name, cstore::NativeFramework),
|
||||
(Some(name), "static") => (name, cstore::NativeStatic),
|
||||
(_, s) => {
|
||||
early_error(format!("unknown library kind `{}`, expected \
|
||||
early_error(&format!("unknown library kind `{}`, expected \
|
||||
one of dylib, framework, or static",
|
||||
s).index(&FullRange));
|
||||
s)[]);
|
||||
}
|
||||
};
|
||||
(name.to_string(), kind)
|
||||
@ -1107,7 +1107,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
||||
--debuginfo");
|
||||
}
|
||||
|
||||
let color = match matches.opt_str("color").as_ref().map(|s| s.index(&FullRange)) {
|
||||
let color = match matches.opt_str("color").as_ref().map(|s| &s[]) {
|
||||
Some("auto") => Auto,
|
||||
Some("always") => Always,
|
||||
Some("never") => Never,
|
||||
@ -1115,9 +1115,9 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
||||
None => Auto,
|
||||
|
||||
Some(arg) => {
|
||||
early_error(format!("argument for --color must be auto, always \
|
||||
early_error(&format!("argument for --color must be auto, always \
|
||||
or never (instead was `{}`)",
|
||||
arg).index(&FullRange))
|
||||
arg)[])
|
||||
}
|
||||
};
|
||||
|
||||
@ -1220,7 +1220,7 @@ mod test {
|
||||
#[test]
|
||||
fn test_switch_implies_cfg_test() {
|
||||
let matches =
|
||||
&match getopts(&["--test".to_string()], optgroups().index(&FullRange)) {
|
||||
&match getopts(&["--test".to_string()], &optgroups()[]) {
|
||||
Ok(m) => m,
|
||||
Err(f) => panic!("test_switch_implies_cfg_test: {}", f)
|
||||
};
|
||||
@ -1228,7 +1228,7 @@ mod test {
|
||||
let sessopts = build_session_options(matches);
|
||||
let sess = build_session(sessopts, None, registry);
|
||||
let cfg = build_configuration(&sess);
|
||||
assert!((attr::contains_name(cfg.index(&FullRange), "test")));
|
||||
assert!((attr::contains_name(&cfg[], "test")));
|
||||
}
|
||||
|
||||
// When the user supplies --test and --cfg test, don't implicitly add
|
||||
@ -1237,7 +1237,7 @@ mod test {
|
||||
fn test_switch_implies_cfg_test_unless_cfg_test() {
|
||||
let matches =
|
||||
&match getopts(&["--test".to_string(), "--cfg=test".to_string()],
|
||||
optgroups().index(&FullRange)) {
|
||||
&optgroups()[]) {
|
||||
Ok(m) => m,
|
||||
Err(f) => {
|
||||
panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f)
|
||||
@ -1257,7 +1257,7 @@ mod test {
|
||||
{
|
||||
let matches = getopts(&[
|
||||
"-Awarnings".to_string()
|
||||
], optgroups().index(&FullRange)).unwrap();
|
||||
], &optgroups()[]).unwrap();
|
||||
let registry = diagnostics::registry::Registry::new(&[]);
|
||||
let sessopts = build_session_options(&matches);
|
||||
let sess = build_session(sessopts, None, registry);
|
||||
@ -1268,7 +1268,7 @@ mod test {
|
||||
let matches = getopts(&[
|
||||
"-Awarnings".to_string(),
|
||||
"-Dwarnings".to_string()
|
||||
], optgroups().index(&FullRange)).unwrap();
|
||||
], &optgroups()[]).unwrap();
|
||||
let registry = diagnostics::registry::Registry::new(&[]);
|
||||
let sessopts = build_session_options(&matches);
|
||||
let sess = build_session(sessopts, None, registry);
|
||||
@ -1278,7 +1278,7 @@ mod test {
|
||||
{
|
||||
let matches = getopts(&[
|
||||
"-Adead_code".to_string()
|
||||
], optgroups().index(&FullRange)).unwrap();
|
||||
], &optgroups()[]).unwrap();
|
||||
let registry = diagnostics::registry::Registry::new(&[]);
|
||||
let sessopts = build_session_options(&matches);
|
||||
let sess = build_session(sessopts, None, registry);
|
||||
|
@ -174,7 +174,7 @@ impl Session {
|
||||
// cases later on
|
||||
pub fn impossible_case(&self, sp: Span, msg: &str) -> ! {
|
||||
self.span_bug(sp,
|
||||
format!("impossible case reached: {}", msg).index(&FullRange));
|
||||
&format!("impossible case reached: {}", msg)[]);
|
||||
}
|
||||
pub fn verbose(&self) -> bool { self.debugging_opt(config::VERBOSE) }
|
||||
pub fn time_passes(&self) -> bool { self.debugging_opt(config::TIME_PASSES) }
|
||||
@ -216,7 +216,7 @@ impl Session {
|
||||
}
|
||||
pub fn target_filesearch(&self, kind: PathKind) -> filesearch::FileSearch {
|
||||
filesearch::FileSearch::new(self.sysroot(),
|
||||
self.opts.target_triple.index(&FullRange),
|
||||
&self.opts.target_triple[],
|
||||
&self.opts.search_paths,
|
||||
kind)
|
||||
}
|
||||
|
@ -48,7 +48,7 @@ fn test_lev_distance() {
|
||||
for c in range(0u32, MAX as u32)
|
||||
.filter_map(|i| from_u32(i))
|
||||
.map(|i| i.to_string()) {
|
||||
assert_eq!(lev_distance(c.index(&FullRange), c.index(&FullRange)), 0);
|
||||
assert_eq!(lev_distance(&c[], &c[]), 0);
|
||||
}
|
||||
|
||||
let a = "\nMäry häd ä little lämb\n\nLittle lämb\n";
|
||||
|
@ -56,12 +56,12 @@ pub fn note_and_explain_region(cx: &ctxt,
|
||||
(ref str, Some(span)) => {
|
||||
cx.sess.span_note(
|
||||
span,
|
||||
format!("{}{}{}", prefix, *str, suffix).index(&FullRange));
|
||||
&format!("{}{}{}", prefix, *str, suffix)[]);
|
||||
Some(span)
|
||||
}
|
||||
(ref str, None) => {
|
||||
cx.sess.note(
|
||||
format!("{}{}{}", prefix, *str, suffix).index(&FullRange));
|
||||
&format!("{}{}{}", prefix, *str, suffix)[]);
|
||||
None
|
||||
}
|
||||
}
|
||||
@ -272,7 +272,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
|
||||
};
|
||||
|
||||
if abi != abi::Rust {
|
||||
s.push_str(format!("extern {} ", abi.to_string()).index(&FullRange));
|
||||
s.push_str(&format!("extern {} ", abi.to_string())[]);
|
||||
};
|
||||
|
||||
s.push_str("fn");
|
||||
@ -291,7 +291,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
|
||||
Some(def_id) => {
|
||||
s.push_str(" {");
|
||||
let path_str = ty::item_path_str(cx, def_id);
|
||||
s.push_str(path_str.index(&FullRange));
|
||||
s.push_str(&path_str[]);
|
||||
s.push_str("}");
|
||||
}
|
||||
None => { }
|
||||
@ -306,7 +306,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
|
||||
match cty.store {
|
||||
ty::UniqTraitStore => {}
|
||||
ty::RegionTraitStore(region, _) => {
|
||||
s.push_str(region_to_string(cx, "", true, region).index(&FullRange));
|
||||
s.push_str(®ion_to_string(cx, "", true, region)[]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -325,7 +325,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
|
||||
assert_eq!(cty.onceness, ast::Once);
|
||||
s.push_str("proc");
|
||||
push_sig_to_string(cx, &mut s, '(', ')', &cty.sig,
|
||||
bounds_str.index(&FullRange));
|
||||
&bounds_str[]);
|
||||
}
|
||||
ty::RegionTraitStore(..) => {
|
||||
match cty.onceness {
|
||||
@ -333,7 +333,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
|
||||
ast::Once => s.push_str("once ")
|
||||
}
|
||||
push_sig_to_string(cx, &mut s, '|', '|', &cty.sig,
|
||||
bounds_str.index(&FullRange));
|
||||
&bounds_str[]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -366,7 +366,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
|
||||
ty::FnConverging(t) => {
|
||||
if !ty::type_is_nil(t) {
|
||||
s.push_str(" -> ");
|
||||
s.push_str(ty_to_string(cx, t).index(&FullRange));
|
||||
s.push_str(&ty_to_string(cx, t)[]);
|
||||
}
|
||||
}
|
||||
ty::FnDiverging => {
|
||||
@ -403,7 +403,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
|
||||
}
|
||||
ty_rptr(r, ref tm) => {
|
||||
let mut buf = region_ptr_to_string(cx, *r);
|
||||
buf.push_str(mt_to_string(cx, tm).index(&FullRange));
|
||||
buf.push_str(&mt_to_string(cx, tm)[]);
|
||||
buf
|
||||
}
|
||||
ty_open(typ) =>
|
||||
@ -413,7 +413,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
|
||||
.iter()
|
||||
.map(|elem| ty_to_string(cx, *elem))
|
||||
.collect::<Vec<_>>();
|
||||
match strs.index(&FullRange) {
|
||||
match &strs[] {
|
||||
[ref string] => format!("({},)", string),
|
||||
strs => format!("({})", strs.connect(", "))
|
||||
}
|
||||
@ -542,7 +542,7 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>,
|
||||
0
|
||||
};
|
||||
|
||||
for t in tps.index(&(0..(tps.len() - num_defaults))).iter() {
|
||||
for t in tps[0..(tps.len() - num_defaults)].iter() {
|
||||
strs.push(ty_to_string(cx, *t))
|
||||
}
|
||||
|
||||
@ -550,11 +550,11 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>,
|
||||
format!("{}({}){}",
|
||||
base,
|
||||
if strs[0].starts_with("(") && strs[0].ends_with(",)") {
|
||||
strs[0].index(&(1 .. (strs[0].len() - 2))) // Remove '(' and ',)'
|
||||
&strs[0][1 .. (strs[0].len() - 2)] // Remove '(' and ',)'
|
||||
} else if strs[0].starts_with("(") && strs[0].ends_with(")") {
|
||||
strs[0].index(&(1 .. (strs[0].len() - 1))) // Remove '(' and ')'
|
||||
&strs[0][1 .. (strs[0].len() - 1)] // Remove '(' and ')'
|
||||
} else {
|
||||
strs[0].index(&FullRange)
|
||||
&strs[0][]
|
||||
},
|
||||
if &*strs[1] == "()" { String::new() } else { format!(" -> {}", strs[1]) })
|
||||
} else if strs.len() > 0 {
|
||||
@ -567,7 +567,7 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>,
|
||||
pub fn ty_to_short_str<'tcx>(cx: &ctxt<'tcx>, typ: Ty<'tcx>) -> String {
|
||||
let mut s = typ.repr(cx).to_string();
|
||||
if s.len() >= 32u {
|
||||
s = s.index(&(0u..32u)).to_string();
|
||||
s = (&s[0u..32u]).to_string();
|
||||
}
|
||||
return s;
|
||||
}
|
||||
@ -632,7 +632,7 @@ impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for [T] {
|
||||
|
||||
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice<T> {
|
||||
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
|
||||
repr_vec(tcx, self.index(&FullRange))
|
||||
repr_vec(tcx, &self[])
|
||||
}
|
||||
}
|
||||
|
||||
@ -640,7 +640,7 @@ impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice<T> {
|
||||
// autoderef cannot convert the &[T] handler
|
||||
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Vec<T> {
|
||||
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
|
||||
repr_vec(tcx, self.index(&FullRange))
|
||||
repr_vec(tcx, &self[])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -116,7 +116,7 @@ impl<T,U,D:SnapshotVecDelegate<T,U>> SnapshotVec<T,U,D> {
|
||||
pub fn actions_since_snapshot(&self,
|
||||
snapshot: &Snapshot)
|
||||
-> &[UndoLog<T,U>] {
|
||||
self.undo_log.index(&(snapshot.length..))
|
||||
&self.undo_log[snapshot.length..]
|
||||
}
|
||||
|
||||
fn assert_open_snapshot(&self, snapshot: &Snapshot) {
|
||||
|
@ -53,7 +53,7 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option<String>,
|
||||
args: &str, cwd: Option<&Path>,
|
||||
paths: &[&Path]) -> ProcessOutput {
|
||||
let ar = match *maybe_ar_prog {
|
||||
Some(ref ar) => ar.index(&FullRange),
|
||||
Some(ref ar) => &ar[],
|
||||
None => "ar"
|
||||
};
|
||||
let mut cmd = Command::new(ar);
|
||||
@ -73,24 +73,21 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option<String>,
|
||||
Ok(prog) => {
|
||||
let o = prog.wait_with_output().unwrap();
|
||||
if !o.status.success() {
|
||||
handler.err(format!("{} failed with: {}",
|
||||
handler.err(&format!("{} failed with: {}",
|
||||
cmd,
|
||||
o.status).index(&FullRange));
|
||||
handler.note(format!("stdout ---\n{}",
|
||||
str::from_utf8(o.output
|
||||
.index(&FullRange)).unwrap())
|
||||
.index(&FullRange));
|
||||
handler.note(format!("stderr ---\n{}",
|
||||
str::from_utf8(o.error
|
||||
.index(&FullRange)).unwrap())
|
||||
.index(&FullRange));
|
||||
o.status)[]);
|
||||
handler.note(&format!("stdout ---\n{}",
|
||||
str::from_utf8(&o.output[]).unwrap())[]);
|
||||
handler.note(&format!("stderr ---\n{}",
|
||||
str::from_utf8(&o.error[]).unwrap())
|
||||
[]);
|
||||
handler.abort_if_errors();
|
||||
}
|
||||
o
|
||||
},
|
||||
Err(e) => {
|
||||
handler.err(format!("could not exec `{}`: {}", ar.index(&FullRange),
|
||||
e).index(&FullRange));
|
||||
handler.err(&format!("could not exec `{}`: {}", &ar[],
|
||||
e)[]);
|
||||
handler.abort_if_errors();
|
||||
panic!("rustc::back::archive::run_ar() should not reach this point");
|
||||
}
|
||||
@ -106,16 +103,16 @@ pub fn find_library(name: &str, osprefix: &str, ossuffix: &str,
|
||||
|
||||
for path in search_paths.iter() {
|
||||
debug!("looking for {} inside {:?}", name, path.display());
|
||||
let test = path.join(oslibname.index(&FullRange));
|
||||
let test = path.join(&oslibname[]);
|
||||
if test.exists() { return test }
|
||||
if oslibname != unixlibname {
|
||||
let test = path.join(unixlibname.index(&FullRange));
|
||||
let test = path.join(&unixlibname[]);
|
||||
if test.exists() { return test }
|
||||
}
|
||||
}
|
||||
handler.fatal(format!("could not find native static library `{}`, \
|
||||
handler.fatal(&format!("could not find native static library `{}`, \
|
||||
perhaps an -L flag is missing?",
|
||||
name).index(&FullRange));
|
||||
name)[]);
|
||||
}
|
||||
|
||||
impl<'a> Archive<'a> {
|
||||
@ -147,7 +144,7 @@ impl<'a> Archive<'a> {
|
||||
/// Lists all files in an archive
|
||||
pub fn files(&self) -> Vec<String> {
|
||||
let output = run_ar(self.handler, &self.maybe_ar_prog, "t", None, &[&self.dst]);
|
||||
let output = str::from_utf8(output.output.index(&FullRange)).unwrap();
|
||||
let output = str::from_utf8(&output.output[]).unwrap();
|
||||
// use lines_any because windows delimits output with `\r\n` instead of
|
||||
// just `\n`
|
||||
output.lines_any().map(|s| s.to_string()).collect()
|
||||
@ -179,9 +176,9 @@ impl<'a> ArchiveBuilder<'a> {
|
||||
/// search in the relevant locations for a library named `name`.
|
||||
pub fn add_native_library(&mut self, name: &str) -> io::IoResult<()> {
|
||||
let location = find_library(name,
|
||||
self.archive.slib_prefix.index(&FullRange),
|
||||
self.archive.slib_suffix.index(&FullRange),
|
||||
self.archive.lib_search_paths.index(&FullRange),
|
||||
&self.archive.slib_prefix[],
|
||||
&self.archive.slib_suffix[],
|
||||
&self.archive.lib_search_paths[],
|
||||
self.archive.handler);
|
||||
self.add_archive(&location, name, |_| false)
|
||||
}
|
||||
@ -197,12 +194,12 @@ impl<'a> ArchiveBuilder<'a> {
|
||||
// as simple comparison is not enough - there
|
||||
// might be also an extra name suffix
|
||||
let obj_start = format!("{}", name);
|
||||
let obj_start = obj_start.index(&FullRange);
|
||||
let obj_start = &obj_start[];
|
||||
// Ignoring all bytecode files, no matter of
|
||||
// name
|
||||
let bc_ext = ".bytecode.deflate";
|
||||
|
||||
self.add_archive(rlib, name.index(&FullRange), |fname: &str| {
|
||||
self.add_archive(rlib, &name[], |fname: &str| {
|
||||
let skip_obj = lto && fname.starts_with(obj_start)
|
||||
&& fname.ends_with(".o");
|
||||
skip_obj || fname.ends_with(bc_ext) || fname == METADATA_FILENAME
|
||||
@ -239,7 +236,7 @@ impl<'a> ArchiveBuilder<'a> {
|
||||
// allow running `ar s file.a` to update symbols only.
|
||||
if self.should_update_symbols {
|
||||
run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
|
||||
"s", Some(self.work_dir.path()), args.index(&FullRange));
|
||||
"s", Some(self.work_dir.path()), &args[]);
|
||||
}
|
||||
return self.archive;
|
||||
}
|
||||
@ -259,7 +256,7 @@ impl<'a> ArchiveBuilder<'a> {
|
||||
// Add the archive members seen so far, without updating the
|
||||
// symbol table (`S`).
|
||||
run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
|
||||
"cruS", Some(self.work_dir.path()), args.index(&FullRange));
|
||||
"cruS", Some(self.work_dir.path()), &args[]);
|
||||
|
||||
args.clear();
|
||||
args.push(&abs_dst);
|
||||
@ -274,7 +271,7 @@ impl<'a> ArchiveBuilder<'a> {
|
||||
// necessary.
|
||||
let flags = if self.should_update_symbols { "crus" } else { "cruS" };
|
||||
run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
|
||||
flags, Some(self.work_dir.path()), args.index(&FullRange));
|
||||
flags, Some(self.work_dir.path()), &args[]);
|
||||
|
||||
self.archive
|
||||
}
|
||||
@ -316,7 +313,7 @@ impl<'a> ArchiveBuilder<'a> {
|
||||
} else {
|
||||
filename
|
||||
};
|
||||
let new_filename = self.work_dir.path().join(filename.index(&FullRange));
|
||||
let new_filename = self.work_dir.path().join(&filename[]);
|
||||
try!(fs::rename(file, &new_filename));
|
||||
self.members.push(Path::new(filename));
|
||||
}
|
||||
|
@ -44,15 +44,15 @@ pub fn get_rpath_flags<F, G>(config: RPathConfig<F, G>) -> Vec<String> where
|
||||
l.map(|p| p.clone())
|
||||
}).collect::<Vec<_>>();
|
||||
|
||||
let rpaths = get_rpaths(config, libs.index(&FullRange));
|
||||
flags.push_all(rpaths_to_flags(rpaths.index(&FullRange)).index(&FullRange));
|
||||
let rpaths = get_rpaths(config, &libs[]);
|
||||
flags.push_all(&rpaths_to_flags(&rpaths[])[]);
|
||||
flags
|
||||
}
|
||||
|
||||
fn rpaths_to_flags(rpaths: &[String]) -> Vec<String> {
|
||||
let mut ret = Vec::new();
|
||||
for rpath in rpaths.iter() {
|
||||
ret.push(format!("-Wl,-rpath,{}", (*rpath).index(&FullRange)));
|
||||
ret.push(format!("-Wl,-rpath,{}", &(*rpath)[]));
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
@ -82,14 +82,14 @@ fn get_rpaths<F, G>(mut config: RPathConfig<F, G>, libs: &[Path]) -> Vec<String>
|
||||
}
|
||||
}
|
||||
|
||||
log_rpaths("relative", rel_rpaths.index(&FullRange));
|
||||
log_rpaths("fallback", fallback_rpaths.index(&FullRange));
|
||||
log_rpaths("relative", &rel_rpaths[]);
|
||||
log_rpaths("fallback", &fallback_rpaths[]);
|
||||
|
||||
let mut rpaths = rel_rpaths;
|
||||
rpaths.push_all(fallback_rpaths.index(&FullRange));
|
||||
rpaths.push_all(&fallback_rpaths[]);
|
||||
|
||||
// Remove duplicates
|
||||
let rpaths = minimize_rpaths(rpaths.index(&FullRange));
|
||||
let rpaths = minimize_rpaths(&rpaths[]);
|
||||
return rpaths;
|
||||
}
|
||||
|
||||
@ -140,7 +140,7 @@ fn minimize_rpaths(rpaths: &[String]) -> Vec<String> {
|
||||
let mut set = HashSet::new();
|
||||
let mut minimized = Vec::new();
|
||||
for rpath in rpaths.iter() {
|
||||
if set.insert(rpath.index(&FullRange)) {
|
||||
if set.insert(&rpath[]) {
|
||||
minimized.push(rpath.clone());
|
||||
}
|
||||
}
|
||||
|
@ -140,7 +140,7 @@ impl FixedBuffer for FixedBuffer64 {
|
||||
if input.len() >= buffer_remaining {
|
||||
copy_memory(
|
||||
self.buffer.slice_mut(self.buffer_idx, size),
|
||||
input.index(&(0..buffer_remaining)));
|
||||
&input[0..buffer_remaining]);
|
||||
self.buffer_idx = 0;
|
||||
func(&self.buffer);
|
||||
i += buffer_remaining;
|
||||
@ -156,7 +156,7 @@ impl FixedBuffer for FixedBuffer64 {
|
||||
// While we have at least a full buffer size chunk's worth of data, process that data
|
||||
// without copying it into the buffer
|
||||
while input.len() - i >= size {
|
||||
func(input.index(&(i..(i + size))));
|
||||
func(&input[i..(i + size)]);
|
||||
i += size;
|
||||
}
|
||||
|
||||
@ -166,7 +166,7 @@ impl FixedBuffer for FixedBuffer64 {
|
||||
let input_remaining = input.len() - i;
|
||||
copy_memory(
|
||||
self.buffer.slice_to_mut(input_remaining),
|
||||
input.index(&(i..)));
|
||||
&input[i..]);
|
||||
self.buffer_idx += input_remaining;
|
||||
}
|
||||
|
||||
@ -188,7 +188,7 @@ impl FixedBuffer for FixedBuffer64 {
|
||||
fn full_buffer<'s>(&'s mut self) -> &'s [u8] {
|
||||
assert!(self.buffer_idx == 64);
|
||||
self.buffer_idx = 0;
|
||||
return self.buffer.index(&(0..64));
|
||||
return &self.buffer[0..64];
|
||||
}
|
||||
|
||||
fn position(&self) -> uint { self.buffer_idx }
|
||||
|
@ -64,7 +64,7 @@ impl Svh {
|
||||
}
|
||||
|
||||
pub fn as_str<'a>(&'a self) -> &'a str {
|
||||
self.hash.index(&FullRange)
|
||||
&self.hash[]
|
||||
}
|
||||
|
||||
pub fn calculate(metadata: &Vec<String>, krate: &ast::Crate) -> Svh {
|
||||
@ -362,7 +362,7 @@ mod svh_visitor {
|
||||
fn macro_name(mac: &Mac) -> token::InternedString {
|
||||
match &mac.node {
|
||||
&MacInvocTT(ref path, ref _tts, ref _stx_ctxt) => {
|
||||
let s = path.segments.index(&FullRange);
|
||||
let s = &path.segments[];
|
||||
assert_eq!(s.len(), 1);
|
||||
content(s[0].identifier)
|
||||
}
|
||||
|
@ -224,8 +224,7 @@ impl Target {
|
||||
.and_then(|os| os.map(|s| s.to_string())) {
|
||||
Some(val) => val,
|
||||
None =>
|
||||
handler.fatal((format!("Field {} in target specification is required", name))
|
||||
.index(&FullRange))
|
||||
handler.fatal(&format!("Field {} in target specification is required", name)[])
|
||||
}
|
||||
};
|
||||
|
||||
@ -242,18 +241,18 @@ impl Target {
|
||||
macro_rules! key {
|
||||
($key_name:ident) => ( {
|
||||
let name = (stringify!($key_name)).replace("_", "-");
|
||||
obj.find(name.index(&FullRange)).map(|o| o.as_string()
|
||||
obj.find(&name[]).map(|o| o.as_string()
|
||||
.map(|s| base.options.$key_name = s.to_string()));
|
||||
} );
|
||||
($key_name:ident, bool) => ( {
|
||||
let name = (stringify!($key_name)).replace("_", "-");
|
||||
obj.find(name.index(&FullRange))
|
||||
obj.find(&name[])
|
||||
.map(|o| o.as_boolean()
|
||||
.map(|s| base.options.$key_name = s));
|
||||
} );
|
||||
($key_name:ident, list) => ( {
|
||||
let name = (stringify!($key_name)).replace("_", "-");
|
||||
obj.find(name.index(&FullRange)).map(|o| o.as_array()
|
||||
obj.find(&name[]).map(|o| o.as_array()
|
||||
.map(|v| base.options.$key_name = v.iter()
|
||||
.map(|a| a.as_string().unwrap().to_string()).collect()
|
||||
)
|
||||
@ -369,7 +368,7 @@ impl Target {
|
||||
|
||||
let target_path = os::getenv("RUST_TARGET_PATH").unwrap_or(String::new());
|
||||
|
||||
let paths = os::split_paths(target_path.index(&FullRange));
|
||||
let paths = os::split_paths(&target_path[]);
|
||||
// FIXME 16351: add a sane default search path?
|
||||
|
||||
for dir in paths.iter() {
|
||||
|
@ -463,38 +463,38 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
(ty::MutBorrow, ty::MutBorrow) => {
|
||||
self.bccx.span_err(
|
||||
new_loan.span,
|
||||
format!("cannot borrow `{}`{} as mutable \
|
||||
&format!("cannot borrow `{}`{} as mutable \
|
||||
more than once at a time",
|
||||
nl, new_loan_msg).index(&FullRange))
|
||||
nl, new_loan_msg)[])
|
||||
}
|
||||
|
||||
(ty::UniqueImmBorrow, _) => {
|
||||
self.bccx.span_err(
|
||||
new_loan.span,
|
||||
format!("closure requires unique access to `{}` \
|
||||
&format!("closure requires unique access to `{}` \
|
||||
but {} is already borrowed{}",
|
||||
nl, ol_pronoun, old_loan_msg).index(&FullRange));
|
||||
nl, ol_pronoun, old_loan_msg)[]);
|
||||
}
|
||||
|
||||
(_, ty::UniqueImmBorrow) => {
|
||||
self.bccx.span_err(
|
||||
new_loan.span,
|
||||
format!("cannot borrow `{}`{} as {} because \
|
||||
&format!("cannot borrow `{}`{} as {} because \
|
||||
previous closure requires unique access",
|
||||
nl, new_loan_msg, new_loan.kind.to_user_str()).index(&FullRange));
|
||||
nl, new_loan_msg, new_loan.kind.to_user_str())[]);
|
||||
}
|
||||
|
||||
(_, _) => {
|
||||
self.bccx.span_err(
|
||||
new_loan.span,
|
||||
format!("cannot borrow `{}`{} as {} because \
|
||||
&format!("cannot borrow `{}`{} as {} because \
|
||||
{} is also borrowed as {}{}",
|
||||
nl,
|
||||
new_loan_msg,
|
||||
new_loan.kind.to_user_str(),
|
||||
ol_pronoun,
|
||||
old_loan.kind.to_user_str(),
|
||||
old_loan_msg).index(&FullRange));
|
||||
old_loan_msg)[]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -502,8 +502,8 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
euv::ClosureCapture(span) => {
|
||||
self.bccx.span_note(
|
||||
span,
|
||||
format!("borrow occurs due to use of `{}` in closure",
|
||||
nl).index(&FullRange));
|
||||
&format!("borrow occurs due to use of `{}` in closure",
|
||||
nl)[]);
|
||||
}
|
||||
_ => { }
|
||||
}
|
||||
@ -552,7 +552,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
|
||||
self.bccx.span_note(
|
||||
old_loan.span,
|
||||
format!("{}; {}", borrow_summary, rule_summary).index(&FullRange));
|
||||
&format!("{}; {}", borrow_summary, rule_summary)[]);
|
||||
|
||||
let old_loan_span = self.tcx().map.span(old_loan.kill_scope.node_id());
|
||||
self.bccx.span_end_note(old_loan_span,
|
||||
@ -621,14 +621,14 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
UseWhileBorrowed(loan_path, loan_span) => {
|
||||
self.bccx.span_err(
|
||||
span,
|
||||
format!("cannot use `{}` because it was mutably borrowed",
|
||||
self.bccx.loan_path_to_string(copy_path).index(&FullRange))
|
||||
.index(&FullRange));
|
||||
&format!("cannot use `{}` because it was mutably borrowed",
|
||||
&self.bccx.loan_path_to_string(copy_path)[])
|
||||
[]);
|
||||
self.bccx.span_note(
|
||||
loan_span,
|
||||
format!("borrow of `{}` occurs here",
|
||||
self.bccx.loan_path_to_string(&*loan_path).index(&FullRange))
|
||||
.index(&FullRange));
|
||||
&format!("borrow of `{}` occurs here",
|
||||
&self.bccx.loan_path_to_string(&*loan_path)[])
|
||||
[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -647,20 +647,20 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
let err_message = match move_kind {
|
||||
move_data::Captured =>
|
||||
format!("cannot move `{}` into closure because it is borrowed",
|
||||
self.bccx.loan_path_to_string(move_path).index(&FullRange)),
|
||||
&self.bccx.loan_path_to_string(move_path)[]),
|
||||
move_data::Declared |
|
||||
move_data::MoveExpr |
|
||||
move_data::MovePat =>
|
||||
format!("cannot move out of `{}` because it is borrowed",
|
||||
self.bccx.loan_path_to_string(move_path).index(&FullRange))
|
||||
&self.bccx.loan_path_to_string(move_path)[])
|
||||
};
|
||||
|
||||
self.bccx.span_err(span, err_message.index(&FullRange));
|
||||
self.bccx.span_err(span, &err_message[]);
|
||||
self.bccx.span_note(
|
||||
loan_span,
|
||||
format!("borrow of `{}` occurs here",
|
||||
self.bccx.loan_path_to_string(&*loan_path).index(&FullRange))
|
||||
.index(&FullRange));
|
||||
&format!("borrow of `{}` occurs here",
|
||||
&self.bccx.loan_path_to_string(&*loan_path)[])
|
||||
[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -809,34 +809,34 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
if kind == ty::FnUnboxedClosureKind {
|
||||
self.bccx.span_err(
|
||||
assignment_span,
|
||||
format!("cannot assign to {}",
|
||||
self.bccx.cmt_to_string(&*assignee_cmt)).index(&FullRange));
|
||||
&format!("cannot assign to {}",
|
||||
self.bccx.cmt_to_string(&*assignee_cmt))[]);
|
||||
self.bccx.span_help(
|
||||
self.tcx().map.span(upvar_id.closure_expr_id),
|
||||
"consider changing this closure to take self by mutable reference");
|
||||
} else {
|
||||
self.bccx.span_err(
|
||||
assignment_span,
|
||||
format!("cannot assign to {} {}",
|
||||
&format!("cannot assign to {} {}",
|
||||
assignee_cmt.mutbl.to_user_str(),
|
||||
self.bccx.cmt_to_string(&*assignee_cmt)).index(&FullRange));
|
||||
self.bccx.cmt_to_string(&*assignee_cmt))[]);
|
||||
}
|
||||
}
|
||||
_ => match opt_loan_path(&assignee_cmt) {
|
||||
Some(lp) => {
|
||||
self.bccx.span_err(
|
||||
assignment_span,
|
||||
format!("cannot assign to {} {} `{}`",
|
||||
&format!("cannot assign to {} {} `{}`",
|
||||
assignee_cmt.mutbl.to_user_str(),
|
||||
self.bccx.cmt_to_string(&*assignee_cmt),
|
||||
self.bccx.loan_path_to_string(&*lp)).index(&FullRange));
|
||||
self.bccx.loan_path_to_string(&*lp))[]);
|
||||
}
|
||||
None => {
|
||||
self.bccx.span_err(
|
||||
assignment_span,
|
||||
format!("cannot assign to {} {}",
|
||||
&format!("cannot assign to {} {}",
|
||||
assignee_cmt.mutbl.to_user_str(),
|
||||
self.bccx.cmt_to_string(&*assignee_cmt)).index(&FullRange));
|
||||
self.bccx.cmt_to_string(&*assignee_cmt))[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -955,11 +955,11 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
loan: &Loan) {
|
||||
self.bccx.span_err(
|
||||
span,
|
||||
format!("cannot assign to `{}` because it is borrowed",
|
||||
self.bccx.loan_path_to_string(loan_path)).index(&FullRange));
|
||||
&format!("cannot assign to `{}` because it is borrowed",
|
||||
self.bccx.loan_path_to_string(loan_path))[]);
|
||||
self.bccx.span_note(
|
||||
loan.span,
|
||||
format!("borrow of `{}` occurs here",
|
||||
self.bccx.loan_path_to_string(loan_path)).index(&FullRange));
|
||||
&format!("borrow of `{}` occurs here",
|
||||
self.bccx.loan_path_to_string(loan_path))[]);
|
||||
}
|
||||
}
|
||||
|
@ -38,7 +38,7 @@ enum Fragment {
|
||||
// This represents the collection of all but one of the elements
|
||||
// from an array at the path described by the move path index.
|
||||
// Note that attached MovePathIndex should have mem_categorization
|
||||
// of InteriorElement (i.e. array dereference `.index(&FullRange)`).
|
||||
// of InteriorElement (i.e. array dereference `&foo[]`).
|
||||
AllButOneFrom(MovePathIndex),
|
||||
}
|
||||
|
||||
@ -123,12 +123,12 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>,
|
||||
let attrs : &[ast::Attribute];
|
||||
attrs = match tcx.map.find(id) {
|
||||
Some(ast_map::NodeItem(ref item)) =>
|
||||
item.attrs.index(&FullRange),
|
||||
&item.attrs[],
|
||||
Some(ast_map::NodeImplItem(&ast::MethodImplItem(ref m))) =>
|
||||
m.attrs.index(&FullRange),
|
||||
&m.attrs[],
|
||||
Some(ast_map::NodeTraitItem(&ast::ProvidedMethod(ref m))) =>
|
||||
m.attrs.index(&FullRange),
|
||||
_ => [].index(&FullRange),
|
||||
&m.attrs[],
|
||||
_ => &[][],
|
||||
};
|
||||
|
||||
let span_err =
|
||||
@ -144,7 +144,7 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>,
|
||||
for (i, mpi) in vec_rc.iter().enumerate() {
|
||||
let render = |&:| this.path_loan_path(*mpi).user_string(tcx);
|
||||
if span_err {
|
||||
tcx.sess.span_err(sp, format!("{}: `{}`", kind, render()).index(&FullRange));
|
||||
tcx.sess.span_err(sp, &format!("{}: `{}`", kind, render())[]);
|
||||
}
|
||||
if print {
|
||||
println!("id:{} {}[{}] `{}`", id, kind, i, render());
|
||||
@ -156,7 +156,7 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>,
|
||||
for (i, f) in vec_rc.iter().enumerate() {
|
||||
let render = |&:| f.loan_path_user_string(this, tcx);
|
||||
if span_err {
|
||||
tcx.sess.span_err(sp, format!("{}: `{}`", kind, render()).index(&FullRange));
|
||||
tcx.sess.span_err(sp, &format!("{}: `{}`", kind, render())[]);
|
||||
}
|
||||
if print {
|
||||
println!("id:{} {}[{}] `{}`", id, kind, i, render());
|
||||
@ -198,11 +198,11 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) {
|
||||
// First, filter out duplicates
|
||||
moved.sort();
|
||||
moved.dedup();
|
||||
debug!("fragments 1 moved: {:?}", path_lps(moved.index(&FullRange)));
|
||||
debug!("fragments 1 moved: {:?}", path_lps(&moved[]));
|
||||
|
||||
assigned.sort();
|
||||
assigned.dedup();
|
||||
debug!("fragments 1 assigned: {:?}", path_lps(assigned.index(&FullRange)));
|
||||
debug!("fragments 1 assigned: {:?}", path_lps(&assigned[]));
|
||||
|
||||
// Second, build parents from the moved and assigned.
|
||||
for m in moved.iter() {
|
||||
@ -222,14 +222,14 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) {
|
||||
|
||||
parents.sort();
|
||||
parents.dedup();
|
||||
debug!("fragments 2 parents: {:?}", path_lps(parents.index(&FullRange)));
|
||||
debug!("fragments 2 parents: {:?}", path_lps(&parents[]));
|
||||
|
||||
// Third, filter the moved and assigned fragments down to just the non-parents
|
||||
moved.retain(|f| non_member(*f, parents.index(&FullRange)));
|
||||
debug!("fragments 3 moved: {:?}", path_lps(moved.index(&FullRange)));
|
||||
moved.retain(|f| non_member(*f, &parents[]));
|
||||
debug!("fragments 3 moved: {:?}", path_lps(&moved[]));
|
||||
|
||||
assigned.retain(|f| non_member(*f, parents.index(&FullRange)));
|
||||
debug!("fragments 3 assigned: {:?}", path_lps(assigned.index(&FullRange)));
|
||||
assigned.retain(|f| non_member(*f, &parents[]));
|
||||
debug!("fragments 3 assigned: {:?}", path_lps(&assigned[]));
|
||||
|
||||
// Fourth, build the leftover from the moved, assigned, and parents.
|
||||
for m in moved.iter() {
|
||||
@ -247,16 +247,16 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) {
|
||||
|
||||
unmoved.sort();
|
||||
unmoved.dedup();
|
||||
debug!("fragments 4 unmoved: {:?}", frag_lps(unmoved.index(&FullRange)));
|
||||
debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved[]));
|
||||
|
||||
// Fifth, filter the leftover fragments down to its core.
|
||||
unmoved.retain(|f| match *f {
|
||||
AllButOneFrom(_) => true,
|
||||
Just(mpi) => non_member(mpi, parents.index(&FullRange)) &&
|
||||
non_member(mpi, moved.index(&FullRange)) &&
|
||||
non_member(mpi, assigned.index(&FullRange))
|
||||
Just(mpi) => non_member(mpi, &parents[]) &&
|
||||
non_member(mpi, &moved[]) &&
|
||||
non_member(mpi, &assigned[])
|
||||
});
|
||||
debug!("fragments 5 unmoved: {:?}", frag_lps(unmoved.index(&FullRange)));
|
||||
debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved[]));
|
||||
|
||||
// Swap contents back in.
|
||||
fragments.unmoved_fragments = unmoved;
|
||||
@ -433,7 +433,7 @@ fn add_fragment_siblings_for_extension<'tcx>(this: &MoveData<'tcx>,
|
||||
let msg = format!("type {} ({:?}) is not fragmentable",
|
||||
parent_ty.repr(tcx), sty_and_variant_info);
|
||||
let opt_span = origin_id.and_then(|id|tcx.map.opt_span(id));
|
||||
tcx.sess.opt_span_bug(opt_span, msg.index(&FullRange))
|
||||
tcx.sess.opt_span_bug(opt_span, &msg[])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -306,8 +306,8 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> {
|
||||
ty::ReInfer(..) => {
|
||||
self.tcx().sess.span_bug(
|
||||
cmt.span,
|
||||
format!("invalid borrow lifetime: {:?}",
|
||||
loan_region).index(&FullRange));
|
||||
&format!("invalid borrow lifetime: {:?}",
|
||||
loan_region)[]);
|
||||
}
|
||||
};
|
||||
debug!("loan_scope = {:?}", loan_scope);
|
||||
|
@ -119,8 +119,8 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
|
||||
mc::cat_static_item => {
|
||||
bccx.span_err(
|
||||
move_from.span,
|
||||
format!("cannot move out of {}",
|
||||
bccx.cmt_to_string(&*move_from)).index(&FullRange));
|
||||
&format!("cannot move out of {}",
|
||||
bccx.cmt_to_string(&*move_from))[]);
|
||||
}
|
||||
|
||||
mc::cat_downcast(ref b, _) |
|
||||
@ -130,9 +130,9 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
|
||||
| ty::ty_enum(did, _) if ty::has_dtor(bccx.tcx, did) => {
|
||||
bccx.span_err(
|
||||
move_from.span,
|
||||
format!("cannot move out of type `{}`, \
|
||||
&format!("cannot move out of type `{}`, \
|
||||
which defines the `Drop` trait",
|
||||
b.ty.user_string(bccx.tcx)).index(&FullRange));
|
||||
b.ty.user_string(bccx.tcx))[]);
|
||||
},
|
||||
_ => panic!("this path should not cause illegal move")
|
||||
}
|
||||
@ -152,13 +152,13 @@ fn note_move_destination(bccx: &BorrowckCtxt,
|
||||
"attempting to move value to here");
|
||||
bccx.span_help(
|
||||
move_to_span,
|
||||
format!("to prevent the move, \
|
||||
&format!("to prevent the move, \
|
||||
use `ref {0}` or `ref mut {0}` to capture value by \
|
||||
reference",
|
||||
pat_name).index(&FullRange));
|
||||
pat_name)[]);
|
||||
} else {
|
||||
bccx.span_note(move_to_span,
|
||||
format!("and here (use `ref {0}` or `ref mut {0}`)",
|
||||
pat_name).index(&FullRange));
|
||||
&format!("and here (use `ref {0}` or `ref mut {0}`)",
|
||||
pat_name)[]);
|
||||
}
|
||||
}
|
||||
|
@ -137,7 +137,7 @@ fn borrowck_fn(this: &mut BorrowckCtxt,
|
||||
check_loans::check_loans(this,
|
||||
&loan_dfcx,
|
||||
flowed_moves,
|
||||
all_loans.index(&FullRange),
|
||||
&all_loans[],
|
||||
id,
|
||||
decl,
|
||||
body);
|
||||
@ -505,7 +505,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
pub fn report(&self, err: BckError<'tcx>) {
|
||||
self.span_err(
|
||||
err.span,
|
||||
self.bckerr_to_string(&err).index(&FullRange));
|
||||
&self.bckerr_to_string(&err)[]);
|
||||
self.note_and_explain_bckerr(err);
|
||||
}
|
||||
|
||||
@ -525,9 +525,9 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
move_data::Declared => {
|
||||
self.tcx.sess.span_err(
|
||||
use_span,
|
||||
format!("{} of possibly uninitialized variable: `{}`",
|
||||
&format!("{} of possibly uninitialized variable: `{}`",
|
||||
verb,
|
||||
self.loan_path_to_string(lp)).index(&FullRange));
|
||||
self.loan_path_to_string(lp))[]);
|
||||
(self.loan_path_to_string(moved_lp),
|
||||
String::new())
|
||||
}
|
||||
@ -566,10 +566,10 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
else { "" };
|
||||
self.tcx.sess.span_err(
|
||||
use_span,
|
||||
format!("{} of {}moved value: `{}`",
|
||||
&format!("{} of {}moved value: `{}`",
|
||||
verb,
|
||||
msg,
|
||||
nl).index(&FullRange));
|
||||
nl)[]);
|
||||
(ol, moved_lp_msg)
|
||||
}
|
||||
};
|
||||
@ -585,32 +585,32 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
(ty::expr_ty_adjusted(self.tcx, &*expr), expr.span)
|
||||
}
|
||||
r => {
|
||||
self.tcx.sess.bug(format!("MoveExpr({}) maps to \
|
||||
self.tcx.sess.bug(&format!("MoveExpr({}) maps to \
|
||||
{:?}, not Expr",
|
||||
the_move.id,
|
||||
r).index(&FullRange))
|
||||
r)[])
|
||||
}
|
||||
};
|
||||
let (suggestion, _) =
|
||||
move_suggestion(param_env, expr_span, expr_ty, ("moved by default", ""));
|
||||
self.tcx.sess.span_note(
|
||||
expr_span,
|
||||
format!("`{}` moved here{} because it has type `{}`, which is {}",
|
||||
&format!("`{}` moved here{} because it has type `{}`, which is {}",
|
||||
ol,
|
||||
moved_lp_msg,
|
||||
expr_ty.user_string(self.tcx),
|
||||
suggestion).index(&FullRange));
|
||||
suggestion)[]);
|
||||
}
|
||||
|
||||
move_data::MovePat => {
|
||||
let pat_ty = ty::node_id_to_type(self.tcx, the_move.id);
|
||||
let span = self.tcx.map.span(the_move.id);
|
||||
self.tcx.sess.span_note(span,
|
||||
format!("`{}` moved here{} because it has type `{}`, \
|
||||
&format!("`{}` moved here{} because it has type `{}`, \
|
||||
which is moved by default",
|
||||
ol,
|
||||
moved_lp_msg,
|
||||
pat_ty.user_string(self.tcx)).index(&FullRange));
|
||||
pat_ty.user_string(self.tcx))[]);
|
||||
self.tcx.sess.span_help(span,
|
||||
"use `ref` to override");
|
||||
}
|
||||
@ -623,10 +623,10 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
(ty::expr_ty_adjusted(self.tcx, &*expr), expr.span)
|
||||
}
|
||||
r => {
|
||||
self.tcx.sess.bug(format!("Captured({}) maps to \
|
||||
self.tcx.sess.bug(&format!("Captured({}) maps to \
|
||||
{:?}, not Expr",
|
||||
the_move.id,
|
||||
r).index(&FullRange))
|
||||
r)[])
|
||||
}
|
||||
};
|
||||
let (suggestion, help) =
|
||||
@ -637,12 +637,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
"make a copy and capture that instead to override"));
|
||||
self.tcx.sess.span_note(
|
||||
expr_span,
|
||||
format!("`{}` moved into closure environment here{} because it \
|
||||
&format!("`{}` moved into closure environment here{} because it \
|
||||
has type `{}`, which is {}",
|
||||
ol,
|
||||
moved_lp_msg,
|
||||
expr_ty.user_string(self.tcx),
|
||||
suggestion).index(&FullRange));
|
||||
suggestion)[]);
|
||||
self.tcx.sess.span_help(expr_span, help);
|
||||
}
|
||||
}
|
||||
@ -672,8 +672,8 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
&move_data::Assignment) {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
format!("re-assignment of immutable variable `{}`",
|
||||
self.loan_path_to_string(lp)).index(&FullRange));
|
||||
&format!("re-assignment of immutable variable `{}`",
|
||||
self.loan_path_to_string(lp))[]);
|
||||
self.tcx.sess.span_note(assign.span, "prior assignment occurs here");
|
||||
}
|
||||
|
||||
@ -798,8 +798,8 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
mc::AliasableOther => {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
format!("{} in an aliasable location",
|
||||
prefix).index(&FullRange));
|
||||
&format!("{} in an aliasable location",
|
||||
prefix)[]);
|
||||
}
|
||||
mc::AliasableClosure(id) => {
|
||||
self.tcx.sess.span_err(span,
|
||||
@ -812,12 +812,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
mc::AliasableStaticMut(..) => {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
format!("{} in a static location", prefix).index(&FullRange));
|
||||
&format!("{} in a static location", prefix)[]);
|
||||
}
|
||||
mc::AliasableBorrowed => {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
format!("{} in a `&` reference", prefix).index(&FullRange));
|
||||
&format!("{} in a `&` reference", prefix)[]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -884,13 +884,13 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
};
|
||||
note_and_explain_region(
|
||||
self.tcx,
|
||||
format!("{} would have to be valid for ",
|
||||
descr).index(&FullRange),
|
||||
&format!("{} would have to be valid for ",
|
||||
descr)[],
|
||||
loan_scope,
|
||||
"...");
|
||||
note_and_explain_region(
|
||||
self.tcx,
|
||||
format!("...but {} is only valid for ", descr).index(&FullRange),
|
||||
&format!("...but {} is only valid for ", descr)[],
|
||||
ptr_scope,
|
||||
"");
|
||||
}
|
||||
@ -910,7 +910,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
out.push('(');
|
||||
self.append_loan_path_to_string(&**lp_base, out);
|
||||
out.push_str(DOWNCAST_PRINTED_OPERATOR);
|
||||
out.push_str(ty::item_path_str(self.tcx, variant_def_id).index(&FullRange));
|
||||
out.push_str(&ty::item_path_str(self.tcx, variant_def_id)[]);
|
||||
out.push(')');
|
||||
}
|
||||
|
||||
@ -924,7 +924,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
}
|
||||
mc::PositionalField(idx) => {
|
||||
out.push('.');
|
||||
out.push_str(idx.to_string().index(&FullRange));
|
||||
out.push_str(&idx.to_string()[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -956,7 +956,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
|
||||
out.push('(');
|
||||
self.append_autoderefd_loan_path_to_string(&**lp_base, out);
|
||||
out.push(':');
|
||||
out.push_str(ty::item_path_str(self.tcx, variant_def_id).index(&FullRange));
|
||||
out.push_str(&ty::item_path_str(self.tcx, variant_def_id)[]);
|
||||
out.push(')');
|
||||
}
|
||||
|
||||
|
@ -60,7 +60,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
|
||||
if seen_one { sets.push_str(" "); } else { seen_one = true; }
|
||||
sets.push_str(variant.short_name());
|
||||
sets.push_str(": ");
|
||||
sets.push_str(self.dataflow_for_variant(e, n, variant).index(&FullRange));
|
||||
sets.push_str(&self.dataflow_for_variant(e, n, variant)[]);
|
||||
}
|
||||
sets
|
||||
}
|
||||
@ -89,7 +89,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
|
||||
set.push_str(", ");
|
||||
}
|
||||
let loan_str = self.borrowck_ctxt.loan_path_to_string(&*lp);
|
||||
set.push_str(loan_str.index(&FullRange));
|
||||
set.push_str(&loan_str[]);
|
||||
saw_some = true;
|
||||
true
|
||||
});
|
||||
|
@ -58,12 +58,12 @@ pub fn compile_input(sess: Session,
|
||||
let outputs = build_output_filenames(input,
|
||||
outdir,
|
||||
output,
|
||||
krate.attrs.index(&FullRange),
|
||||
&krate.attrs[],
|
||||
&sess);
|
||||
let id = link::find_crate_name(Some(&sess), krate.attrs.index(&FullRange),
|
||||
let id = link::find_crate_name(Some(&sess), &krate.attrs[],
|
||||
input);
|
||||
let expanded_crate
|
||||
= match phase_2_configure_and_expand(&sess, krate, id.index(&FullRange),
|
||||
= match phase_2_configure_and_expand(&sess, krate, &id[],
|
||||
addl_plugins) {
|
||||
None => return,
|
||||
Some(k) => k
|
||||
@ -75,7 +75,7 @@ pub fn compile_input(sess: Session,
|
||||
let mut forest = ast_map::Forest::new(expanded_crate);
|
||||
let ast_map = assign_node_ids_and_map(&sess, &mut forest);
|
||||
|
||||
write_out_deps(&sess, input, &outputs, id.index(&FullRange));
|
||||
write_out_deps(&sess, input, &outputs, &id[]);
|
||||
|
||||
if stop_after_phase_2(&sess) { return; }
|
||||
|
||||
@ -171,9 +171,9 @@ pub fn phase_2_configure_and_expand(sess: &Session,
|
||||
let time_passes = sess.time_passes();
|
||||
|
||||
*sess.crate_types.borrow_mut() =
|
||||
collect_crate_types(sess, krate.attrs.index(&FullRange));
|
||||
collect_crate_types(sess, &krate.attrs[]);
|
||||
*sess.crate_metadata.borrow_mut() =
|
||||
collect_crate_metadata(sess, krate.attrs.index(&FullRange));
|
||||
collect_crate_metadata(sess, &krate.attrs[]);
|
||||
|
||||
time(time_passes, "recursion limit", (), |_| {
|
||||
middle::recursion_limit::update_recursion_limit(sess, &krate);
|
||||
@ -268,8 +268,8 @@ pub fn phase_2_configure_and_expand(sess: &Session,
|
||||
if cfg!(windows) {
|
||||
_old_path = os::getenv("PATH").unwrap_or(_old_path);
|
||||
let mut new_path = sess.host_filesearch(PathKind::All).get_dylib_search_paths();
|
||||
new_path.extend(os::split_paths(_old_path.index(&FullRange)).into_iter());
|
||||
os::setenv("PATH", os::join_paths(new_path.index(&FullRange)).unwrap());
|
||||
new_path.extend(os::split_paths(&_old_path[]).into_iter());
|
||||
os::setenv("PATH", os::join_paths(&new_path[]).unwrap());
|
||||
}
|
||||
let cfg = syntax::ext::expand::ExpansionConfig {
|
||||
crate_name: crate_name.to_string(),
|
||||
@ -533,7 +533,7 @@ pub fn phase_5_run_llvm_passes(sess: &Session,
|
||||
time(sess.time_passes(), "LLVM passes", (), |_|
|
||||
write::run_passes(sess,
|
||||
trans,
|
||||
sess.opts.output_types.index(&FullRange),
|
||||
&sess.opts.output_types[],
|
||||
outputs));
|
||||
}
|
||||
|
||||
@ -547,14 +547,14 @@ pub fn phase_6_link_output(sess: &Session,
|
||||
outputs: &OutputFilenames) {
|
||||
let old_path = os::getenv("PATH").unwrap_or_else(||String::new());
|
||||
let mut new_path = sess.host_filesearch(PathKind::All).get_tools_search_paths();
|
||||
new_path.extend(os::split_paths(old_path.index(&FullRange)).into_iter());
|
||||
os::setenv("PATH", os::join_paths(new_path.index(&FullRange)).unwrap());
|
||||
new_path.extend(os::split_paths(&old_path[]).into_iter());
|
||||
os::setenv("PATH", os::join_paths(&new_path[]).unwrap());
|
||||
|
||||
time(sess.time_passes(), "linking", (), |_|
|
||||
link::link_binary(sess,
|
||||
trans,
|
||||
outputs,
|
||||
trans.link.crate_name.index(&FullRange)));
|
||||
&trans.link.crate_name[]));
|
||||
|
||||
os::setenv("PATH", old_path);
|
||||
}
|
||||
@ -643,7 +643,7 @@ fn write_out_deps(sess: &Session,
|
||||
// write Makefile-compatible dependency rules
|
||||
let files: Vec<String> = sess.codemap().files.borrow()
|
||||
.iter().filter(|fmap| fmap.is_real_file())
|
||||
.map(|fmap| escape_dep_filename(fmap.name.index(&FullRange)))
|
||||
.map(|fmap| escape_dep_filename(&fmap.name[]))
|
||||
.collect();
|
||||
let mut file = try!(io::File::create(&deps_filename));
|
||||
for path in out_filenames.iter() {
|
||||
@ -656,8 +656,8 @@ fn write_out_deps(sess: &Session,
|
||||
match result {
|
||||
Ok(()) => {}
|
||||
Err(e) => {
|
||||
sess.fatal(format!("error writing dependencies to `{}`: {}",
|
||||
deps_filename.display(), e).index(&FullRange));
|
||||
sess.fatal(&format!("error writing dependencies to `{}`: {}",
|
||||
deps_filename.display(), e)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -726,9 +726,9 @@ pub fn collect_crate_types(session: &Session,
|
||||
let res = !link::invalid_output_for_target(session, *crate_type);
|
||||
|
||||
if !res {
|
||||
session.warn(format!("dropping unsupported crate type `{:?}` \
|
||||
session.warn(&format!("dropping unsupported crate type `{:?}` \
|
||||
for target `{}`",
|
||||
*crate_type, session.opts.target_triple).index(&FullRange));
|
||||
*crate_type, session.opts.target_triple)[]);
|
||||
}
|
||||
|
||||
res
|
||||
|
@ -90,12 +90,12 @@ fn run_compiler(args: &[String]) {
let descriptions = diagnostics::registry::Registry::new(&DIAGNOSTICS);
match matches.opt_str("explain") {
Some(ref code) => {
match descriptions.find_description(code.index(&FullRange)) {
match descriptions.find_description(&code[]) {
Some(ref description) => {
println!("{}", description);
}
None => {
early_error(format!("no extended information for {}", code).index(&FullRange));
early_error(&format!("no extended information for {}", code)[]);
}
}
return;
@ -121,7 +121,7 @@ fn run_compiler(args: &[String]) {
early_error("no input filename given");
}
1u => {
let ifile = matches.free[0].index(&FullRange);
let ifile = &matches.free[0][];
if ifile == "-" {
let contents = io::stdin().read_to_end().unwrap();
let src = String::from_utf8(contents).unwrap();
@ -317,7 +317,7 @@ Available lint options:
for lint in lints.into_iter() {
let name = lint.name_lower().replace("_", "-");
println!(" {} {:7.7} {}",
padded(name.index(&FullRange)), lint.default_level.as_str(), lint.desc);
padded(&name[]), lint.default_level.as_str(), lint.desc);
}
println!("\n");
};
@ -347,7 +347,7 @@ Available lint options:
let desc = to.into_iter().map(|x| x.as_str().replace("_", "-"))
.collect::<Vec<String>>().connect(", ");
println!(" {} {}",
padded(name.index(&FullRange)), desc);
padded(&name[]), desc);
}
println!("\n");
};
@ -413,7 +413,7 @@ pub fn handle_options(mut args: Vec<String>) -> Option<getopts::Matches> {
}

let matches =
match getopts::getopts(args.index(&FullRange), config::optgroups().index(&FullRange)) {
match getopts::getopts(&args[], &config::optgroups()[]) {
Ok(m) => m,
Err(f_stable_attempt) => {
// redo option parsing, including unstable options this time,
@ -587,15 +587,15 @@ pub fn monitor<F:FnOnce()+Send>(f: F) {
"run with `RUST_BACKTRACE=1` for a backtrace".to_string(),
];
for note in xs.iter() {
emitter.emit(None, note.index(&FullRange), None, diagnostic::Note)
emitter.emit(None, &note[], None, diagnostic::Note)
}

match r.read_to_string() {
Ok(s) => println!("{}", s),
Err(e) => {
emitter.emit(None,
format!("failed to read internal \
stderr: {}", e).index(&FullRange),
&format!("failed to read internal \
stderr: {}", e)[],
None,
diagnostic::Error)
}
@ -294,9 +294,9 @@ impl<'tcx> pprust::PpAnn for TypedAnnotation<'tcx> {
try!(pp::word(&mut s.s, "as"));
try!(pp::space(&mut s.s));
try!(pp::word(&mut s.s,
ppaux::ty_to_string(
&ppaux::ty_to_string(
tcx,
ty::expr_ty(tcx, expr)).index(&FullRange)));
ty::expr_ty(tcx, expr))[]));
s.pclose()
}
_ => Ok(())
@ -350,8 +350,8 @@ impl<'a, 'ast> Iterator for NodesMatchingUII<'a, 'ast> {

fn next(&mut self) -> Option<ast::NodeId> {
match self {
&NodesMatchingDirect(ref mut iter) => iter.next(),
&NodesMatchingSuffix(ref mut iter) => iter.next(),
&mut NodesMatchingDirect(ref mut iter) => iter.next(),
&mut NodesMatchingSuffix(ref mut iter) => iter.next(),
}
}
}
@ -370,7 +370,7 @@ impl UserIdentifiedItem {
ItemViaNode(node_id) =>
NodesMatchingDirect(Some(node_id).into_iter()),
ItemViaPath(ref parts) =>
NodesMatchingSuffix(map.nodes_matching_suffix(parts.index(&FullRange))),
NodesMatchingSuffix(map.nodes_matching_suffix(&parts[])),
}
}

@ -382,7 +382,7 @@ impl UserIdentifiedItem {
user_option,
self.reconstructed_input(),
is_wrong_because);
sess.fatal(message.index(&FullRange))
sess.fatal(&message[])
};

let mut saw_node = ast::DUMMY_NODE_ID;
@ -509,7 +509,7 @@ pub fn pretty_print_input(sess: Session,
let is_expanded = needs_expansion(&ppm);
let compute_ast_map = needs_ast_map(&ppm, &opt_uii);
let krate = if compute_ast_map {
match driver::phase_2_configure_and_expand(&sess, krate, id.index(&FullRange), None) {
match driver::phase_2_configure_and_expand(&sess, krate, &id[], None) {
None => return,
Some(k) => k
}
@ -528,7 +528,7 @@ pub fn pretty_print_input(sess: Session,
};

let src_name = driver::source_name(input);
let src = sess.codemap().get_filemap(src_name.index(&FullRange))
let src = sess.codemap().get_filemap(&src_name[])
.src.as_bytes().to_vec();
let mut rdr = MemReader::new(src);

@ -588,16 +588,16 @@ pub fn pretty_print_input(sess: Session,
(PpmFlowGraph, opt_uii) => {
debug!("pretty printing flow graph for {:?}", opt_uii);
let uii = opt_uii.unwrap_or_else(|| {
sess.fatal(format!("`pretty flowgraph=..` needs NodeId (int) or
unique path suffix (b::c::d)").index(&FullRange))
sess.fatal(&format!("`pretty flowgraph=..` needs NodeId (int) or
unique path suffix (b::c::d)")[])

});
let ast_map = ast_map.expect("--pretty flowgraph missing ast_map");
let nodeid = uii.to_one_node_id("--pretty", &sess, &ast_map);

let node = ast_map.find(nodeid).unwrap_or_else(|| {
sess.fatal(format!("--pretty flowgraph couldn't find id: {}",
nodeid).index(&FullRange))
sess.fatal(&format!("--pretty flowgraph couldn't find id: {}",
nodeid)[])
});

let code = blocks::Code::from_node(node);
@ -615,8 +615,8 @@ pub fn pretty_print_input(sess: Session,
// point to what was found, if there's an
// accessible span.
match ast_map.opt_span(nodeid) {
Some(sp) => sess.span_fatal(sp, message.index(&FullRange)),
None => sess.fatal(message.index(&FullRange))
Some(sp) => sess.span_fatal(sp, &message[]),
None => sess.fatal(&message[])
}
}
}
@ -279,7 +279,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> {

pub fn t_param(&self, space: subst::ParamSpace, index: u32) -> Ty<'tcx> {
let name = format!("T{}", index);
ty::mk_param(self.infcx.tcx, space, index, token::intern(name.index(&FullRange)))
ty::mk_param(self.infcx.tcx, space, index, token::intern(&name[]))
}

pub fn re_early_bound(&self,
@ -219,16 +219,16 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> {
// had the duplicate.
let ns = ns.unwrap();
self.resolve_error(sp,
format!("duplicate definition of {} `{}`",
&format!("duplicate definition of {} `{}`",
namespace_error_to_string(duplicate_type),
token::get_name(name)).index(&FullRange));
token::get_name(name))[]);
{
let r = child.span_for_namespace(ns);
for sp in r.iter() {
self.session.span_note(*sp,
format!("first definition of {} `{}` here",
&format!("first definition of {} `{}` here",
namespace_error_to_string(duplicate_type),
token::get_name(name)).index(&FullRange));
token::get_name(name))[]);
}
}
}
@ -1200,8 +1200,8 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> {
SingleImport(target, _) => {
debug!("(building import directive) building import \
directive: {}::{}",
self.names_to_string(module_.imports.borrow().last().unwrap()
.module_path.index(&FullRange)),
self.names_to_string(&module_.imports.borrow().last().unwrap().
module_path[]),
token::get_name(target));

let mut import_resolutions = module_.import_resolutions
@ -1058,11 +1058,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
};
let msg = format!("unresolved import `{}`{}",
self.import_path_to_string(
import_directive.module_path
.index(&FullRange),
&import_directive.module_path[],
import_directive.subclass),
help);
self.resolve_error(span, msg.index(&FullRange));
self.resolve_error(span, &msg[]);
}
Indeterminate => break, // Bail out. We'll come around next time.
Success(()) => () // Good. Continue.
@ -1092,7 +1091,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
.iter()
.map(|seg| seg.identifier.name)
.collect();
self.names_to_string(names.index(&FullRange))
self.names_to_string(&names[])
}

fn import_directive_subclass_to_string(&mut self,
@ -1156,7 +1155,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
let module_path = &import_directive.module_path;

debug!("(resolving import for module) resolving import `{}::...` in `{}`",
self.names_to_string(module_path.index(&FullRange)),
self.names_to_string(&module_path[]),
self.module_to_string(&*module_));

// First, resolve the module path for the directive, if necessary.
@ -1165,7 +1164,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
Some((self.graph_root.get_module(), LastMod(AllPublic)))
} else {
match self.resolve_module_path(module_.clone(),
module_path.index(&FullRange),
&module_path[],
DontUseLexicalScope,
import_directive.span,
ImportSearch) {
@ -1762,7 +1761,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
ValueNS => "value",
},
token::get_name(name).get());
self.session.span_err(import_span, msg.index(&FullRange));
self.session.span_err(import_span, &msg[]);
}
Some(_) | None => {}
}
@ -1777,7 +1776,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
if !name_bindings.defined_in_namespace_with(namespace, IMPORTABLE) {
let msg = format!("`{}` is not directly importable",
token::get_name(name));
self.session.span_err(import_span, msg.index(&FullRange));
self.session.span_err(import_span, &msg[]);
}
}

@ -1802,7 +1801,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
crate in this module \
(maybe you meant `use {0}::*`?)",
token::get_name(name).get());
self.session.span_err(import_span, msg.index(&FullRange));
self.session.span_err(import_span, &msg[]);
}
Some(_) | None => {}
}
@ -1824,7 +1823,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
let msg = format!("import `{}` conflicts with value \
in this module",
token::get_name(name).get());
self.session.span_err(import_span, msg.index(&FullRange));
self.session.span_err(import_span, &msg[]);
if let Some(span) = value.value_span {
self.session.span_note(span,
"conflicting value here");
@ -1842,7 +1841,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
let msg = format!("import `{}` conflicts with type in \
this module",
token::get_name(name).get());
self.session.span_err(import_span, msg.index(&FullRange));
self.session.span_err(import_span, &msg[]);
if let Some(span) = ty.type_span {
self.session.span_note(span,
"note conflicting type here")
@ -1855,7 +1854,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
let msg = format!("inherent implementations \
are only allowed on types \
defined in the current module");
self.session.span_err(span, msg.index(&FullRange));
self.session.span_err(span, &msg[]);
self.session.span_note(import_span,
"import from other module here")
}
@ -1864,7 +1863,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
let msg = format!("import `{}` conflicts with existing \
submodule",
token::get_name(name).get());
self.session.span_err(import_span, msg.index(&FullRange));
self.session.span_err(import_span, &msg[]);
if let Some(span) = ty.type_span {
self.session.span_note(span,
"note conflicting module here")
@ -1892,9 +1891,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
if module.external_module_children.borrow().contains_key(&name) {
self.session
.span_err(span,
format!("an external crate named `{}` has already \
&format!("an external crate named `{}` has already \
been imported into this module",
token::get_name(name).get()).index(&FullRange));
token::get_name(name).get())[]);
}
}

@ -1910,10 +1909,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
if module.external_module_children.borrow().contains_key(&name) {
self.session
.span_err(span,
format!("the name `{}` conflicts with an external \
&format!("the name `{}` conflicts with an external \
crate that has been imported into this \
module",
token::get_name(name).get()).index(&FullRange));
token::get_name(name).get())[]);
}
}

@ -1961,7 +1960,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
let segment_name = token::get_name(name);
let module_name = self.module_to_string(&*search_module);
let mut span = span;
let msg = if "???" == module_name.index(&FullRange) {
let msg = if "???" == &module_name[] {
span.hi = span.lo + Pos::from_uint(segment_name.get().len());

match search_parent_externals(name,
@ -2074,14 +2073,14 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
match module_prefix_result {
|
||||
Failed(None) => {
|
||||
let mpath = self.names_to_string(module_path);
|
||||
let mpath = mpath.index(&FullRange);
|
||||
let mpath = &mpath[];
|
||||
match mpath.rfind(':') {
|
||||
Some(idx) => {
|
||||
let msg = format!("Could not find `{}` in `{}`",
|
||||
// idx +- 1 to account for the
|
||||
// colons on either side
|
||||
mpath.index(&((idx + 1)..)),
|
||||
mpath.index(&(0..(idx - 1))));
|
||||
&mpath[(idx + 1)..],
|
||||
&mpath[0..(idx - 1)]);
|
||||
return Failed(Some((span, msg)));
|
||||
},
|
||||
None => {
|
||||
@ -2255,8 +2254,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
PathSearch,
|
||||
true) {
|
||||
Failed(Some((span, msg))) =>
|
||||
self.resolve_error(span, format!("failed to resolve. {}",
|
||||
msg).index(&FullRange)),
|
||||
self.resolve_error(span, &format!("failed to resolve. {}",
|
||||
msg)[]),
|
||||
Failed(None) => (), // Continue up the search chain.
|
||||
Indeterminate => {
|
||||
// We couldn't see through the higher scope because of an
|
||||
@ -2516,7 +2515,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
} else {
|
||||
let err = format!("unresolved import (maybe you meant `{}::*`?)",
|
||||
sn);
|
||||
self.resolve_error((*imports)[index].span, err.index(&FullRange));
|
||||
self.resolve_error((*imports)[index].span, &err[]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2608,7 +2607,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
match def_like {
|
||||
DlDef(d @ DefUpvar(..)) => {
|
||||
self.session.span_bug(span,
|
||||
format!("unexpected {:?} in bindings", d).index(&FullRange))
|
||||
&format!("unexpected {:?} in bindings", d)[])
|
||||
}
|
||||
DlDef(d @ DefLocal(_)) => {
|
||||
let node_id = d.def_id().node;
|
||||
@ -2754,7 +2753,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
for (i, rib) in ribs.iter().enumerate().rev() {
|
||||
match rib.bindings.get(&name).cloned() {
|
||||
Some(def_like) => {
|
||||
return self.upvarify(ribs.index(&((i + 1)..)), def_like, span);
|
||||
return self.upvarify(&ribs[(i + 1)..], def_like, span);
|
||||
}
|
||||
None => {
|
||||
// Continue.
|
||||
@ -2847,7 +2846,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
generics,
|
||||
implemented_traits,
|
||||
&**self_type,
|
||||
impl_items.index(&FullRange));
|
||||
&impl_items[]);
|
||||
}
|
||||
|
||||
ItemTrait(_, ref generics, ref bounds, ref trait_items) => {
|
||||
@ -2925,7 +2924,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
ItemStruct(ref struct_def, ref generics) => {
|
||||
self.resolve_struct(item.id,
|
||||
generics,
|
||||
struct_def.fields.index(&FullRange));
|
||||
&struct_def.fields[]);
|
||||
}
|
||||
|
||||
ItemMod(ref module_) => {
|
||||
@ -2996,12 +2995,12 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
|
||||
if seen_bindings.contains(&name) {
|
||||
self.resolve_error(type_parameter.span,
|
||||
format!("the name `{}` is already \
|
||||
&format!("the name `{}` is already \
|
||||
used for a type \
|
||||
parameter in this type \
|
||||
parameter list",
|
||||
token::get_name(
|
||||
name)).index(&FullRange))
|
||||
name))[])
|
||||
}
|
||||
seen_bindings.insert(name);
|
||||
|
||||
@ -3173,7 +3172,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
};
|
||||
|
||||
let msg = format!("attempt to {} a nonexistent trait `{}`", usage_str, path_str);
|
||||
self.resolve_error(trait_reference.path.span, msg.index(&FullRange));
|
||||
self.resolve_error(trait_reference.path.span, &msg[]);
|
||||
}
|
||||
Some(def) => {
|
||||
match def {
|
||||
@ -3183,16 +3182,16 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
}
|
||||
(def, _) => {
|
||||
self.resolve_error(trait_reference.path.span,
|
||||
format!("`{}` is not a trait",
|
||||
&format!("`{}` is not a trait",
|
||||
self.path_names_to_string(
|
||||
&trait_reference.path)).index(&FullRange));
|
||||
&trait_reference.path))[]);
|
||||
|
||||
// If it's a typedef, give a note
|
||||
if let DefTy(..) = def {
|
||||
self.session.span_note(
|
||||
trait_reference.path.span,
|
||||
format!("`type` aliases cannot be used for traits")
|
||||
.index(&FullRange));
|
||||
&format!("`type` aliases cannot be used for traits")
|
||||
[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3387,9 +3386,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
if self.trait_item_map.get(&(name, did)).is_none() {
|
||||
let path_str = self.path_names_to_string(&trait_ref.path);
|
||||
self.resolve_error(span,
|
||||
format!("method `{}` is not a member of trait `{}`",
|
||||
&format!("method `{}` is not a member of trait `{}`",
|
||||
token::get_name(name),
|
||||
path_str).index(&FullRange));
|
||||
path_str)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3455,19 +3454,19 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
None => {
|
||||
self.resolve_error(
|
||||
p.span,
|
||||
format!("variable `{}` from pattern #1 is \
|
||||
&format!("variable `{}` from pattern #1 is \
|
||||
not bound in pattern #{}",
|
||||
token::get_name(key),
|
||||
i + 1).index(&FullRange));
|
||||
i + 1)[]);
|
||||
}
|
||||
Some(binding_i) => {
|
||||
if binding_0.binding_mode != binding_i.binding_mode {
|
||||
self.resolve_error(
|
||||
binding_i.span,
|
||||
format!("variable `{}` is bound with different \
|
||||
&format!("variable `{}` is bound with different \
|
||||
mode in pattern #{} than in pattern #1",
|
||||
token::get_name(key),
|
||||
i + 1).index(&FullRange));
|
||||
i + 1)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3477,10 +3476,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
if !map_0.contains_key(&key) {
|
||||
self.resolve_error(
|
||||
binding.span,
|
||||
format!("variable `{}` from pattern {}{} is \
|
||||
&format!("variable `{}` from pattern {}{} is \
|
||||
not bound in pattern {}1",
|
||||
token::get_name(key),
|
||||
"#", i + 1, "#").index(&FullRange));
|
||||
"#", i + 1, "#")[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3595,7 +3594,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
None => {
|
||||
let msg = format!("use of undeclared type name `{}`",
|
||||
self.path_names_to_string(path));
|
||||
self.resolve_error(ty.span, msg.index(&FullRange));
|
||||
self.resolve_error(ty.span, &msg[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3664,10 +3663,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
FoundStructOrEnumVariant(..) => {
|
||||
self.resolve_error(
|
||||
pattern.span,
|
||||
format!("declaration of `{}` shadows an enum \
|
||||
&format!("declaration of `{}` shadows an enum \
|
||||
variant or unit-like struct in \
|
||||
scope",
|
||||
token::get_name(renamed)).index(&FullRange));
|
||||
token::get_name(renamed))[]);
|
||||
}
|
||||
FoundConst(ref def, lp) if mode == RefutableMode => {
|
||||
debug!("(resolving pattern) resolving `{}` to \
|
||||
@ -3712,23 +3711,23 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
// Forbid duplicate bindings in the same
|
||||
// parameter list.
|
||||
self.resolve_error(pattern.span,
|
||||
format!("identifier `{}` \
|
||||
&format!("identifier `{}` \
|
||||
is bound more \
|
||||
than once in \
|
||||
this parameter \
|
||||
list",
|
||||
token::get_ident(
|
||||
ident))
|
||||
.index(&FullRange))
|
||||
[])
|
||||
} else if bindings_list.get(&renamed) ==
|
||||
Some(&pat_id) {
|
||||
// Then this is a duplicate variable in the
|
||||
// same disjunction, which is an error.
|
||||
self.resolve_error(pattern.span,
|
||||
format!("identifier `{}` is bound \
|
||||
&format!("identifier `{}` is bound \
|
||||
more than once in the same \
|
||||
pattern",
|
||||
token::get_ident(ident)).index(&FullRange));
|
||||
token::get_ident(ident))[]);
|
||||
}
|
||||
// Else, not bound in the same pattern: do
|
||||
// nothing.
|
||||
@ -3791,7 +3790,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
def: {:?}", result);
|
||||
let msg = format!("`{}` does not name a structure",
|
||||
self.path_names_to_string(path));
|
||||
self.resolve_error(path.span, msg.index(&FullRange));
|
||||
self.resolve_error(path.span, &msg[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3852,8 +3851,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
Failed(err) => {
|
||||
match err {
|
||||
Some((span, msg)) => {
|
||||
self.resolve_error(span, format!("failed to resolve: {}",
|
||||
msg).index(&FullRange));
|
||||
self.resolve_error(span, &format!("failed to resolve: {}",
|
||||
msg)[]);
|
||||
}
|
||||
None => ()
|
||||
}
|
||||
@ -4048,7 +4047,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
let last_private;
|
||||
let module = self.current_module.clone();
|
||||
match self.resolve_module_path(module,
|
||||
module_path.index(&FullRange),
|
||||
&module_path[],
|
||||
UseLexicalScope,
|
||||
path.span,
|
||||
PathSearch) {
|
||||
@ -4062,8 +4061,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
}
|
||||
};
|
||||
|
||||
self.resolve_error(span, format!("failed to resolve. {}",
|
||||
msg).index(&FullRange));
|
||||
self.resolve_error(span, &format!("failed to resolve. {}",
|
||||
msg)[]);
|
||||
return None;
|
||||
}
|
||||
Indeterminate => panic!("indeterminate unexpected"),
|
||||
@ -4106,7 +4105,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
let containing_module;
|
||||
let last_private;
|
||||
match self.resolve_module_path_from_root(root_module,
|
||||
module_path.index(&FullRange),
|
||||
&module_path[],
|
||||
0,
|
||||
path.span,
|
||||
PathSearch,
|
||||
@ -4116,13 +4115,13 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
Some((span, msg)) => (span, msg),
|
||||
None => {
|
||||
let msg = format!("Use of undeclared module `::{}`",
|
||||
self.names_to_string(module_path.index(&FullRange)));
|
||||
self.names_to_string(&module_path[]));
|
||||
(path.span, msg)
|
||||
}
|
||||
};
|
||||
|
||||
self.resolve_error(span, format!("failed to resolve. {}",
|
||||
msg).index(&FullRange));
|
||||
self.resolve_error(span, &format!("failed to resolve. {}",
|
||||
msg)[]);
|
||||
return None;
|
||||
}
|
||||
|
||||
@ -4163,7 +4162,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
}
|
||||
TypeNS => {
|
||||
let name = ident.name;
|
||||
self.search_ribs(self.type_ribs.index(&FullRange), name, span)
|
||||
self.search_ribs(&self.type_ribs[], name, span)
|
||||
}
|
||||
};
|
||||
|
||||
@ -4217,8 +4216,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
Failed(err) => {
|
||||
match err {
|
||||
Some((span, msg)) =>
|
||||
self.resolve_error(span, format!("failed to resolve. {}",
|
||||
msg).index(&FullRange)),
|
||||
self.resolve_error(span, &format!("failed to resolve. {}",
|
||||
msg)[]),
|
||||
None => ()
|
||||
}
|
||||
|
||||
@ -4275,7 +4274,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
}
|
||||
} else {
|
||||
match this.resolve_module_path(root,
|
||||
name_path.index(&FullRange),
|
||||
&name_path[],
|
||||
UseLexicalScope,
|
||||
span,
|
||||
PathSearch) {
|
||||
@ -4313,7 +4312,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
let name_path = path.segments.iter().map(|seg| seg.identifier.name).collect::<Vec<_>>();
|
||||
|
||||
// Look for a method in the current self type's impl module.
|
||||
match get_module(self, path.span, name_path.index(&FullRange)) {
|
||||
match get_module(self, path.span, &name_path[]) {
|
||||
Some(module) => match module.children.borrow().get(&name) {
|
||||
Some(binding) => {
|
||||
let p_str = self.path_names_to_string(&path);
|
||||
@ -4524,7 +4523,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
def: {:?}", result);
|
||||
let msg = format!("`{}` does not name a structure",
|
||||
self.path_names_to_string(path));
|
||||
self.resolve_error(path.span, msg.index(&FullRange));
|
||||
self.resolve_error(path.span, &msg[]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -4584,8 +4583,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
None => {
|
||||
self.resolve_error(
|
||||
expr.span,
|
||||
format!("use of undeclared label `{}`",
|
||||
token::get_ident(label)).index(&FullRange))
|
||||
&format!("use of undeclared label `{}`",
|
||||
token::get_ident(label))[])
|
||||
}
|
||||
Some(DlDef(def @ DefLabel(_))) => {
|
||||
// Since this def is a label, it is never read.
|
||||
@ -4720,11 +4719,11 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
// the same conclusion! - nmatsakis
|
||||
Occupied(entry) => if def != *entry.get() {
|
||||
self.session
|
||||
.bug(format!("node_id {} resolved first to {:?} and \
|
||||
.bug(&format!("node_id {} resolved first to {:?} and \
|
||||
then {:?}",
|
||||
node_id,
|
||||
*entry.get(),
|
||||
def).index(&FullRange));
|
||||
def)[]);
|
||||
},
|
||||
Vacant(entry) => { entry.insert(def); },
|
||||
}
|
||||
@ -4738,9 +4737,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
BindByValue(_) => {}
|
||||
BindByRef(..) => {
|
||||
self.resolve_error(pat.span,
|
||||
format!("cannot use `ref` binding mode \
|
||||
&format!("cannot use `ref` binding mode \
|
||||
with {}",
|
||||
descr).index(&FullRange));
|
||||
descr)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -4775,8 +4774,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
if names.len() == 0 {
|
||||
return "???".to_string();
|
||||
}
|
||||
self.names_to_string(names.into_iter().rev()
|
||||
.collect::<Vec<ast::Name>>().index(&FullRange))
|
||||
self.names_to_string(&names.into_iter().rev()
|
||||
.collect::<Vec<ast::Name>>()[])
|
||||
}
|
||||
|
||||
#[allow(dead_code)] // useful for debugging
|
||||
|
@ -128,7 +128,7 @@ pub fn find_crate_name(sess: Option<&Session>,
|
||||
attrs: &[ast::Attribute],
|
||||
input: &Input) -> String {
|
||||
let validate = |&: s: String, span: Option<Span>| {
|
||||
creader::validate_crate_name(sess, s.index(&FullRange), span);
|
||||
creader::validate_crate_name(sess, &s[], span);
|
||||
s
|
||||
};
|
||||
|
||||
@ -146,7 +146,7 @@ pub fn find_crate_name(sess: Option<&Session>,
|
||||
let msg = format!("--crate-name and #[crate_name] are \
|
||||
required to match, but `{}` != `{}`",
|
||||
s, name);
|
||||
sess.span_err(attr.span, msg.index(&FullRange));
|
||||
sess.span_err(attr.span, &msg[]);
|
||||
}
|
||||
}
|
||||
return validate(s.clone(), None);
|
||||
@ -192,17 +192,17 @@ fn symbol_hash<'tcx>(tcx: &ty::ctxt<'tcx>,
|
||||
// to be independent of one another in the crate.
|
||||
|
||||
symbol_hasher.reset();
|
||||
symbol_hasher.input_str(link_meta.crate_name.index(&FullRange));
|
||||
symbol_hasher.input_str(&link_meta.crate_name[]);
|
||||
symbol_hasher.input_str("-");
|
||||
symbol_hasher.input_str(link_meta.crate_hash.as_str());
|
||||
for meta in tcx.sess.crate_metadata.borrow().iter() {
|
||||
symbol_hasher.input_str(meta.index(&FullRange));
|
||||
symbol_hasher.input_str(&meta[]);
|
||||
}
|
||||
symbol_hasher.input_str("-");
|
||||
symbol_hasher.input_str(encoder::encoded_ty(tcx, t).index(&FullRange));
|
||||
symbol_hasher.input_str(&encoder::encoded_ty(tcx, t)[]);
|
||||
// Prefix with 'h' so that it never blends into adjacent digits
|
||||
let mut hash = String::from_str("h");
|
||||
hash.push_str(truncated_hash_result(symbol_hasher).index(&FullRange));
|
||||
hash.push_str(&truncated_hash_result(symbol_hasher)[]);
|
||||
hash
|
||||
}
|
||||
|
||||
@ -251,7 +251,7 @@ pub fn sanitize(s: &str) -> String {
|
||||
let mut tstr = String::new();
|
||||
for c in c.escape_unicode() { tstr.push(c) }
|
||||
result.push('$');
|
||||
result.push_str(tstr.index(&(1..)));
|
||||
result.push_str(&tstr[1..]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -260,7 +260,7 @@ pub fn sanitize(s: &str) -> String {
|
||||
if result.len() > 0u &&
|
||||
result.as_bytes()[0] != '_' as u8 &&
|
||||
! (result.as_bytes()[0] as char).is_xid_start() {
|
||||
return format!("_{}", result.index(&FullRange));
|
||||
return format!("_{}", &result[]);
|
||||
}
|
||||
|
||||
return result;
|
||||
@ -286,12 +286,12 @@ pub fn mangle<PI: Iterator<Item=PathElem>>(mut path: PI,
|
||||
|
||||
fn push(n: &mut String, s: &str) {
|
||||
let sani = sanitize(s);
|
||||
n.push_str(format!("{}{}", sani.len(), sani).index(&FullRange));
|
||||
n.push_str(&format!("{}{}", sani.len(), sani)[]);
|
||||
}
|
||||
|
||||
// First, connect each component with <len, name> pairs.
|
||||
for e in path {
|
||||
push(&mut n, token::get_name(e.name()).get().index(&FullRange))
|
||||
push(&mut n, &token::get_name(e.name()).get()[])
|
||||
}
|
||||
|
||||
match hash {
|
||||
@ -329,17 +329,17 @@ pub fn mangle_exported_name<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, path: PathEl
|
||||
hash.push(EXTRA_CHARS.as_bytes()[extra2] as char);
|
||||
hash.push(EXTRA_CHARS.as_bytes()[extra3] as char);
|
||||
|
||||
exported_name(path, hash.index(&FullRange))
|
||||
exported_name(path, &hash[])
|
||||
}
|
||||
|
||||
pub fn mangle_internal_name_by_type_and_seq<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
t: Ty<'tcx>,
|
||||
name: &str) -> String {
|
||||
let s = ppaux::ty_to_string(ccx.tcx(), t);
|
||||
let path = [PathName(token::intern(s.index(&FullRange))),
|
||||
let path = [PathName(token::intern(&s[])),
|
||||
gensym_name(name)];
|
||||
let hash = get_symbol_hash(ccx, t);
|
||||
mangle(ast_map::Values(path.iter()), Some(hash.index(&FullRange)))
|
||||
mangle(ast_map::Values(path.iter()), Some(&hash[]))
|
||||
}
|
||||
|
||||
pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> String {
|
||||
@ -357,9 +357,9 @@ pub fn remove(sess: &Session, path: &Path) {
|
||||
match fs::unlink(path) {
|
||||
Ok(..) => {}
|
||||
Err(e) => {
|
||||
sess.err(format!("failed to remove {}: {}",
|
||||
sess.err(&format!("failed to remove {}: {}",
|
||||
path.display(),
|
||||
e).index(&FullRange));
|
||||
e)[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -373,8 +373,8 @@ pub fn link_binary(sess: &Session,
|
||||
let mut out_filenames = Vec::new();
|
||||
for &crate_type in sess.crate_types.borrow().iter() {
|
||||
if invalid_output_for_target(sess, crate_type) {
|
||||
sess.bug(format!("invalid output type `{:?}` for target os `{}`",
|
||||
crate_type, sess.opts.target_triple).index(&FullRange));
|
||||
sess.bug(&format!("invalid output type `{:?}` for target os `{}`",
|
||||
crate_type, sess.opts.target_triple)[]);
|
||||
}
|
||||
let out_file = link_binary_output(sess, trans, crate_type, outputs,
|
||||
crate_name);
|
||||
@ -439,8 +439,8 @@ pub fn filename_for_input(sess: &Session,
|
||||
out_filename.with_filename(format!("lib{}.rlib", libname))
|
||||
}
|
||||
config::CrateTypeDylib => {
|
||||
let (prefix, suffix) = (sess.target.target.options.dll_prefix.index(&FullRange),
|
||||
sess.target.target.options.dll_suffix.index(&FullRange));
|
||||
let (prefix, suffix) = (&sess.target.target.options.dll_prefix[],
|
||||
&sess.target.target.options.dll_suffix[]);
|
||||
out_filename.with_filename(format!("{}{}{}",
|
||||
prefix,
|
||||
libname,
|
||||
@ -450,7 +450,7 @@ pub fn filename_for_input(sess: &Session,
|
||||
out_filename.with_filename(format!("lib{}.a", libname))
|
||||
}
|
||||
config::CrateTypeExecutable => {
|
||||
let suffix = sess.target.target.options.exe_suffix.index(&FullRange);
|
||||
let suffix = &sess.target.target.options.exe_suffix[];
|
||||
out_filename.with_filename(format!("{}{}", libname, suffix))
|
||||
}
|
||||
}
|
||||
@ -477,14 +477,14 @@ fn link_binary_output(sess: &Session,
|
||||
let obj_is_writeable = is_writeable(&obj_filename);
|
||||
let out_is_writeable = is_writeable(&out_filename);
|
||||
if !out_is_writeable {
|
||||
sess.fatal(format!("output file {} is not writeable -- check its \
|
||||
sess.fatal(&format!("output file {} is not writeable -- check its \
|
||||
permissions.",
|
||||
out_filename.display()).index(&FullRange));
|
||||
out_filename.display())[]);
|
||||
}
|
||||
else if !obj_is_writeable {
|
||||
sess.fatal(format!("object file {} is not writeable -- check its \
|
||||
sess.fatal(&format!("object file {} is not writeable -- check its \
|
||||
permissions.",
|
||||
obj_filename.display()).index(&FullRange));
|
||||
obj_filename.display())[]);
|
||||
}
|
||||
|
||||
match crate_type {
|
||||
@ -539,7 +539,7 @@ fn link_rlib<'a>(sess: &'a Session,
|
||||
for &(ref l, kind) in sess.cstore.get_used_libraries().borrow().iter() {
|
||||
match kind {
|
||||
cstore::NativeStatic => {
|
||||
ab.add_native_library(l.index(&FullRange)).unwrap();
|
||||
ab.add_native_library(&l[]).unwrap();
|
||||
}
|
||||
cstore::NativeFramework | cstore::NativeUnknown => {}
|
||||
}
|
||||
@ -586,13 +586,13 @@ fn link_rlib<'a>(sess: &'a Session,
|
||||
// the same filename for metadata (stomping over one another)
|
||||
let tmpdir = TempDir::new("rustc").ok().expect("needs a temp dir");
|
||||
let metadata = tmpdir.path().join(METADATA_FILENAME);
|
||||
match fs::File::create(&metadata).write(trans.metadata
|
||||
.index(&FullRange)) {
|
||||
match fs::File::create(&metadata).write(&trans.metadata
|
||||
[]) {
|
||||
Ok(..) => {}
|
||||
Err(e) => {
|
||||
sess.err(format!("failed to write {}: {}",
|
||||
sess.err(&format!("failed to write {}: {}",
|
||||
metadata.display(),
|
||||
e).index(&FullRange));
|
||||
e)[]);
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
}
|
||||
@ -610,25 +610,25 @@ fn link_rlib<'a>(sess: &'a Session,
|
||||
// was exactly 16 bytes.
|
||||
let bc_filename = obj_filename.with_extension(format!("{}.bc", i).as_slice());
|
||||
let bc_deflated_filename = obj_filename.with_extension(
|
||||
format!("{}.bytecode.deflate", i).index(&FullRange));
|
||||
&format!("{}.bytecode.deflate", i)[]);
|
||||
|
||||
let bc_data = match fs::File::open(&bc_filename).read_to_end() {
|
||||
Ok(buffer) => buffer,
|
||||
Err(e) => sess.fatal(format!("failed to read bytecode: {}",
|
||||
e).index(&FullRange))
|
||||
Err(e) => sess.fatal(&format!("failed to read bytecode: {}",
|
||||
e)[])
|
||||
};
|
||||
|
||||
let bc_data_deflated = match flate::deflate_bytes(bc_data.index(&FullRange)) {
|
||||
let bc_data_deflated = match flate::deflate_bytes(&bc_data[]) {
|
||||
Some(compressed) => compressed,
|
||||
None => sess.fatal(format!("failed to compress bytecode from {}",
|
||||
bc_filename.display()).index(&FullRange))
|
||||
None => sess.fatal(&format!("failed to compress bytecode from {}",
|
||||
bc_filename.display())[])
|
||||
};
|
||||
|
||||
let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) {
|
||||
Ok(file) => file,
|
||||
Err(e) => {
|
||||
sess.fatal(format!("failed to create compressed bytecode \
|
||||
file: {}", e).index(&FullRange))
|
||||
sess.fatal(&format!("failed to create compressed bytecode \
|
||||
file: {}", e)[])
|
||||
}
|
||||
};
|
||||
|
||||
@ -636,8 +636,8 @@ fn link_rlib<'a>(sess: &'a Session,
|
||||
bc_data_deflated.as_slice()) {
|
||||
Ok(()) => {}
|
||||
Err(e) => {
|
||||
sess.err(format!("failed to write compressed bytecode: \
|
||||
{}", e).index(&FullRange));
|
||||
sess.err(&format!("failed to write compressed bytecode: \
|
||||
{}", e)[]);
|
||||
sess.abort_if_errors()
|
||||
}
|
||||
};
|
||||
@ -677,7 +677,7 @@ fn write_rlib_bytecode_object_v1<T: Writer>(writer: &mut T,
|
||||
try! { writer.write(RLIB_BYTECODE_OBJECT_MAGIC) };
|
||||
try! { writer.write_le_u32(1) };
|
||||
try! { writer.write_le_u64(bc_data_deflated_size) };
|
||||
try! { writer.write(bc_data_deflated.index(&FullRange)) };
|
||||
try! { writer.write(&bc_data_deflated[]) };
|
||||
|
||||
let number_of_bytes_written_so_far =
|
||||
RLIB_BYTECODE_OBJECT_MAGIC.len() + // magic id
|
||||
@ -727,12 +727,12 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) {
|
||||
let ref name = sess.cstore.get_crate_data(cnum).name;
|
||||
let p = match *path {
|
||||
Some(ref p) => p.clone(), None => {
|
||||
sess.err(format!("could not find rlib for: `{}`",
|
||||
name).index(&FullRange));
|
||||
sess.err(&format!("could not find rlib for: `{}`",
|
||||
name)[]);
|
||||
continue
|
||||
}
|
||||
};
|
||||
ab.add_rlib(&p, name.index(&FullRange), sess.lto()).unwrap();
|
||||
ab.add_rlib(&p, &name[], sess.lto()).unwrap();
|
||||
|
||||
let native_libs = csearch::get_native_libraries(&sess.cstore, cnum);
|
||||
all_native_libs.extend(native_libs.into_iter());
|
||||
@ -754,7 +754,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) {
|
||||
cstore::NativeUnknown => "library",
|
||||
cstore::NativeFramework => "framework",
|
||||
};
|
||||
sess.note(format!("{}: {}", name, *lib).index(&FullRange));
|
||||
sess.note(&format!("{}: {}", name, *lib)[]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -768,12 +768,12 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
|
||||
|
||||
// The invocations of cc share some flags across platforms
|
||||
let pname = get_cc_prog(sess);
|
||||
let mut cmd = Command::new(pname.index(&FullRange));
|
||||
let mut cmd = Command::new(&pname[]);
|
||||
|
||||
cmd.args(sess.target.target.options.pre_link_args.index(&FullRange));
|
||||
cmd.args(&sess.target.target.options.pre_link_args[]);
|
||||
link_args(&mut cmd, sess, dylib, tmpdir.path(),
|
||||
trans, obj_filename, out_filename);
|
||||
cmd.args(sess.target.target.options.post_link_args.index(&FullRange));
|
||||
cmd.args(&sess.target.target.options.post_link_args[]);
|
||||
if !sess.target.target.options.no_compiler_rt {
|
||||
cmd.arg("-lcompiler-rt");
|
||||
}
|
||||
@ -791,22 +791,22 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
|
||||
match prog {
|
||||
Ok(prog) => {
|
||||
if !prog.status.success() {
|
||||
sess.err(format!("linking with `{}` failed: {}",
|
||||
sess.err(&format!("linking with `{}` failed: {}",
|
||||
pname,
|
||||
prog.status).index(&FullRange));
|
||||
sess.note(format!("{}", &cmd).index(&FullRange));
|
||||
prog.status)[]);
|
||||
sess.note(&format!("{}", &cmd)[]);
|
||||
let mut output = prog.error.clone();
|
||||
output.push_all(prog.output.index(&FullRange));
|
||||
sess.note(str::from_utf8(output.index(&FullRange)).unwrap());
|
||||
output.push_all(&prog.output[]);
|
||||
sess.note(str::from_utf8(&output[]).unwrap());
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
debug!("linker stderr:\n{}", String::from_utf8(prog.error).unwrap());
|
||||
debug!("linker stdout:\n{}", String::from_utf8(prog.output).unwrap());
|
||||
},
|
||||
Err(e) => {
|
||||
sess.err(format!("could not exec the linker `{}`: {}",
|
||||
sess.err(&format!("could not exec the linker `{}`: {}",
|
||||
pname,
|
||||
e).index(&FullRange));
|
||||
e)[]);
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
}
|
||||
@ -818,7 +818,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
|
||||
match Command::new("dsymutil").arg(out_filename).output() {
|
||||
Ok(..) => {}
|
||||
Err(e) => {
|
||||
sess.err(format!("failed to run dsymutil: {}", e).index(&FullRange));
|
||||
sess.err(&format!("failed to run dsymutil: {}", e)[]);
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
}
|
||||
@ -867,7 +867,7 @@ fn link_args(cmd: &mut Command,
|
||||
|
||||
let mut v = b"-Wl,-force_load,".to_vec();
|
||||
v.push_all(morestack.as_vec());
|
||||
cmd.arg(v.index(&FullRange));
|
||||
cmd.arg(&v[]);
|
||||
} else {
|
||||
cmd.args(&["-Wl,--whole-archive", "-lmorestack", "-Wl,--no-whole-archive"]);
|
||||
}
|
||||
@ -992,7 +992,7 @@ fn link_args(cmd: &mut Command,
|
||||
if sess.opts.cg.rpath {
|
||||
let mut v = "-Wl,-install_name,@rpath/".as_bytes().to_vec();
|
||||
v.push_all(out_filename.filename().unwrap());
|
||||
cmd.arg(v.index(&FullRange));
|
||||
cmd.arg(&v[]);
|
||||
}
|
||||
} else {
|
||||
cmd.arg("-shared");
|
||||
@ -1004,7 +1004,7 @@ fn link_args(cmd: &mut Command,
|
||||
// addl_lib_search_paths
|
||||
if sess.opts.cg.rpath {
|
||||
let sysroot = sess.sysroot();
|
||||
let target_triple = sess.opts.target_triple.index(&FullRange);
|
||||
let target_triple = &sess.opts.target_triple[];
|
||||
let get_install_prefix_lib_path = |:| {
|
||||
let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX");
|
||||
let tlib = filesearch::relative_target_lib_path(sysroot, target_triple);
|
||||
@ -1021,14 +1021,14 @@ fn link_args(cmd: &mut Command,
|
||||
get_install_prefix_lib_path: get_install_prefix_lib_path,
|
||||
realpath: ::util::fs::realpath
|
||||
};
|
||||
cmd.args(rpath::get_rpath_flags(rpath_config).index(&FullRange));
|
||||
cmd.args(&rpath::get_rpath_flags(rpath_config)[]);
|
||||
}
|
||||
|
||||
// Finally add all the linker arguments provided on the command line along
|
||||
// with any #[link_args] attributes found inside the crate
|
||||
let empty = Vec::new();
|
||||
cmd.args(sess.opts.cg.link_args.as_ref().unwrap_or(&empty).index(&FullRange));
|
||||
cmd.args(used_link_args.index(&FullRange));
|
||||
cmd.args(&sess.opts.cg.link_args.as_ref().unwrap_or(&empty)[]);
|
||||
cmd.args(&used_link_args[]);
|
||||
}
|
||||
|
||||
// # Native library linking
|
||||
@ -1082,14 +1082,14 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) {
|
||||
} else {
|
||||
// -force_load is the OSX equivalent of --whole-archive, but it
|
||||
// involves passing the full path to the library to link.
|
||||
let lib = archive::find_library(l.index(&FullRange),
|
||||
let lib = archive::find_library(&l[],
|
||||
sess.target.target.options.staticlib_prefix.as_slice(),
|
||||
sess.target.target.options.staticlib_suffix.as_slice(),
|
||||
search_path.index(&FullRange),
|
||||
&search_path[],
|
||||
&sess.diagnostic().handler);
|
||||
let mut v = b"-Wl,-force_load,".to_vec();
|
||||
v.push_all(lib.as_vec());
|
||||
cmd.arg(v.index(&FullRange));
|
||||
cmd.arg(&v[]);
|
||||
}
|
||||
}
|
||||
if takes_hints {
|
||||
@ -1102,7 +1102,7 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) {
|
||||
cmd.arg(format!("-l{}", l));
|
||||
}
|
||||
cstore::NativeFramework => {
|
||||
cmd.arg("-framework").arg(l.index(&FullRange));
|
||||
cmd.arg("-framework").arg(&l[]);
|
||||
}
|
||||
cstore::NativeStatic => unreachable!(),
|
||||
}
|
||||
@ -1158,7 +1158,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
|
||||
// Converts a library file-stem into a cc -l argument
|
||||
fn unlib<'a>(config: &config::Config, stem: &'a [u8]) -> &'a [u8] {
|
||||
if stem.starts_with("lib".as_bytes()) && !config.target.options.is_like_windows {
|
||||
stem.index(&(3..))
|
||||
&stem[3..]
|
||||
} else {
|
||||
stem
|
||||
}
|
||||
@ -1183,18 +1183,18 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
|
||||
// against the archive.
|
||||
if sess.lto() {
|
||||
let name = cratepath.filename_str().unwrap();
|
||||
let name = name.index(&(3..(name.len() - 5))); // chop off lib/.rlib
|
||||
let name = &name[3..(name.len() - 5)]; // chop off lib/.rlib
|
||||
time(sess.time_passes(),
|
||||
format!("altering {}.rlib", name).index(&FullRange),
|
||||
&format!("altering {}.rlib", name)[],
|
||||
(), |()| {
|
||||
let dst = tmpdir.join(cratepath.filename().unwrap());
|
||||
match fs::copy(&cratepath, &dst) {
|
||||
Ok(..) => {}
|
||||
Err(e) => {
|
||||
sess.err(format!("failed to copy {} to {}: {}",
|
||||
sess.err(&format!("failed to copy {} to {}: {}",
|
||||
cratepath.display(),
|
||||
dst.display(),
|
||||
e).index(&FullRange));
|
||||
e)[]);
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
}
|
||||
@ -1204,9 +1204,9 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
|
||||
match fs::chmod(&dst, io::USER_READ | io::USER_WRITE) {
|
||||
Ok(..) => {}
|
||||
Err(e) => {
|
||||
sess.err(format!("failed to chmod {} when preparing \
|
||||
sess.err(&format!("failed to chmod {} when preparing \
|
||||
for LTO: {}", dst.display(),
|
||||
e).index(&FullRange));
|
||||
e)[]);
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
}
|
||||
@ -1220,9 +1220,9 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
|
||||
maybe_ar_prog: sess.opts.cg.ar.clone()
|
||||
};
|
||||
let mut archive = Archive::open(config);
|
||||
archive.remove_file(format!("{}.o", name).index(&FullRange));
|
||||
archive.remove_file(&format!("{}.o", name)[]);
|
||||
let files = archive.files();
|
||||
if files.iter().any(|s| s.index(&FullRange).ends_with(".o")) {
|
||||
if files.iter().any(|s| s[].ends_with(".o")) {
|
||||
cmd.arg(dst);
|
||||
}
|
||||
});
|
||||
@ -1244,7 +1244,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
|
||||
|
||||
let mut v = "-l".as_bytes().to_vec();
|
||||
v.push_all(unlib(&sess.target, cratepath.filestem().unwrap()));
|
||||
cmd.arg(v.index(&FullRange));
|
||||
cmd.arg(&v[]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1286,7 +1286,7 @@ fn add_upstream_native_libraries(cmd: &mut Command, sess: &Session) {
|
||||
}
|
||||
cstore::NativeFramework => {
|
||||
cmd.arg("-framework");
|
||||
cmd.arg(lib.index(&FullRange));
|
||||
cmd.arg(&lib[]);
|
||||
}
|
||||
cstore::NativeStatic => {
|
||||
sess.bug("statics shouldn't be propagated");
|
||||
|
@ -53,30 +53,30 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
|
||||
let path = match path {
|
||||
Some(p) => p,
|
||||
None => {
|
||||
sess.fatal(format!("could not find rlib for: `{}`",
|
||||
name).index(&FullRange));
|
||||
sess.fatal(&format!("could not find rlib for: `{}`",
|
||||
name)[]);
|
||||
}
|
||||
};
|
||||
|
||||
let archive = ArchiveRO::open(&path).expect("wanted an rlib");
|
||||
let file = path.filename_str().unwrap();
|
||||
let file = file.index(&(3..(file.len() - 5))); // chop off lib/.rlib
|
||||
let file = &file[3..(file.len() - 5)]; // chop off lib/.rlib
|
||||
debug!("reading {}", file);
|
||||
for i in iter::count(0u, 1) {
|
||||
let bc_encoded = time(sess.time_passes(),
|
||||
format!("check for {}.{}.bytecode.deflate", name, i).as_slice(),
|
||||
(),
|
||||
|_| {
|
||||
archive.read(format!("{}.{}.bytecode.deflate",
|
||||
file, i).index(&FullRange))
|
||||
archive.read(&format!("{}.{}.bytecode.deflate",
|
||||
file, i)[])
|
||||
});
|
||||
let bc_encoded = match bc_encoded {
|
||||
Some(data) => data,
|
||||
None => {
|
||||
if i == 0 {
|
||||
// No bitcode was found at all.
|
||||
sess.fatal(format!("missing compressed bytecode in {}",
|
||||
path.display()).index(&FullRange));
|
||||
sess.fatal(&format!("missing compressed bytecode in {}",
|
||||
path.display())[]);
|
||||
}
|
||||
// No more bitcode files to read.
|
||||
break;
|
||||
@ -91,20 +91,20 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
|
||||
if version == 1 {
|
||||
// The only version existing so far
|
||||
let data_size = extract_compressed_bytecode_size_v1(bc_encoded);
|
||||
let compressed_data = bc_encoded.index(&(
|
||||
let compressed_data = &bc_encoded[
|
||||
link::RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET..
|
||||
(link::RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET + data_size as uint)));
|
||||
(link::RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET + data_size as uint)];
|
||||
|
||||
match flate::inflate_bytes(compressed_data) {
|
||||
Some(inflated) => inflated,
|
||||
None => {
|
||||
sess.fatal(format!("failed to decompress bc of `{}`",
|
||||
name).index(&FullRange))
|
||||
sess.fatal(&format!("failed to decompress bc of `{}`",
|
||||
name)[])
|
||||
}
|
||||
}
|
||||
} else {
|
||||
sess.fatal(format!("Unsupported bytecode format version {}",
|
||||
version).index(&FullRange))
|
||||
sess.fatal(&format!("Unsupported bytecode format version {}",
|
||||
version)[])
|
||||
}
|
||||
})
|
||||
} else {
|
||||
@ -114,8 +114,8 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
|
||||
match flate::inflate_bytes(bc_encoded) {
|
||||
Some(bc) => bc,
|
||||
None => {
|
||||
sess.fatal(format!("failed to decompress bc of `{}`",
|
||||
name).index(&FullRange))
|
||||
sess.fatal(&format!("failed to decompress bc of `{}`",
|
||||
name)[])
|
||||
}
|
||||
}
|
||||
})
|
||||
@ -124,7 +124,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
|
||||
let ptr = bc_decoded.as_slice().as_ptr();
|
||||
debug!("linking {}, part {}", name, i);
|
||||
time(sess.time_passes(),
|
||||
format!("ll link {}.{}", name, i).index(&FullRange),
|
||||
&format!("ll link {}.{}", name, i)[],
|
||||
(),
|
||||
|()| unsafe {
|
||||
if !llvm::LLVMRustLinkInExternalBitcode(llmod,
|
||||
@ -132,7 +132,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
|
||||
bc_decoded.len() as libc::size_t) {
|
||||
write::llvm_err(sess.diagnostic().handler(),
|
||||
format!("failed to load bc of `{}`",
|
||||
name.index(&FullRange)));
|
||||
&name[]));
|
||||
}
|
||||
});
|
||||
}
|
||||
@ -186,7 +186,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
|
||||
fn is_versioned_bytecode_format(bc: &[u8]) -> bool {
|
||||
let magic_id_byte_count = link::RLIB_BYTECODE_OBJECT_MAGIC.len();
|
||||
return bc.len() > magic_id_byte_count &&
|
||||
bc.index(&(0..magic_id_byte_count)) == link::RLIB_BYTECODE_OBJECT_MAGIC;
|
||||
&bc[0..magic_id_byte_count] == link::RLIB_BYTECODE_OBJECT_MAGIC;
|
||||
}
|
||||
|
||||
fn extract_bytecode_format_version(bc: &[u8]) -> u32 {
|
||||
@ -198,8 +198,7 @@ fn extract_compressed_bytecode_size_v1(bc: &[u8]) -> u64 {
|
||||
}
|
||||
|
||||
fn read_from_le_bytes<T: Int>(bytes: &[u8], position_in_bytes: uint) -> T {
|
||||
let byte_data = bytes.index(&(position_in_bytes..
|
||||
(position_in_bytes + mem::size_of::<T>())));
|
||||
let byte_data = &bytes[position_in_bytes..(position_in_bytes + mem::size_of::<T>())];
|
||||
let data = unsafe {
|
||||
*(byte_data.as_ptr() as *const T)
|
||||
};
|
||||
|
@ -47,14 +47,14 @@ pub fn llvm_err(handler: &diagnostic::Handler, msg: String) -> ! {
|
||||
unsafe {
|
||||
let cstr = llvm::LLVMRustGetLastError();
|
||||
if cstr == ptr::null() {
|
||||
handler.fatal(msg.index(&FullRange));
|
||||
handler.fatal(&msg[]);
|
||||
} else {
|
||||
let err = ffi::c_str_to_bytes(&cstr);
|
||||
let err = String::from_utf8_lossy(err.as_slice()).to_string();
|
||||
libc::free(cstr as *mut _);
|
||||
handler.fatal(format!("{}: {}",
|
||||
msg.index(&FullRange),
|
||||
err.index(&FullRange)).index(&FullRange));
|
||||
handler.fatal(&format!("{}: {}",
|
||||
&msg[],
|
||||
&err[])[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -104,13 +104,13 @@ impl SharedEmitter {
|
||||
match diag.code {
|
||||
Some(ref code) => {
|
||||
handler.emit_with_code(None,
|
||||
diag.msg.index(&FullRange),
|
||||
code.index(&FullRange),
|
||||
&diag.msg[],
|
||||
&code[],
|
||||
diag.lvl);
|
||||
},
|
||||
None => {
|
||||
handler.emit(None,
|
||||
diag.msg.index(&FullRange),
|
||||
&diag.msg[],
|
||||
diag.lvl);
|
||||
},
|
||||
}
|
||||
@ -165,8 +165,8 @@ fn get_llvm_opt_level(optimize: config::OptLevel) -> llvm::CodeGenOptLevel {
|
||||
|
||||
fn create_target_machine(sess: &Session) -> TargetMachineRef {
|
||||
let reloc_model_arg = match sess.opts.cg.relocation_model {
|
||||
Some(ref s) => s.index(&FullRange),
|
||||
None => sess.target.target.options.relocation_model.index(&FullRange)
|
||||
Some(ref s) => &s[],
|
||||
None => &sess.target.target.options.relocation_model[]
|
||||
};
|
||||
let reloc_model = match reloc_model_arg {
|
||||
"pic" => llvm::RelocPIC,
|
||||
@ -174,10 +174,10 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef {
|
||||
"default" => llvm::RelocDefault,
|
||||
"dynamic-no-pic" => llvm::RelocDynamicNoPic,
|
||||
_ => {
|
||||
sess.err(format!("{:?} is not a valid relocation mode",
|
||||
sess.err(&format!("{:?} is not a valid relocation mode",
|
||||
sess.opts
|
||||
.cg
|
||||
.relocation_model).index(&FullRange));
|
||||
.relocation_model)[]);
|
||||
sess.abort_if_errors();
|
||||
unreachable!();
|
||||
}
|
||||
@ -198,8 +198,8 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef {
|
||||
let fdata_sections = ffunction_sections;
|
||||
|
||||
let code_model_arg = match sess.opts.cg.code_model {
|
||||
Some(ref s) => s.index(&FullRange),
|
||||
None => sess.target.target.options.code_model.index(&FullRange)
|
||||
Some(ref s) => &s[],
|
||||
None => &sess.target.target.options.code_model[]
|
||||
};
|
||||
|
||||
let code_model = match code_model_arg {
|
||||
@ -209,16 +209,16 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef {
|
||||
"medium" => llvm::CodeModelMedium,
|
||||
"large" => llvm::CodeModelLarge,
|
||||
_ => {
|
||||
sess.err(format!("{:?} is not a valid code model",
|
||||
sess.err(&format!("{:?} is not a valid code model",
|
||||
sess.opts
|
||||
.cg
|
||||
.code_model).index(&FullRange));
|
||||
.code_model)[]);
|
||||
sess.abort_if_errors();
|
||||
unreachable!();
|
||||
}
|
||||
};
|
||||
|
||||
let triple = sess.target.target.llvm_target.index(&FullRange);
|
||||
let triple = &sess.target.target.llvm_target[];
|
||||
|
||||
let tm = unsafe {
|
||||
let triple = CString::from_slice(triple.as_bytes());
|
||||
@ -350,13 +350,13 @@ unsafe extern "C" fn inline_asm_handler(diag: SMDiagnosticRef,
|
||||
match cgcx.lto_ctxt {
|
||||
Some((sess, _)) => {
|
||||
sess.codemap().with_expn_info(ExpnId::from_llvm_cookie(cookie), |info| match info {
|
||||
Some(ei) => sess.span_err(ei.call_site, msg.index(&FullRange)),
|
||||
None => sess.err(msg.index(&FullRange)),
|
||||
Some(ei) => sess.span_err(ei.call_site, &msg[]),
|
||||
None => sess.err(&msg[]),
|
||||
});
|
||||
}
|
||||
|
||||
None => {
|
||||
cgcx.handler.err(msg.index(&FullRange));
|
||||
cgcx.handler.err(&msg[]);
|
||||
cgcx.handler.note("build without -C codegen-units for more exact errors");
|
||||
}
|
||||
}
|
||||
@ -518,14 +518,14 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
|
||||
}
|
||||
|
||||
if config.emit_asm {
|
||||
let path = output_names.with_extension(format!("{}.s", name_extra).index(&FullRange));
|
||||
let path = output_names.with_extension(&format!("{}.s", name_extra)[]);
|
||||
with_codegen(tm, llmod, config.no_builtins, |cpm| {
|
||||
write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::AssemblyFileType);
|
||||
});
|
||||
}
|
||||
|
||||
if config.emit_obj {
|
||||
let path = output_names.with_extension(format!("{}.o", name_extra).index(&FullRange));
|
||||
let path = output_names.with_extension(&format!("{}.o", name_extra)[]);
|
||||
with_codegen(tm, llmod, config.no_builtins, |cpm| {
|
||||
write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::ObjectFileType);
|
||||
});
|
||||
@ -639,7 +639,7 @@ pub fn run_passes(sess: &Session,
|
||||
|
||||
// Process the work items, optionally using worker threads.
|
||||
if sess.opts.cg.codegen_units == 1 {
|
||||
run_work_singlethreaded(sess, trans.reachable.index(&FullRange), work_items);
|
||||
run_work_singlethreaded(sess, &trans.reachable[], work_items);
|
||||
} else {
|
||||
run_work_multithreaded(sess, work_items, sess.opts.cg.codegen_units);
|
||||
}
|
||||
@ -666,8 +666,8 @@ pub fn run_passes(sess: &Session,
|
||||
if crate_output.single_output_file.is_some() {
|
||||
// 2) Multiple codegen units, with `-o some_name`. We have
|
||||
// no good solution for this case, so warn the user.
|
||||
sess.warn(format!("ignoring -o because multiple .{} files were produced",
|
||||
ext).index(&FullRange));
|
||||
sess.warn(&format!("ignoring -o because multiple .{} files were produced",
|
||||
ext)[]);
|
||||
} else {
|
||||
// 3) Multiple codegen units, but no `-o some_name`. We
|
||||
// just leave the `foo.0.x` files in place.
|
||||
@ -700,20 +700,20 @@ pub fn run_passes(sess: &Session,
};

let pname = get_cc_prog(sess);
let mut cmd = Command::new(pname.index(&FullRange));
let mut cmd = Command::new(&pname[]);

cmd.args(sess.target.target.options.pre_link_args.index(&FullRange));
cmd.args(&sess.target.target.options.pre_link_args[]);
cmd.arg("-nostdlib");

for index in range(0, trans.modules.len()) {
cmd.arg(crate_output.with_extension(format!("{}.o", index).index(&FullRange)));
cmd.arg(crate_output.with_extension(&format!("{}.o", index)[]));
}

cmd.arg("-r")
.arg("-o")
.arg(windows_output_path.as_ref().unwrap_or(output_path));

cmd.args(sess.target.target.options.post_link_args.index(&FullRange));
cmd.args(&sess.target.target.options.post_link_args[]);

if (sess.opts.debugging_opts & config::PRINT_LINK_ARGS) != 0 {
println!("{}", &cmd);
@ -725,15 +725,15 @@ pub fn run_passes(sess: &Session,
match cmd.status() {
Ok(status) => {
if !status.success() {
sess.err(format!("linking of {} with `{}` failed",
output_path.display(), cmd).index(&FullRange));
sess.err(&format!("linking of {} with `{}` failed",
output_path.display(), cmd)[]);
sess.abort_if_errors();
}
},
Err(e) => {
sess.err(format!("could not exec the linker `{}`: {}",
sess.err(&format!("could not exec the linker `{}`: {}",
pname,
e).index(&FullRange));
e)[]);
sess.abort_if_errors();
},
}
@ -818,12 +818,12 @@ pub fn run_passes(sess: &Session,
for i in range(0, trans.modules.len()) {
if modules_config.emit_obj {
let ext = format!("{}.o", i);
remove(sess, &crate_output.with_extension(ext.index(&FullRange)));
remove(sess, &crate_output.with_extension(&ext[]));
}

if modules_config.emit_bc && !keep_numbered_bitcode {
let ext = format!("{}.bc", i);
remove(sess, &crate_output.with_extension(ext.index(&FullRange)));
remove(sess, &crate_output.with_extension(&ext[]));
}
}

@ -949,7 +949,7 @@ fn run_work_multithreaded(sess: &Session,

pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
let pname = get_cc_prog(sess);
let mut cmd = Command::new(pname.index(&FullRange));
let mut cmd = Command::new(&pname[]);

cmd.arg("-c").arg("-o").arg(outputs.path(config::OutputTypeObject))
.arg(outputs.temp_path(config::OutputTypeAssembly));
@ -958,20 +958,20 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
match cmd.output() {
Ok(prog) => {
if !prog.status.success() {
sess.err(format!("linking with `{}` failed: {}",
sess.err(&format!("linking with `{}` failed: {}",
pname,
prog.status).index(&FullRange));
sess.note(format!("{}", &cmd).index(&FullRange));
prog.status)[]);
sess.note(&format!("{}", &cmd)[]);
let mut note = prog.error.clone();
note.push_all(prog.output.index(&FullRange));
sess.note(str::from_utf8(note.index(&FullRange)).unwrap());
note.push_all(&prog.output[]);
sess.note(str::from_utf8(&note[]).unwrap());
sess.abort_if_errors();
}
},
Err(e) => {
sess.err(format!("could not exec the linker `{}`: {}",
sess.err(&format!("could not exec the linker `{}`: {}",
pname,
e).index(&FullRange));
e)[]);
sess.abort_if_errors();
}
}
@ -1004,7 +1004,7 @@ unsafe fn configure_llvm(sess: &Session) {
if sess.print_llvm_passes() { add("-debug-pass=Structure"); }

for arg in sess.opts.cg.llvm_args.iter() {
add((*arg).index(&FullRange));
add(&(*arg)[]);
}
}

@ -94,7 +94,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {

// dump info about all the external crates referenced from this crate
self.sess.cstore.iter_crate_data(|n, cmd| {
self.fmt.external_crate_str(krate.span, cmd.name.index(&FullRange), n);
self.fmt.external_crate_str(krate.span, &cmd.name[], n);
});
self.fmt.recorder.record("end_external_crates\n");
}
@ -143,7 +143,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
for &(ref span, ref qualname) in sub_paths.iter() {
self.fmt.sub_mod_ref_str(path.span,
*span,
qualname.index(&FullRange),
&qualname[],
self.cur_scope);
}
}
@ -161,7 +161,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
for &(ref span, ref qualname) in sub_paths.iter() {
self.fmt.sub_mod_ref_str(path.span,
*span,
qualname.index(&FullRange),
&qualname[],
self.cur_scope);
}
}
@ -180,17 +180,17 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
let (ref span, ref qualname) = sub_paths[len-2];
self.fmt.sub_type_ref_str(path.span,
*span,
qualname.index(&FullRange));
&qualname[]);

// write the other sub-paths
if len <= 2 {
return;
}
let sub_paths = sub_paths.index(&(0..(len-2)));
let sub_paths = &sub_paths[0..(len-2)];
for &(ref span, ref qualname) in sub_paths.iter() {
self.fmt.sub_mod_ref_str(path.span,
*span,
qualname.index(&FullRange),
&qualname[],
self.cur_scope);
}
}
@ -198,8 +198,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
// looks up anything, not just a type
fn lookup_type_ref(&self, ref_id: NodeId) -> Option<DefId> {
if !self.analysis.ty_cx.def_map.borrow().contains_key(&ref_id) {
self.sess.bug(format!("def_map has no key for {} in lookup_type_ref",
ref_id).index(&FullRange));
self.sess.bug(&format!("def_map has no key for {} in lookup_type_ref",
ref_id)[]);
}
let def = (*self.analysis.ty_cx.def_map.borrow())[ref_id];
match def {
@ -211,8 +211,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
fn lookup_def_kind(&self, ref_id: NodeId, span: Span) -> Option<recorder::Row> {
let def_map = self.analysis.ty_cx.def_map.borrow();
if !def_map.contains_key(&ref_id) {
self.sess.span_bug(span, format!("def_map has no key for {} in lookup_def_kind",
ref_id).index(&FullRange));
self.sess.span_bug(span, &format!("def_map has no key for {} in lookup_def_kind",
ref_id)[]);
}
let def = (*def_map)[ref_id];
match def {
@ -240,8 +240,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
def::DefUse(_) |
def::DefMethod(..) |
def::DefPrimTy(_) => {
self.sess.span_bug(span, format!("lookup_def_kind for unexpected item: {:?}",
def).index(&FullRange));
self.sess.span_bug(span, &format!("lookup_def_kind for unexpected item: {:?}",
def)[]);
},
}
}
@ -262,8 +262,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
span_utils.span_for_last_ident(p.span),
id,
qualname,
path_to_string(p).index(&FullRange),
typ.index(&FullRange));
&path_to_string(p)[],
&typ[]);
}
self.collected_paths.clear();
}
@ -285,7 +285,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
match item.node {
ast::ItemImpl(_, _, _, _, ref ty, _) => {
let mut result = String::from_str("<");
result.push_str(ty_to_string(&**ty).index(&FullRange));
result.push_str(&ty_to_string(&**ty)[]);

match ty::trait_of_item(&self.analysis.ty_cx,
ast_util::local_def(method.id)) {
@ -301,18 +301,18 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
}
_ => {
self.sess.span_bug(method.span,
format!("Container {} for method {} not an impl?",
impl_id.node, method.id).index(&FullRange));
&format!("Container {} for method {} not an impl?",
impl_id.node, method.id)[]);
},
}
},
_ => {
self.sess.span_bug(method.span,
format!("Container {} for method {} is not a node item {:?}",
impl_id.node,
method.id,
self.analysis.ty_cx.map.get(impl_id.node)
).index(&FullRange));
&format!(
"Container {} for method {} is not a node item {:?}",
impl_id.node,
method.id,
self.analysis.ty_cx.map.get(impl_id.node))[]);
},
},
None => match ty::trait_of_item(&self.analysis.ty_cx,
@ -327,21 +327,21 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
}
_ => {
self.sess.span_bug(method.span,
format!("Could not find container {} for method {}",
def_id.node, method.id).index(&FullRange));
&format!("Could not find container {} for method {}",
def_id.node, method.id)[]);
}
}
},
None => {
self.sess.span_bug(method.span,
format!("Could not find container for method {}",
method.id).index(&FullRange));
&format!("Could not find container for method {}",
method.id)[]);
},
},
};

qualname.push_str(get_ident(method.pe_ident()).get());
let qualname = qualname.index(&FullRange);
let qualname = &qualname[];

// record the decl for this def (if it has one)
let decl_id = ty::trait_item_of_item(&self.analysis.ty_cx,
@ -430,13 +430,13 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
Some(sub_span) => self.fmt.field_str(field.span,
Some(sub_span),
field.node.id,
name.get().index(&FullRange),
qualname.index(&FullRange),
typ.index(&FullRange),
&name.get()[],
&qualname[],
&typ[],
scope_id),
None => self.sess.span_bug(field.span,
format!("Could not find sub-span for field {}",
qualname).index(&FullRange)),
&format!("Could not find sub-span for field {}",
qualname)[]),
}
},
_ => (),
@ -463,7 +463,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
self.fmt.typedef_str(full_span,
Some(*param_ss),
param.id,
name.index(&FullRange),
&name[],
"");
}
self.visit_generics(generics);
@ -480,10 +480,10 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
self.fmt.fn_str(item.span,
sub_span,
item.id,
qualname.index(&FullRange),
&qualname[],
self.cur_scope);

self.process_formals(&decl.inputs, qualname.index(&FullRange));
self.process_formals(&decl.inputs, &qualname[]);

// walk arg and return types
for arg in decl.inputs.iter() {
@ -497,7 +497,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
// walk the body
self.nest(item.id, |v| v.visit_block(&*body));

self.process_generic_params(ty_params, item.span, qualname.index(&FullRange), item.id);
self.process_generic_params(ty_params, item.span, &qualname[], item.id);
}

fn process_static(&mut self,
@ -519,9 +519,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
sub_span,
item.id,
get_ident(item.ident).get(),
qualname.index(&FullRange),
value.index(&FullRange),
ty_to_string(&*typ).index(&FullRange),
&qualname[],
&value[],
&ty_to_string(&*typ)[],
self.cur_scope);

// walk type and init value
@ -542,9 +542,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
sub_span,
item.id,
get_ident(item.ident).get(),
qualname.index(&FullRange),
&qualname[],
"",
ty_to_string(&*typ).index(&FullRange),
&ty_to_string(&*typ)[],
self.cur_scope);

// walk type and init value
@ -568,17 +568,17 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
sub_span,
item.id,
ctor_id,
qualname.index(&FullRange),
&qualname[],
self.cur_scope,
val.index(&FullRange));
&val[]);

// fields
for field in def.fields.iter() {
self.process_struct_field_def(field, qualname.index(&FullRange), item.id);
self.process_struct_field_def(field, &qualname[], item.id);
self.visit_ty(&*field.node.ty);
}

self.process_generic_params(ty_params, item.span, qualname.index(&FullRange), item.id);
self.process_generic_params(ty_params, item.span, &qualname[], item.id);
}

fn process_enum(&mut self,
@ -591,12 +591,12 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
Some(sub_span) => self.fmt.enum_str(item.span,
Some(sub_span),
item.id,
enum_name.index(&FullRange),
&enum_name[],
self.cur_scope,
val.index(&FullRange)),
&val[]),
None => self.sess.span_bug(item.span,
format!("Could not find subspan for enum {}",
enum_name).index(&FullRange)),
&format!("Could not find subspan for enum {}",
enum_name)[]),
}
for variant in enum_definition.variants.iter() {
let name = get_ident(variant.node.name);
@ -612,9 +612,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
self.span.span_for_first_ident(variant.span),
variant.node.id,
name,
qualname.index(&FullRange),
enum_name.index(&FullRange),
val.index(&FullRange),
&qualname[],
&enum_name[],
&val[],
item.id);
for arg in args.iter() {
self.visit_ty(&*arg.ty);
@ -630,9 +630,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
self.span.span_for_first_ident(variant.span),
variant.node.id,
ctor_id,
qualname.index(&FullRange),
enum_name.index(&FullRange),
val.index(&FullRange),
&qualname[],
&enum_name[],
&val[],
item.id);

for field in struct_def.fields.iter() {
@ -643,7 +643,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
}
}

self.process_generic_params(ty_params, item.span, enum_name.index(&FullRange), item.id);
self.process_generic_params(ty_params, item.span, &enum_name[], item.id);
}

fn process_impl(&mut self,
@ -703,9 +703,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
self.fmt.trait_str(item.span,
sub_span,
item.id,
qualname.index(&FullRange),
&qualname[],
self.cur_scope,
val.index(&FullRange));
&val[]);

// super-traits
for super_bound in trait_refs.iter() {
@ -737,7 +737,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
}

// walk generics and methods
self.process_generic_params(generics, item.span, qualname.index(&FullRange), item.id);
self.process_generic_params(generics, item.span, &qualname[], item.id);
for method in methods.iter() {
self.visit_trait_item(method)
}
@ -755,9 +755,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
self.fmt.mod_str(item.span,
sub_span,
item.id,
qualname.index(&FullRange),
&qualname[],
self.cur_scope,
filename.index(&FullRange));
&filename[]);

self.nest(item.id, |v| visit::walk_mod(v, m));
}
@ -840,8 +840,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
def_id,
self.cur_scope),
_ => self.sess.span_bug(span,
format!("Unexpected def kind while looking up path in '{}'",
self.span.snippet(span)).index(&FullRange)),
&format!("Unexpected def kind while looking up path in '{}'",
self.span.snippet(span))[]),
}
// modules or types in the path prefix
match *def {
@ -959,7 +959,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
self.cur_scope);

// walk receiver and args
visit::walk_exprs(self, args.index(&FullRange));
visit::walk_exprs(self, &args[]);
}

fn process_pat(&mut self, p:&ast::Pat) {
@ -975,8 +975,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
Some(sd) => sd,
None => {
self.sess.span_bug(p.span,
format!("Could not find struct_def for `{}`",
self.span.snippet(p.span)).index(&FullRange));
&format!("Could not find struct_def for `{}`",
self.span.snippet(p.span))[]);
}
};
for &Spanned { node: ref field, span } in fields.iter() {
@ -1061,8 +1061,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
self.fmt.typedef_str(item.span,
sub_span,
item.id,
qualname.index(&FullRange),
value.index(&FullRange));
&qualname[],
&value[]);

self.visit_ty(&**ty);
self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id);
@ -1121,13 +1121,13 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
},
None => {
self.sess.span_bug(method_type.span,
format!("Could not find trait for method {}",
method_type.id).index(&FullRange));
&format!("Could not find trait for method {}",
method_type.id)[]);
},
};

qualname.push_str(get_ident(method_type.ident).get());
let qualname = qualname.index(&FullRange);
let qualname = &qualname[];

let sub_span = self.span.sub_span_after_keyword(method_type.span, keywords::Fn);
self.fmt.method_decl_str(method_type.span,
@ -1262,7 +1262,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
id,
cnum,
name,
s.index(&FullRange),
&s[],
self.cur_scope);
},
}
@ -1371,8 +1371,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
}

let mut id = String::from_str("$");
id.push_str(ex.id.to_string().index(&FullRange));
self.process_formals(&decl.inputs, id.index(&FullRange));
id.push_str(&ex.id.to_string()[]);
self.process_formals(&decl.inputs, &id[]);

// walk arg and return types
for arg in decl.inputs.iter() {
@ -1418,8 +1418,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
let def_map = self.analysis.ty_cx.def_map.borrow();
if !def_map.contains_key(&id) {
self.sess.span_bug(p.span,
format!("def_map has no key for {} in visit_arm",
id).index(&FullRange));
&format!("def_map has no key for {} in visit_arm",
id)[]);
}
let def = &(*def_map)[id];
match *def {
@ -1434,8 +1434,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
self.fmt.variable_str(p.span,
Some(p.span),
id,
path_to_string(p).index(&FullRange),
value.index(&FullRange),
&path_to_string(p)[],
&value[],
"")
}
def::DefVariant(..) => {
@ -1490,9 +1490,9 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
self.fmt.variable_str(p.span,
sub_span,
id,
path_to_string(p).index(&FullRange),
value.index(&FullRange),
typ.index(&FullRange));
&path_to_string(p)[],
&value[],
&typ[]);
}
self.collected_paths.clear();

@ -1511,7 +1511,7 @@ pub fn process_crate(sess: &Session,
}

assert!(analysis.glob_map.is_some());
let cratename = match attr::find_crate_name(krate.attrs.index(&FullRange)) {
let cratename = match attr::find_crate_name(&krate.attrs[]) {
Some(name) => name.get().to_string(),
None => {
info!("Could not find crate name, using 'unknown_crate'");
@ -1531,8 +1531,8 @@ pub fn process_crate(sess: &Session,
};

match fs::mkdir_recursive(&root_path, io::USER_RWX) {
Err(e) => sess.err(format!("Could not create directory {}: {}",
root_path.display(), e).index(&FullRange)),
Err(e) => sess.err(&format!("Could not create directory {}: {}",
root_path.display(), e)[]),
_ => (),
}

@ -1549,7 +1549,7 @@ pub fn process_crate(sess: &Session,
Ok(f) => box f,
Err(e) => {
let disp = root_path.display();
sess.fatal(format!("Could not open {}: {}", disp, e).index(&FullRange));
sess.fatal(&format!("Could not open {}: {}", disp, e)[]);
}
};
root_path.pop();
@ -1575,7 +1575,7 @@ pub fn process_crate(sess: &Session,
cur_scope: 0
};

visitor.dump_crate_info(cratename.index(&FullRange), krate);
visitor.dump_crate_info(&cratename[], krate);

visit::walk_crate(&mut visitor, krate);
}
@ -41,7 +41,7 @@ impl Recorder {
assert!(self.dump_spans);
let result = format!("span,kind,{},{},text,\"{}\"\n",
kind, su.extent_str(span), escape(su.snippet(span)));
self.record(result.index(&FullRange));
self.record(&result[]);
}
}

@ -158,17 +158,17 @@ impl<'a> FmtStrs<'a> {
values: Vec<String>,
span: Span) -> Option<String> {
if values.len() != fields.len() {
self.span.sess.span_bug(span, format!(
self.span.sess.span_bug(span, &format!(
"Mismatch between length of fields for '{}', expected '{}', found '{}'",
kind, fields.len(), values.len()).index(&FullRange));
kind, fields.len(), values.len())[]);
}

let values = values.iter().map(|s| {
// Never take more than 1020 chars
if s.len() > 1020 {
s.index(&(0..1020))
&s[0..1020]
} else {
s.index(&FullRange)
&s[]
}
});

@ -184,7 +184,7 @@ impl<'a> FmtStrs<'a> {
}
)));
Some(strs.fold(String::new(), |mut s, ss| {
s.push_str(ss.index(&FullRange));
s.push_str(&ss[]);
s
}))
}
@ -196,9 +196,9 @@ impl<'a> FmtStrs<'a> {
let (label, ref fields, needs_span, dump_spans) = FmtStrs::lookup_row(kind);

if needs_span {
self.span.sess.span_bug(span, format!(
self.span.sess.span_bug(span, &format!(
"Called record_without_span for '{}' which does requires a span",
label).index(&FullRange));
label)[]);
}
assert!(!dump_spans);

@ -212,9 +212,9 @@ impl<'a> FmtStrs<'a> {
};

let mut result = String::from_str(label);
result.push_str(values_str.index(&FullRange));
result.push_str(&values_str[]);
result.push_str("\n");
self.recorder.record(result.index(&FullRange));
self.recorder.record(&result[]);
}

pub fn record_with_span(&mut self,
@ -245,7 +245,7 @@ impl<'a> FmtStrs<'a> {
None => return,
};
let result = format!("{},{}{}\n", label, self.span.extent_str(sub_span), values_str);
self.recorder.record(result.index(&FullRange));
self.recorder.record(&result[]);
}

pub fn check_and_record(&mut self,
@ -275,7 +275,7 @@ impl<'a> FmtStrs<'a> {
// variable def's node id
let mut qualname = String::from_str(name);
qualname.push_str("$");
qualname.push_str(id.to_string().index(&FullRange));
qualname.push_str(&id.to_string()[]);
self.check_and_record(Variable,
span,
sub_span,
@ -217,8 +217,8 @@ impl<'a> SpanUtils<'a> {
if bracket_count != 0 {
let loc = self.sess.codemap().lookup_char_pos(span.lo);
self.sess.span_bug(span,
format!("Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}",
self.snippet(span), loc.file.name, loc.line).index(&FullRange));
&format!("Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}",
self.snippet(span), loc.file.name, loc.line)[]);
}
if result.is_none() && prev.tok.is_ident() && bracket_count == 0 {
return self.make_sub_span(span, Some(prev.sp));
@ -242,9 +242,9 @@ impl<'a> SpanUtils<'a> {
if ts.tok == token::Eof {
if bracket_count != 0 {
let loc = self.sess.codemap().lookup_char_pos(span.lo);
self.sess.span_bug(span, format!(
self.sess.span_bug(span, &format!(
"Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}",
self.snippet(span), loc.file.name, loc.line).index(&FullRange));
self.snippet(span), loc.file.name, loc.line)[]);
}
return result
}

@ -427,7 +427,7 @@ fn enter_match<'a, 'b, 'p, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
let _indenter = indenter();

m.iter().filter_map(|br| {
e(br.pats.index(&FullRange)).map(|pats| {
e(&br.pats[]).map(|pats| {
let this = br.pats[col];
let mut bound_ptrs = br.bound_ptrs.clone();
match this.node {
@ -471,8 +471,8 @@ fn enter_default<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
// Collect all of the matches that can match against anything.
enter_match(bcx, dm, m, col, val, |pats| {
if pat_is_binding_or_wild(dm, &*pats[col]) {
let mut r = pats.index(&(0..col)).to_vec();
r.push_all(pats.index(&((col + 1)..)));
let mut r = pats[0..col].to_vec();
r.push_all(&pats[(col + 1)..]);
Some(r)
} else {
None
@ -548,7 +548,7 @@ fn enter_opt<'a, 'p, 'blk, 'tcx>(
param_env: param_env,
};
enter_match(bcx, dm, m, col, val, |pats|
check_match::specialize(&mcx, pats.index(&FullRange), &ctor, col, variant_size)
check_match::specialize(&mcx, &pats[], &ctor, col, variant_size)
)
}

@ -789,8 +789,8 @@ fn compare_values<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
-> Result<'blk, 'tcx> {
let did = langcall(cx,
None,
format!("comparison of `{}`",
cx.ty_to_string(rhs_t)).index(&FullRange),
&format!("comparison of `{}`",
cx.ty_to_string(rhs_t))[],
StrEqFnLangItem);
callee::trans_lang_call(cx, did, &[lhs, rhs], None)
}
@ -945,7 +945,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
if has_nested_bindings(m, col) {
let expanded = expand_nested_bindings(bcx, m, col, val);
compile_submatch_continue(bcx,
expanded.index(&FullRange),
&expanded[],
vals,
chk,
col,
@ -967,7 +967,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
bcx = compile_guard(bcx,
&**guard_expr,
m[0].data,
m.index(&(1..m.len())),
&m[1..m.len()],
vals,
chk,
has_genuine_default);
@ -990,8 +990,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
let tcx = bcx.tcx();
let dm = &tcx.def_map;

let mut vals_left = vals.index(&(0u..col)).to_vec();
vals_left.push_all(vals.index(&((col + 1u)..)));
let mut vals_left = vals[0u..col].to_vec();
vals_left.push_all(&vals[(col + 1u)..]);
let ccx = bcx.fcx.ccx;

// Find a real id (we're adding placeholder wildcard patterns, but
@ -1191,10 +1191,10 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
}
let opt_ms = enter_opt(opt_cx, pat_id, dm, m, opt, col, size, val);
let mut opt_vals = unpacked;
opt_vals.push_all(vals_left.index(&FullRange));
opt_vals.push_all(&vals_left[]);
compile_submatch(opt_cx,
opt_ms.index(&FullRange),
opt_vals.index(&FullRange),
&opt_ms[],
&opt_vals[],
branch_chk.as_ref().unwrap_or(chk),
has_genuine_default);
}
@ -1213,8 +1213,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
}
_ => {
compile_submatch(else_cx,
defaults.index(&FullRange),
vals_left.index(&FullRange),
&defaults[],
&vals_left[],
chk,
has_genuine_default);
}
@ -1333,7 +1333,7 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &ast::Pat,
"__llmatch");
trmode = TrByCopy(alloca_no_lifetime(bcx,
llvariable_ty,
bcx.ident(ident).index(&FullRange)));
&bcx.ident(ident)[]));
}
ast::BindByValue(_) => {
// in this case, the final type of the variable will be T,
@ -1341,13 +1341,13 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &ast::Pat,
// above
llmatch = alloca_no_lifetime(bcx,
llvariable_ty.ptr_to(),
bcx.ident(ident).index(&FullRange));
&bcx.ident(ident)[]);
trmode = TrByMove;
}
ast::BindByRef(_) => {
llmatch = alloca_no_lifetime(bcx,
llvariable_ty,
bcx.ident(ident).index(&FullRange));
&bcx.ident(ident)[]);
trmode = TrByRef;
}
};
@ -1415,7 +1415,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>,
&& arm.pats.last().unwrap().node == ast::PatWild(ast::PatWildSingle)
});

compile_submatch(bcx, matches.index(&FullRange), &[discr_datum.val], &chk, has_default);
compile_submatch(bcx, &matches[], &[discr_datum.val], &chk, has_default);

let mut arm_cxs = Vec::new();
for arm_data in arm_datas.iter() {
@ -1429,7 +1429,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>,
arm_cxs.push(bcx);
}

bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs.index(&FullRange));
bcx = scope_cx.fcx.join_blocks(match_id, &arm_cxs[]);
return bcx;
}
@ -1582,7 +1582,7 @@ fn mk_binding_alloca<'blk, 'tcx, A, F>(bcx: Block<'blk, 'tcx>,
let var_ty = node_id_type(bcx, p_id);

// Allocate memory on stack for the binding.
let llval = alloc_ty(bcx, var_ty, bcx.ident(*ident).index(&FullRange));
let llval = alloc_ty(bcx, var_ty, &bcx.ident(*ident)[]);

// Subtle: be sure that we *populate* the memory *before*
// we schedule the cleanup.
@ -1619,8 +1619,8 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
pat.repr(bcx.tcx()));

if bcx.sess().asm_comments() {
add_comment(bcx, format!("bind_irrefutable_pat(pat={})",
pat.repr(bcx.tcx())).index(&FullRange));
add_comment(bcx, &format!("bind_irrefutable_pat(pat={})",
pat.repr(bcx.tcx()))[]);
}

let _indenter = indenter();
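For readers skimming the hunks above: the mechanical change throughout is the move from explicit `Index` calls such as `x.index(&FullRange)` and `x.index(&(a..b))` to slicing syntax (`&x[]` and `&x[a..b]`). A minimal standalone sketch of the idiom follows; the values are made up for illustration and are not taken from this patch, and note that in later Rust the full-range form is spelled `&x[..]` rather than `&x[]`.

    // Illustrative sketch only (hypothetical values, not code from this PR).
    fn main() {
        let msg = format!("linking with `{}` failed", "cc");
        let whole: &str = &msg[..];    // full-range slice; written `&msg[]` at the time of this PR
        let prefix: &str = &msg[0..7]; // sub-range slice; replaces the old `msg.index(&(0..7))`
        println!("{} / {}", whole, prefix);
    }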