auto merge of #7060 : huonw/rust/more-str, r=thestinger

There are now only half a dozen or so functions left in `std::str` that should be methods.

Highlights:
- `.substr` was removed, since most of its uses in the code base were actually incorrect (it mixed a byte index with a Unicode character count; see the illustration below). `.slice_chars` was added for slicing by character, and the normal `.slice` method handles slicing by byte.
- Code duplication between the two impls of `connect` and `concat` was removed via a new `Str` trait, designed purely to allow an explicit conversion to `&str` (`.as_slice()`).
- The 5 different functions for converting to `[u8]` (3 of which had incorrect documentation implying that the result lacked the null terminator) were consolidated into 3: `as_bytes` (all strings), `as_bytes_with_null` (`&'static str`, `@str` and `~str`) and `as_bytes_with_null_consume` (`~str`). None of these allocate, unlike the old versions. A rough before/after sketch of the overall migration follows.
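
As a sketch only (not code from the patch): the variable names and literals below are illustrative, and the calls mirror the post-patch methods that replace the old `std::str` free functions in the hunks further down.

```rust
fn main() {
    let name = "favorite";

    // str::replace(name, "a", "-") becomes a method call
    let dashed = name.replace("a", "-");
    // str::to_owned(name) becomes .to_owned()
    let owned = name.to_owned();
    // str::to_bytes(name) allocated a ~[u8]; .as_bytes() is a non-allocating view
    let bytes = name.as_bytes();
    // call .to_owned() on the view only when an owned ~[u8] is actually needed
    let buf = name.as_bytes().to_owned();
    // str::repeat(" ", 4) becomes " ".repeat(4)
    let indent = " ".repeat(4);

    assert_eq!(dashed, ~"f-vorite");
    assert_eq!(owned.len(), bytes.len());
    assert_eq!(buf.len(), 8);
    assert_eq!(indent.len(), 4);
}
```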

(cc @thestinger)
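
The first bullet is the subtle one: the old `.substr(begin, len)` took a byte index for `begin` but a character count for `len`, which only agrees with itself on ASCII text. A minimal illustration (again not from the patch, assuming the slicing semantics shown in the hunks below; the string literal is just an example):

```rust
fn main() {
    let s = "héllo";              // 'é' is two bytes in UTF-8 but one character

    assert_eq!(s.len(), 6);        // .len() counts bytes
    assert_eq!(s.char_len(), 5);   // .char_len() counts characters

    // .slice() is byte-indexed and must land on character boundaries
    assert_eq!(s.slice(0, 3), "hé");
    // .slice_chars() is character-indexed
    assert_eq!(s.slice_chars(0, 3), "hél");
}
```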
bors 2013-06-11 21:37:43 -07:00
commit cc80652e4a
80 changed files with 818 additions and 789 deletions


@ -1410,7 +1410,7 @@ let new_favorite_crayon_name = favorite_crayon_name.trim();
if favorite_crayon_name.len() > 5 { if favorite_crayon_name.len() > 5 {
// Create a substring // Create a substring
println(favorite_crayon_name.substr(0, 5)); println(favorite_crayon_name.slice_chars(0, 5));
} }
~~~ ~~~


@ -171,8 +171,8 @@ fn run_pretty_test(config: &config, props: &TestProps, testfile: &Path) {
if props.pp_exact.is_some() { if props.pp_exact.is_some() {
// Now we have to care about line endings // Now we have to care about line endings
let cr = ~"\r"; let cr = ~"\r";
actual = str::replace(actual, cr, ""); actual = actual.replace(cr, "");
expected = str::replace(expected, cr, ""); expected = expected.replace(cr, "");
} }
compare_source(expected, actual); compare_source(expected, actual);
@ -238,7 +238,7 @@ fn run_debuginfo_test(config: &config, props: &TestProps, testfile: &Path) {
// do not optimize debuginfo tests // do not optimize debuginfo tests
let mut config = match config.rustcflags { let mut config = match config.rustcflags {
Some(ref flags) => config { Some(ref flags) => config {
rustcflags: Some(str::replace(*flags, "-O", "")), rustcflags: Some(flags.replace("-O", "")),
.. copy *config .. copy *config
}, },
None => copy *config None => copy *config
@ -254,7 +254,7 @@ fn run_debuginfo_test(config: &config, props: &TestProps, testfile: &Path) {
} }
// write debugger script // write debugger script
let script_str = str::append(cmds, "\nquit\n"); let script_str = cmds.append("\nquit\n");
debug!("script_str = %s", script_str); debug!("script_str = %s", script_str);
dump_output_file(config, testfile, script_str, "debugger.script"); dump_output_file(config, testfile, script_str, "debugger.script");


@ -12,7 +12,6 @@
use core::prelude::*; use core::prelude::*;
use core::str;
use core::vec; use core::vec;
/// A trait for converting a value to base64 encoding. /// A trait for converting a value to base64 encoding.
@ -111,7 +110,7 @@ impl<'self> ToBase64 for &'self str {
* *
*/ */
fn to_base64(&self) -> ~str { fn to_base64(&self) -> ~str {
str::to_bytes(*self).to_base64() self.as_bytes().to_base64()
} }
} }
@ -224,7 +223,7 @@ impl<'self> FromBase64 for &'self str {
* ~~~ * ~~~
*/ */
fn from_base64(&self) -> ~[u8] { fn from_base64(&self) -> ~[u8] {
str::to_bytes(*self).from_base64() self.as_bytes().from_base64()
} }
} }
@ -245,12 +244,12 @@ mod tests {
#[test] #[test]
fn test_from_base64() { fn test_from_base64() {
assert_eq!("".from_base64(), str::to_bytes("")); assert_eq!("".from_base64(), "".as_bytes().to_owned());
assert_eq!("Zg==".from_base64(), str::to_bytes("f")); assert_eq!("Zg==".from_base64(), "f".as_bytes().to_owned());
assert_eq!("Zm8=".from_base64(), str::to_bytes("fo")); assert_eq!("Zm8=".from_base64(), "fo".as_bytes().to_owned());
assert_eq!("Zm9v".from_base64(), str::to_bytes("foo")); assert_eq!("Zm9v".from_base64(), "foo".as_bytes().to_owned());
assert_eq!("Zm9vYg==".from_base64(), str::to_bytes("foob")); assert_eq!("Zm9vYg==".from_base64(), "foob".as_bytes().to_owned());
assert_eq!("Zm9vYmE=".from_base64(), str::to_bytes("fooba")) assert_eq!("Zm9vYmE=".from_base64(), "fooba".as_bytes().to_owned());
assert_eq!("Zm9vYmFy".from_base64(), str::to_bytes("foobar")); assert_eq!("Zm9vYmFy".from_base64(), "foobar".as_bytes().to_owned());
} }
} }


@ -607,7 +607,6 @@ pub mod writer {
use core::cast; use core::cast;
use core::io; use core::io;
use core::str;
// ebml writing // ebml writing
pub struct Encoder { pub struct Encoder {
@ -725,7 +724,7 @@ pub mod writer {
} }
pub fn wr_tagged_str(&mut self, tag_id: uint, v: &str) { pub fn wr_tagged_str(&mut self, tag_id: uint, v: &str) {
str::byte_slice(v, |b| self.wr_tagged_bytes(tag_id, b)); self.wr_tagged_bytes(tag_id, v.as_bytes());
} }
pub fn wr_bytes(&mut self, b: &[u8]) { pub fn wr_bytes(&mut self, b: &[u8]) {
@ -735,7 +734,7 @@ pub mod writer {
pub fn wr_str(&mut self, s: &str) { pub fn wr_str(&mut self, s: &str) {
debug!("Write str: %?", s); debug!("Write str: %?", s);
self.writer.write(str::to_bytes(s)); self.writer.write(s.as_bytes());
} }
} }


@ -487,7 +487,7 @@ mod test {
let mut buf : ~[u8] = vec::from_elem(6, 0u8); let mut buf : ~[u8] = vec::from_elem(6, 0u8);
let count = fi.read(buf, 10); let count = fi.read(buf, 10);
assert_eq!(count, 6); assert_eq!(count, 6);
assert_eq!(buf, "0\n1\n2\n".to_bytes()); assert_eq!(buf, "0\n1\n2\n".as_bytes().to_owned());
assert!(fi.eof()) assert!(fi.eof())
assert_eq!(fi.state().line_num, 3); assert_eq!(fi.state().line_num, 3);
} }


@ -450,7 +450,7 @@ pub mod flatteners {
T: Decodable<D>>( T: Decodable<D>>(
buf: &[u8]) buf: &[u8])
-> T { -> T {
let buf = vec::to_owned(buf); let buf = buf.to_owned();
let buf_reader = @BufReader::new(buf); let buf_reader = @BufReader::new(buf);
let reader = buf_reader as @Reader; let reader = buf_reader as @Reader;
let mut deser: D = FromReader::from_reader(reader); let mut deser: D = FromReader::from_reader(reader);


@ -345,7 +345,7 @@ pub fn getopts(args: &[~str], opts: &[Opt]) -> Result {
} }
i += 1; i += 1;
} }
return Ok(Matches {opts: vec::to_owned(opts), return Ok(Matches {opts: opts.to_owned(),
vals: vals, vals: vals,
free: free}); free: free});
} }
@ -447,7 +447,7 @@ pub fn opt_default(mm: &Matches, nm: &str, def: &str) -> Option<~str> {
let vals = opt_vals(mm, nm); let vals = opt_vals(mm, nm);
if vals.is_empty() { return None::<~str>; } if vals.is_empty() { return None::<~str>; }
return match vals[0] { Val(ref s) => Some::<~str>(copy *s), return match vals[0] { Val(ref s) => Some::<~str>(copy *s),
_ => Some::<~str>(str::to_owned(def)) } _ => Some::<~str>(def.to_owned()) }
} }
#[deriving(Eq)] #[deriving(Eq)]
@ -487,10 +487,10 @@ pub mod groups {
desc: &str, hint: &str) -> OptGroup { desc: &str, hint: &str) -> OptGroup {
let len = short_name.len(); let len = short_name.len();
assert!(len == 1 || len == 0); assert!(len == 1 || len == 0);
return OptGroup { short_name: str::to_owned(short_name), return OptGroup { short_name: short_name.to_owned(),
long_name: str::to_owned(long_name), long_name: long_name.to_owned(),
hint: str::to_owned(hint), hint: hint.to_owned(),
desc: str::to_owned(desc), desc: desc.to_owned(),
hasarg: Yes, hasarg: Yes,
occur: Req}; occur: Req};
} }
@ -500,10 +500,10 @@ pub mod groups {
desc: &str, hint: &str) -> OptGroup { desc: &str, hint: &str) -> OptGroup {
let len = short_name.len(); let len = short_name.len();
assert!(len == 1 || len == 0); assert!(len == 1 || len == 0);
return OptGroup {short_name: str::to_owned(short_name), return OptGroup {short_name: short_name.to_owned(),
long_name: str::to_owned(long_name), long_name: long_name.to_owned(),
hint: str::to_owned(hint), hint: hint.to_owned(),
desc: str::to_owned(desc), desc: desc.to_owned(),
hasarg: Yes, hasarg: Yes,
occur: Optional}; occur: Optional};
} }
@ -513,10 +513,10 @@ pub mod groups {
desc: &str) -> OptGroup { desc: &str) -> OptGroup {
let len = short_name.len(); let len = short_name.len();
assert!(len == 1 || len == 0); assert!(len == 1 || len == 0);
return OptGroup {short_name: str::to_owned(short_name), return OptGroup {short_name: short_name.to_owned(),
long_name: str::to_owned(long_name), long_name: long_name.to_owned(),
hint: ~"", hint: ~"",
desc: str::to_owned(desc), desc: desc.to_owned(),
hasarg: No, hasarg: No,
occur: Optional}; occur: Optional};
} }
@ -526,10 +526,10 @@ pub mod groups {
desc: &str, hint: &str) -> OptGroup { desc: &str, hint: &str) -> OptGroup {
let len = short_name.len(); let len = short_name.len();
assert!(len == 1 || len == 0); assert!(len == 1 || len == 0);
return OptGroup {short_name: str::to_owned(short_name), return OptGroup {short_name: short_name.to_owned(),
long_name: str::to_owned(long_name), long_name: long_name.to_owned(),
hint: str::to_owned(hint), hint: hint.to_owned(),
desc: str::to_owned(desc), desc: desc.to_owned(),
hasarg: Maybe, hasarg: Maybe,
occur: Optional}; occur: Optional};
} }
@ -542,10 +542,10 @@ pub mod groups {
desc: &str, hint: &str) -> OptGroup { desc: &str, hint: &str) -> OptGroup {
let len = short_name.len(); let len = short_name.len();
assert!(len == 1 || len == 0); assert!(len == 1 || len == 0);
return OptGroup {short_name: str::to_owned(short_name), return OptGroup {short_name: short_name.to_owned(),
long_name: str::to_owned(long_name), long_name: long_name.to_owned(),
hint: str::to_owned(hint), hint: hint.to_owned(),
desc: str::to_owned(desc), desc: desc.to_owned(),
hasarg: Yes, hasarg: Yes,
occur: Multi}; occur: Multi};
} }
@ -593,7 +593,7 @@ pub mod groups {
*/ */
pub fn usage(brief: &str, opts: &[OptGroup]) -> ~str { pub fn usage(brief: &str, opts: &[OptGroup]) -> ~str {
let desc_sep = ~"\n" + str::repeat(" ", 24); let desc_sep = ~"\n" + " ".repeat(24);
let rows = vec::map(opts, |optref| { let rows = vec::map(opts, |optref| {
let OptGroup{short_name: short_name, let OptGroup{short_name: short_name,
@ -603,7 +603,7 @@ pub mod groups {
hasarg: hasarg, hasarg: hasarg,
_} = copy *optref; _} = copy *optref;
let mut row = str::repeat(" ", 4); let mut row = " ".repeat(4);
// short option // short option
row += match short_name.len() { row += match short_name.len() {
@ -629,7 +629,7 @@ pub mod groups {
// here we just need to indent the start of the description // here we just need to indent the start of the description
let rowlen = row.len(); let rowlen = row.len();
row += if rowlen < 24 { row += if rowlen < 24 {
str::repeat(" ", 24 - rowlen) " ".repeat(24 - rowlen)
} else { } else {
copy desc_sep copy desc_sep
}; };
@ -654,7 +654,7 @@ pub mod groups {
row row
}); });
return str::to_owned(brief) + return brief.to_owned() +
"\n\nOptions:\n" + "\n\nOptions:\n" +
rows.connect("\n") + rows.connect("\n") +
"\n\n"; "\n\n";


@ -10,7 +10,6 @@
use core::prelude::*; use core::prelude::*;
use core::str;
use core::uint; use core::uint;
use core::vec; use core::vec;
@ -30,7 +29,7 @@ pub fn md4(msg: &[u8]) -> Quad {
let orig_len: u64 = (msg.len() * 8u) as u64; let orig_len: u64 = (msg.len() * 8u) as u64;
// pad message // pad message
let mut msg = vec::append(vec::to_owned(msg), [0x80u8]); let mut msg = vec::append(msg.to_owned(), [0x80u8]);
let mut bitlen = orig_len + 8u64; let mut bitlen = orig_len + 8u64;
while (bitlen + 64u64) % 512u64 > 0u64 { while (bitlen + 64u64) % 512u64 > 0u64 {
msg.push(0u8); msg.push(0u8);
@ -129,7 +128,7 @@ pub fn md4_str(msg: &[u8]) -> ~str {
/// Calculates the md4 hash of a string, returning the hex-encoded version of /// Calculates the md4 hash of a string, returning the hex-encoded version of
/// the hash /// the hash
pub fn md4_text(msg: &str) -> ~str { md4_str(str::to_bytes(msg)) } pub fn md4_text(msg: &str) -> ~str { md4_str(msg.as_bytes()) }
#[test] #[test]
fn test_md4() { fn test_md4() {


@ -1636,7 +1636,7 @@ mod test {
assert_eq!(net::ip::get_port(&sock.get_peer_addr()), 8887); assert_eq!(net::ip::get_port(&sock.get_peer_addr()), 8887);
// Fulfill the protocol the test server expects // Fulfill the protocol the test server expects
let resp_bytes = str::to_bytes("ping"); let resp_bytes = "ping".as_bytes().to_owned();
tcp_write_single(&sock, resp_bytes); tcp_write_single(&sock, resp_bytes);
debug!("message sent"); debug!("message sent");
sock.read(0u); sock.read(0u);
@ -1756,9 +1756,7 @@ mod test {
buf_write(sock_buf, expected_req); buf_write(sock_buf, expected_req);
// so contrived! // so contrived!
let actual_resp = do str::as_bytes(&expected_resp.to_str()) |resp_buf| { let actual_resp = buf_read(sock_buf, expected_resp.as_bytes().len());
buf_read(sock_buf, resp_buf.len())
};
let actual_req = server_result_po.recv(); let actual_req = server_result_po.recv();
debug!("REQ: expected: '%s' actual: '%s'", debug!("REQ: expected: '%s' actual: '%s'",
@ -1810,11 +1808,10 @@ mod test {
fn buf_write<W:io::Writer>(w: &W, val: &str) { fn buf_write<W:io::Writer>(w: &W, val: &str) {
debug!("BUF_WRITE: val len %?", val.len()); debug!("BUF_WRITE: val len %?", val.len());
do str::byte_slice(val) |b_slice| { let b_slice = val.as_bytes();
debug!("BUF_WRITE: b_slice len %?", debug!("BUF_WRITE: b_slice len %?",
b_slice.len()); b_slice.len());
w.write(b_slice) w.write(b_slice)
}
} }
fn buf_read<R:io::Reader>(r: &R, len: uint) -> ~str { fn buf_read<R:io::Reader>(r: &R, len: uint) -> ~str {
@ -1877,7 +1874,8 @@ mod test {
server_ch.send( server_ch.send(
str::from_bytes(data)); str::from_bytes(data));
debug!("SERVER: before write"); debug!("SERVER: before write");
tcp_write_single(&sock, str::to_bytes(resp_cell2.take())); let s = resp_cell2.take();
tcp_write_single(&sock, s.as_bytes().to_owned());
debug!("SERVER: after write.. die"); debug!("SERVER: after write.. die");
kill_ch.send(None); kill_ch.send(None);
} }
@ -1949,7 +1947,7 @@ mod test {
} }
else { else {
let sock = result::unwrap(connect_result); let sock = result::unwrap(connect_result);
let resp_bytes = str::to_bytes(resp); let resp_bytes = resp.as_bytes().to_owned();
tcp_write_single(&sock, resp_bytes); tcp_write_single(&sock, resp_bytes);
let read_result = sock.read(0u); let read_result = sock.read(0u);
if read_result.is_err() { if read_result.is_err() {


@ -1060,7 +1060,7 @@ mod tests {
/* /*
assert_eq!(decode_form_urlencoded([]).len(), 0); assert_eq!(decode_form_urlencoded([]).len(), 0);
let s = str::to_bytes("a=1&foo+bar=abc&foo+bar=12+%3D+34"); let s = "a=1&foo+bar=abc&foo+bar=12+%3D+34".as_bytes();
let form = decode_form_urlencoded(s); let form = decode_form_urlencoded(s);
assert_eq!(form.len(), 2); assert_eq!(form.len(), 2);
assert_eq!(form.get_ref(&~"a"), &~[~"1"]); assert_eq!(form.get_ref(&~"a"), &~[~"1"]);


@ -524,7 +524,7 @@ impl ToStrRadix for BigUint {
let s = uint::to_str_radix(*n as uint, radix); let s = uint::to_str_radix(*n as uint, radix);
str::from_chars(vec::from_elem(l - s.len(), '0')) + s str::from_chars(vec::from_elem(l - s.len(), '0')) + s
}).concat(); }).concat();
s.trim_left_chars(['0']).to_owned() s.trim_left_chars(&'0').to_owned()
} }
} }
} }
@ -534,7 +534,7 @@ impl FromStrRadix for BigUint {
pub fn from_str_radix(s: &str, radix: uint) pub fn from_str_radix(s: &str, radix: uint)
-> Option<BigUint> { -> Option<BigUint> {
BigUint::parse_bytes(str::to_bytes(s), radix) BigUint::parse_bytes(s.as_bytes(), radix)
} }
} }
@ -564,7 +564,7 @@ impl BigUint {
/// Creates and initializes an BigUint. /// Creates and initializes an BigUint.
pub fn from_slice(slice: &[BigDigit]) -> BigUint { pub fn from_slice(slice: &[BigDigit]) -> BigUint {
return BigUint::new(vec::to_owned(slice)); return BigUint::new(slice.to_owned());
} }
/// Creates and initializes an BigUint. /// Creates and initializes an BigUint.
@ -1090,7 +1090,7 @@ impl FromStrRadix for BigInt {
fn from_str_radix(s: &str, radix: uint) fn from_str_radix(s: &str, radix: uint)
-> Option<BigInt> { -> Option<BigInt> {
BigInt::parse_bytes(str::to_bytes(s), radix) BigInt::parse_bytes(s.as_bytes(), radix)
} }
} }


@ -84,9 +84,9 @@ pub fn of_str(str: @~str) -> Rope {
* *
* # Return value * # Return value
* *
* A rope representing the same string as `str.substr(byte_offset, * A rope representing the same string as `str.slice(byte_offset,
* byte_len)`. Depending on `byte_len`, this rope may be empty, flat * byte_offset + byte_len)`. Depending on `byte_len`, this rope may
* or complex. * be empty, flat or complex.
* *
* # Performance note * # Performance note
* *
@ -564,7 +564,6 @@ pub mod node {
use rope::node; use rope::node;
use core::cast; use core::cast;
use core::str;
use core::uint; use core::uint;
use core::vec; use core::vec;
@ -588,7 +587,7 @@ pub mod node {
* * char_len - The number of chars in the leaf. * * char_len - The number of chars in the leaf.
* * content - Contents of the leaf. * * content - Contents of the leaf.
* *
* Note that we can have `char_len < str::char_len(content)`, if * Note that we can have `char_len < content.char_len()`, if
* this leaf is only a subset of the string. Also note that the * this leaf is only a subset of the string. Also note that the
* string can be shared between several ropes, e.g. for indexing * string can be shared between several ropes, e.g. for indexing
* purposes. * purposes.
@ -680,7 +679,7 @@ pub mod node {
*/ */
pub fn of_substr(str: @~str, byte_start: uint, byte_len: uint) -> @Node { pub fn of_substr(str: @~str, byte_start: uint, byte_len: uint) -> @Node {
return of_substr_unsafer(str, byte_start, byte_len, return of_substr_unsafer(str, byte_start, byte_len,
str::count_chars(*str, byte_start, byte_len)); str.slice(byte_start, byte_start + byte_len).char_len());
} }
/** /**
@ -734,7 +733,7 @@ pub mod node {
if i == 0u { first_leaf_char_len } if i == 0u { first_leaf_char_len }
else { hint_max_leaf_char_len }; else { hint_max_leaf_char_len };
let chunk_byte_len = let chunk_byte_len =
str::count_bytes(*str, offset, chunk_char_len); str.slice_from(offset).slice_chars(0, chunk_char_len).len();
nodes[i] = @Leaf(Leaf { nodes[i] = @Leaf(Leaf {
byte_offset: offset, byte_offset: offset,
byte_len: chunk_byte_len, byte_len: chunk_byte_len,
@ -938,7 +937,7 @@ pub mod node {
match (*node) { match (*node) {
node::Leaf(x) => { node::Leaf(x) => {
let char_len = let char_len =
str::count_chars(*x.content, byte_offset, byte_len); x.content.slice(byte_offset, byte_offset + byte_len).char_len();
return @Leaf(Leaf { return @Leaf(Leaf {
byte_offset: byte_offset, byte_offset: byte_offset,
byte_len: byte_len, byte_len: byte_len,
@ -1002,9 +1001,9 @@ pub mod node {
return node; return node;
} }
let byte_offset = let byte_offset =
str::count_bytes(*x.content, 0u, char_offset); x.content.slice_chars(0, char_offset).len();
let byte_len = let byte_len =
str::count_bytes(*x.content, byte_offset, char_len); x.content.slice_from(byte_offset).slice_chars(0, char_len).len();
return @Leaf(Leaf { return @Leaf(Leaf {
byte_offset: byte_offset, byte_offset: byte_offset,
byte_len: byte_len, byte_len: byte_len,
@ -1312,7 +1311,7 @@ mod tests {
let sample = @~"0123456789ABCDE"; let sample = @~"0123456789ABCDE";
let r = of_str(sample); let r = of_str(sample);
assert_eq!(char_len(r), str::char_len(*sample)); assert_eq!(char_len(r), sample.char_len());
assert!(rope_to_string(r) == *sample); assert!(rope_to_string(r) == *sample);
} }
@ -1328,7 +1327,7 @@ mod tests {
} }
let sample = @copy *buf; let sample = @copy *buf;
let r = of_str(sample); let r = of_str(sample);
assert!(char_len(r) == str::char_len(*sample)); assert_eq!(char_len(r), sample.char_len());
assert!(rope_to_string(r) == *sample); assert!(rope_to_string(r) == *sample);
let mut string_iter = 0u; let mut string_iter = 0u;
@ -1374,7 +1373,7 @@ mod tests {
} }
} }
assert_eq!(len, str::char_len(*sample)); assert_eq!(len, sample.char_len());
} }
#[test] #[test]


@ -25,7 +25,6 @@
use core::prelude::*; use core::prelude::*;
use core::iterator::IteratorUtil; use core::iterator::IteratorUtil;
use core::str;
use core::uint; use core::uint;
use core::vec; use core::vec;
@ -246,8 +245,7 @@ pub fn sha1() -> @Sha1 {
} }
fn input(&mut self, msg: &const [u8]) { add_input(self, msg); } fn input(&mut self, msg: &const [u8]) { add_input(self, msg); }
fn input_str(&mut self, msg: &str) { fn input_str(&mut self, msg: &str) {
let bs = str::to_bytes(msg); add_input(self, msg.as_bytes());
add_input(self, bs);
} }
fn result(&mut self) -> ~[u8] { return mk_result(self); } fn result(&mut self) -> ~[u8] { return mk_result(self); }
fn result_str(&mut self) -> ~str { fn result_str(&mut self) -> ~str {


@ -13,7 +13,6 @@
use core::prelude::*; use core::prelude::*;
use core::iterator::*; use core::iterator::*;
use core::vec;
use core::f64; use core::f64;
use core::cmp; use core::cmp;
use core::num; use core::num;
@ -57,7 +56,7 @@ impl<'self> Stats for &'self [f64] {
fn median(self) -> f64 { fn median(self) -> f64 {
assert!(self.len() != 0); assert!(self.len() != 0);
let mut tmp = vec::to_owned(self); let mut tmp = self.to_owned();
sort::tim_sort(tmp); sort::tim_sort(tmp);
if tmp.len() & 1 == 0 { if tmp.len() & 1 == 0 {
let m = tmp.len() / 2; let m = tmp.len() / 2;


@ -85,11 +85,14 @@ pub fn expand(cap: &[u8], params: &mut [Param], sta: &mut [Param], dyn: &mut [Pa
_ => return Err(~"a non-char was used with %c") _ => return Err(~"a non-char was used with %c")
}, },
's' => match stack.pop() { 's' => match stack.pop() {
String(s) => output.push_all(s.to_bytes()), String(s) => output.push_all(s.as_bytes()),
_ => return Err(~"a non-str was used with %s") _ => return Err(~"a non-str was used with %s")
}, },
'd' => match stack.pop() { 'd' => match stack.pop() {
Number(x) => output.push_all(x.to_str().to_bytes()), Number(x) => {
let s = x.to_str();
output.push_all(s.as_bytes())
}
_ => return Err(~"a non-number was used with %d") _ => return Err(~"a non-number was used with %d")
}, },
'p' => state = PushParam, 'p' => state = PushParam,


@ -12,7 +12,7 @@
/// Does not support hashed database, only filesystem! /// Does not support hashed database, only filesystem!
use core::prelude::*; use core::prelude::*;
use core::{os}; use core::{os, str};
use core::os::getenv; use core::os::getenv;
use core::io::{file_reader, Reader}; use core::io::{file_reader, Reader};
use core::iterator::IteratorUtil; use core::iterator::IteratorUtil;
@ -27,7 +27,7 @@ pub fn get_dbpath_for_term(term: &str) -> Option<~path> {
let homedir = os::homedir(); let homedir = os::homedir();
let mut dirs_to_search = ~[]; let mut dirs_to_search = ~[];
let first_char = term.substr(0, 1); let first_char = term.char_at(0);
// Find search directory // Find search directory
match getenv("TERMINFO") { match getenv("TERMINFO") {
@ -57,12 +57,12 @@ pub fn get_dbpath_for_term(term: &str) -> Option<~path> {
// Look for the terminal in all of the search directories // Look for the terminal in all of the search directories
for dirs_to_search.each |p| { for dirs_to_search.each |p| {
let newp = ~p.push_many(&[first_char.to_owned(), term.to_owned()]); let newp = ~p.push_many(&[str::from_char(first_char), term.to_owned()]);
if os::path_exists(p) && os::path_exists(newp) { if os::path_exists(p) && os::path_exists(newp) {
return Some(newp); return Some(newp);
} }
// on some installations the dir is named after the hex of the char (e.g. OS X) // on some installations the dir is named after the hex of the char (e.g. OS X)
let newp = ~p.push_many(&[fmt!("%x", first_char[0] as uint), term.to_owned()]); let newp = ~p.push_many(&[fmt!("%x", first_char as uint), term.to_owned()]);
if os::path_exists(p) && os::path_exists(newp) { if os::path_exists(p) && os::path_exists(newp) {
return Some(newp); return Some(newp);
} }


@ -1029,7 +1029,7 @@ mod tests {
fn test(s: &str, format: &str) -> bool { fn test(s: &str, format: &str) -> bool {
match strptime(s, format) { match strptime(s, format) {
Ok(ref tm) => tm.strftime(format) == str::to_owned(s), Ok(ref tm) => tm.strftime(format) == s.to_owned(),
Err(e) => fail!(e) Err(e) => fail!(e)
} }
} }


@ -769,10 +769,10 @@ mod test_treemap {
fn u8_map() { fn u8_map() {
let mut m = TreeMap::new(); let mut m = TreeMap::new();
let k1 = str::to_bytes("foo"); let k1 = "foo".as_bytes();
let k2 = str::to_bytes("bar"); let k2 = "bar".as_bytes();
let v1 = str::to_bytes("baz"); let v1 = "baz".as_bytes();
let v2 = str::to_bytes("foobar"); let v2 = "foobar".as_bytes();
m.insert(copy k1, copy v1); m.insert(copy k1, copy v1);
m.insert(copy k2, copy v2); m.insert(copy k2, copy v2);


@ -1368,7 +1368,7 @@ mod test {
// In C, this would be a malloc'd or stack-allocated // In C, this would be a malloc'd or stack-allocated
// struct that we'd cast to a void* and store as the // struct that we'd cast to a void* and store as the
// data field in our uv_connect_t struct // data field in our uv_connect_t struct
let req_str_bytes = str::to_bytes(req_str); let req_str_bytes = req_str.as_bytes();
let req_msg_ptr: *u8 = vec::raw::to_ptr(req_str_bytes); let req_msg_ptr: *u8 = vec::raw::to_ptr(req_str_bytes);
debug!("req_msg ptr: %u", req_msg_ptr as uint); debug!("req_msg ptr: %u", req_msg_ptr as uint);
let req_msg = ~[ let req_msg = ~[
@ -1626,7 +1626,7 @@ mod test {
let server_write_req = write_t(); let server_write_req = write_t();
let server_write_req_ptr: *uv_write_t = &server_write_req; let server_write_req_ptr: *uv_write_t = &server_write_req;
let resp_str_bytes = str::to_bytes(server_resp_msg); let resp_str_bytes = server_resp_msg.as_bytes();
let resp_msg_ptr: *u8 = vec::raw::to_ptr(resp_str_bytes); let resp_msg_ptr: *u8 = vec::raw::to_ptr(resp_str_bytes);
debug!("resp_msg ptr: %u", resp_msg_ptr as uint); debug!("resp_msg ptr: %u", resp_msg_ptr as uint);
let resp_msg = ~[ let resp_msg = ~[


@ -328,7 +328,7 @@ pub fn check_variants_T<T:Copy>(crate: @ast::crate,
if L < 100 { if L < 100 {
do under(uint::min(L, 20)) |i| { do under(uint::min(L, 20)) |i| {
error!("Replacing... #%?", uint::to_str(i)); error!("Replacing... #%?", uint::to_str(i));
let fname = str::to_owned(filename.to_str()); let fname = filename.to_str();
do under(uint::min(L, 30)) |j| { do under(uint::min(L, 30)) |j| {
let fname = fname.to_str(); let fname = fname.to_str();
error!("With... %?", stringifier(things[j], intr)); error!("With... %?", stringifier(things[j], intr));


@ -36,7 +36,6 @@ use core::io;
use core::os; use core::os;
use core::run; use core::run;
use core::libc::exit; use core::libc::exit;
use core::str;
// For bootstrapping. // For bootstrapping.
mod std { mod std {
@ -225,7 +224,7 @@ fn usage() {
); );
for commands.each |command| { for commands.each |command| {
let padding = str::repeat(" ", indent - command.cmd.len()); let padding = " ".repeat(indent - command.cmd.len());
io::println(fmt!(" %s%s%s", io::println(fmt!(" %s%s%s",
command.cmd, padding, command.usage_line)); command.cmd, padding, command.usage_line));
} }


@ -50,8 +50,7 @@ pub enum output_type {
} }
fn write_string<W:Writer>(writer: &mut W, string: &str) { fn write_string<W:Writer>(writer: &mut W, string: &str) {
let buffer = str::as_bytes_slice(string); writer.write(string.as_bytes());
writer.write(buffer);
} }
pub fn llvm_err(sess: Session, msg: ~str) -> ! { pub fn llvm_err(sess: Session, msg: ~str) -> ! {
@ -637,7 +636,7 @@ pub fn symbol_hash(tcx: ty::ctxt,
write_string(symbol_hasher, encoder::encoded_ty(tcx, t)); write_string(symbol_hasher, encoder::encoded_ty(tcx, t));
let mut hash = truncated_hash_result(symbol_hasher); let mut hash = truncated_hash_result(symbol_hasher);
// Prefix with _ so that it never blends into adjacent digits // Prefix with _ so that it never blends into adjacent digits
str::unshift_char(&mut hash, '_'); hash.unshift_char('_');
// tjc: allocation is unfortunate; need to change core::hash // tjc: allocation is unfortunate; need to change core::hash
hash.to_managed() hash.to_managed()
} }


@ -29,7 +29,6 @@ use core::hashmap::HashMap;
use core::int; use core::int;
use core::io; use core::io;
use core::os; use core::os;
use core::str;
use core::vec; use core::vec;
use extra::getopts::groups::{optopt, optmulti, optflag, optflagopt}; use extra::getopts::groups::{optopt, optmulti, optflag, optflagopt};
use extra::getopts::{opt_present}; use extra::getopts::{opt_present};
@ -96,9 +95,9 @@ pub fn default_configuration(sess: Session, argv0: @~str, input: &input) ->
}; };
return ~[ // Target bindings. return ~[ // Target bindings.
attr::mk_word_item(@str::to_owned(os::FAMILY)), attr::mk_word_item(@os::FAMILY.to_owned()),
mk(@~"target_os", @tos), mk(@~"target_os", @tos),
mk(@~"target_family", @str::to_owned(os::FAMILY)), mk(@~"target_family", @os::FAMILY.to_owned()),
mk(@~"target_arch", @arch), mk(@~"target_arch", @arch),
mk(@~"target_endian", @end), mk(@~"target_endian", @end),
mk(@~"target_word_size", @wordsz), mk(@~"target_word_size", @wordsz),
@ -590,12 +589,12 @@ pub fn build_session_options(binary: @~str,
// FIXME: #4318 Instead of to_ascii and to_str_ascii, could use // FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
// to_ascii_consume and to_str_consume to not do a unnecessary copy. // to_ascii_consume and to_str_consume to not do a unnecessary copy.
let level_short = level_name.substr(0,1); let level_short = level_name.slice_chars(0, 1);
let level_short = level_short.to_ascii().to_upper().to_str_ascii(); let level_short = level_short.to_ascii().to_upper().to_str_ascii();
let flags = vec::append(getopts::opt_strs(matches, level_short), let flags = vec::append(getopts::opt_strs(matches, level_short),
getopts::opt_strs(matches, level_name)); getopts::opt_strs(matches, level_name));
for flags.each |lint_name| { for flags.each |lint_name| {
let lint_name = str::replace(*lint_name, "-", "_"); let lint_name = lint_name.replace("-", "_");
match lint_dict.find(&lint_name) { match lint_dict.find(&lint_name) {
None => { None => {
early_error(demitter, fmt!("unknown %s flag: %s", early_error(demitter, fmt!("unknown %s flag: %s",


@ -570,7 +570,7 @@ pub fn maybe_get_item_ast(cdata: cmd, tcx: ty::ctxt,
let item_doc = lookup_item(id, cdata.data); let item_doc = lookup_item(id, cdata.data);
let path = { let path = {
let item_path = item_path(item_doc); let item_path = item_path(item_doc);
vec::to_owned(item_path.init()) item_path.init().to_owned()
}; };
match decode_inlined_item(cdata, tcx, copy path, item_doc) { match decode_inlined_item(cdata, tcx, copy path, item_doc) {
Some(ref ii) => csearch::found((/*bad*/copy *ii)), Some(ref ii) => csearch::found((/*bad*/copy *ii)),


@ -202,7 +202,8 @@ fn encode_type_param_bounds(ebml_w: &mut writer::Encoder,
fn encode_variant_id(ebml_w: &mut writer::Encoder, vid: def_id) { fn encode_variant_id(ebml_w: &mut writer::Encoder, vid: def_id) {
ebml_w.start_tag(tag_items_data_item_variant); ebml_w.start_tag(tag_items_data_item_variant);
ebml_w.writer.write(str::to_bytes(def_to_str(vid))); let s = def_to_str(vid);
ebml_w.writer.write(s.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -271,7 +272,7 @@ fn encode_symbol(ecx: @EncodeContext,
match ecx.item_symbols.find(&id) { match ecx.item_symbols.find(&id) {
Some(x) => { Some(x) => {
debug!("encode_symbol(id=%?, str=%s)", id, *x); debug!("encode_symbol(id=%?, str=%s)", id, *x);
ebml_w.writer.write(str::to_bytes(*x)); ebml_w.writer.write(x.as_bytes());
} }
None => { None => {
ecx.diag.handler().bug( ecx.diag.handler().bug(
@ -285,7 +286,7 @@ fn encode_discriminant(ecx: @EncodeContext,
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
id: node_id) { id: node_id) {
ebml_w.start_tag(tag_items_data_item_symbol); ebml_w.start_tag(tag_items_data_item_symbol);
ebml_w.writer.write(str::to_bytes(*ecx.discrim_symbols.get_copy(&id))); ebml_w.writer.write(ecx.discrim_symbols.get_copy(&id).as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -293,13 +294,15 @@ fn encode_disr_val(_: @EncodeContext,
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
disr_val: int) { disr_val: int) {
ebml_w.start_tag(tag_disr_val); ebml_w.start_tag(tag_disr_val);
ebml_w.writer.write(str::to_bytes(int::to_str(disr_val))); let s = int::to_str(disr_val);
ebml_w.writer.write(s.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
} }
fn encode_parent_item(ebml_w: &mut writer::Encoder, id: def_id) { fn encode_parent_item(ebml_w: &mut writer::Encoder, id: def_id) {
ebml_w.start_tag(tag_items_data_parent_item); ebml_w.start_tag(tag_items_data_parent_item);
ebml_w.writer.write(str::to_bytes(def_to_str(id))); let s = def_to_str(id);
ebml_w.writer.write(s.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -954,7 +957,8 @@ fn encode_info_for_item(ecx: @EncodeContext,
for methods.each |m| { for methods.each |m| {
ebml_w.start_tag(tag_item_impl_method); ebml_w.start_tag(tag_item_impl_method);
let method_def_id = local_def(m.id); let method_def_id = local_def(m.id);
ebml_w.writer.write(str::to_bytes(def_to_str(method_def_id))); let s = def_to_str(method_def_id);
ebml_w.writer.write(s.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
} }
for opt_trait.iter().advance |ast_trait_ref| { for opt_trait.iter().advance |ast_trait_ref| {
@ -1218,7 +1222,7 @@ fn encode_meta_item(ebml_w: &mut writer::Encoder, mi: @meta_item) {
meta_word(name) => { meta_word(name) => {
ebml_w.start_tag(tag_meta_item_word); ebml_w.start_tag(tag_meta_item_word);
ebml_w.start_tag(tag_meta_item_name); ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(str::to_bytes(*name)); ebml_w.writer.write(name.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -1227,10 +1231,10 @@ fn encode_meta_item(ebml_w: &mut writer::Encoder, mi: @meta_item) {
lit_str(value) => { lit_str(value) => {
ebml_w.start_tag(tag_meta_item_name_value); ebml_w.start_tag(tag_meta_item_name_value);
ebml_w.start_tag(tag_meta_item_name); ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(str::to_bytes(*name)); ebml_w.writer.write(name.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.start_tag(tag_meta_item_value); ebml_w.start_tag(tag_meta_item_value);
ebml_w.writer.write(str::to_bytes(*value)); ebml_w.writer.write(value.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -1240,7 +1244,7 @@ fn encode_meta_item(ebml_w: &mut writer::Encoder, mi: @meta_item) {
meta_list(name, ref items) => { meta_list(name, ref items) => {
ebml_w.start_tag(tag_meta_item_list); ebml_w.start_tag(tag_meta_item_list);
ebml_w.start_tag(tag_meta_item_name); ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(str::to_bytes(*name)); ebml_w.writer.write(name.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
for items.each |inner_item| { for items.each |inner_item| {
encode_meta_item(ebml_w, *inner_item); encode_meta_item(ebml_w, *inner_item);
@ -1398,20 +1402,21 @@ fn encode_crate_dep(ecx: @EncodeContext,
dep: decoder::crate_dep) { dep: decoder::crate_dep) {
ebml_w.start_tag(tag_crate_dep); ebml_w.start_tag(tag_crate_dep);
ebml_w.start_tag(tag_crate_dep_name); ebml_w.start_tag(tag_crate_dep_name);
ebml_w.writer.write(str::to_bytes(*ecx.tcx.sess.str_of(dep.name))); let s = ecx.tcx.sess.str_of(dep.name);
ebml_w.writer.write(s.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.start_tag(tag_crate_dep_vers); ebml_w.start_tag(tag_crate_dep_vers);
ebml_w.writer.write(str::to_bytes(*dep.vers)); ebml_w.writer.write(dep.vers.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.start_tag(tag_crate_dep_hash); ebml_w.start_tag(tag_crate_dep_hash);
ebml_w.writer.write(str::to_bytes(*dep.hash)); ebml_w.writer.write(dep.hash.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.end_tag(); ebml_w.end_tag();
} }
fn encode_hash(ebml_w: &mut writer::Encoder, hash: &str) { fn encode_hash(ebml_w: &mut writer::Encoder, hash: &str) {
ebml_w.start_tag(tag_crate_hash); ebml_w.start_tag(tag_crate_hash);
ebml_w.writer.write(str::to_bytes(hash)); ebml_w.writer.write(hash.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -1516,7 +1521,7 @@ pub fn encode_metadata(parms: EncodeParams, crate: &crate) -> ~[u8] {
let writer_bytes: &mut ~[u8] = wr.bytes; let writer_bytes: &mut ~[u8] = wr.bytes;
vec::to_owned(metadata_encoding_version) + metadata_encoding_version.to_owned() +
flate::deflate_bytes(*writer_bytes) flate::deflate_bytes(*writer_bytes)
} }


@ -13,7 +13,6 @@ use core::prelude::*;
use core::option; use core::option;
use core::os; use core::os;
use core::result; use core::result;
use core::str;
// A module for searching for libraries // A module for searching for libraries
// FIXME (#2658): I'm not happy how this module turned out. Should // FIXME (#2658): I'm not happy how this module turned out. Should
@ -81,7 +80,7 @@ pub fn mk_filesearch(maybe_sysroot: &Option<@Path>,
@FileSearchImpl { @FileSearchImpl {
sysroot: sysroot, sysroot: sysroot,
addl_lib_search_paths: addl_lib_search_paths, addl_lib_search_paths: addl_lib_search_paths,
target_triple: str::to_owned(target_triple) target_triple: target_triple.to_owned()
} as @FileSearch } as @FileSearch
} }
@ -107,7 +106,7 @@ pub fn search<T:Copy>(filesearch: @FileSearch, pick: pick<T>) -> Option<T> {
pub fn relative_target_lib_path(target_triple: &str) -> Path { pub fn relative_target_lib_path(target_triple: &str) -> Path {
Path(libdir()).push_many([~"rustc", Path(libdir()).push_many([~"rustc",
str::to_owned(target_triple), target_triple.to_owned(),
libdir()]) libdir()])
} }


@ -80,7 +80,7 @@ fn libname(cx: &Context) -> (~str, ~str) {
os_freebsd => (freebsd::DLL_PREFIX, freebsd::DLL_SUFFIX), os_freebsd => (freebsd::DLL_PREFIX, freebsd::DLL_SUFFIX),
}; };
(str::to_owned(dll_prefix), str::to_owned(dll_suffix)) (dll_prefix.to_owned(), dll_suffix.to_owned())
} }
fn find_library_crate_aux( fn find_library_crate_aux(


@ -495,7 +495,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
match cx.tcx.def_map.find(&pat_id) { match cx.tcx.def_map.find(&pat_id) {
Some(&def_variant(_, id)) => { Some(&def_variant(_, id)) => {
if variant(id) == *ctor_id { if variant(id) == *ctor_id {
Some(vec::to_owned(r.tail())) Some(r.tail().to_owned())
} else { } else {
None None
} }
@ -533,7 +533,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
_ => fail!("type error") _ => fail!("type error")
}; };
if match_ { if match_ {
Some(vec::to_owned(r.tail())) Some(r.tail().to_owned())
} else { } else {
None None
} }
@ -580,7 +580,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
_ => fail!("type error") _ => fail!("type error")
}; };
if match_ { if match_ {
Some(vec::to_owned(r.tail())) Some(r.tail().to_owned())
} else { } else {
None None
} }
@ -590,7 +590,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
Some(args) => args, Some(args) => args,
None => vec::from_elem(arity, wild()) None => vec::from_elem(arity, wild())
}; };
Some(vec::append(args, vec::to_owned(r.tail()))) Some(vec::append(args, r.tail().to_owned()))
} }
def_variant(_, _) => None, def_variant(_, _) => None,
@ -602,7 +602,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
Some(args) => new_args = args, Some(args) => new_args = args,
None => new_args = vec::from_elem(arity, wild()) None => new_args = vec::from_elem(arity, wild())
} }
Some(vec::append(new_args, vec::to_owned(r.tail()))) Some(vec::append(new_args, r.tail().to_owned()))
} }
_ => None _ => None
} }
@ -620,7 +620,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
_ => wild() _ => wild()
} }
}); });
Some(vec::append(args, vec::to_owned(r.tail()))) Some(vec::append(args, r.tail().to_owned()))
} else { } else {
None None
} }
@ -651,7 +651,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
_ => wild() _ => wild()
} }
}); });
Some(vec::append(args, vec::to_owned(r.tail()))) Some(vec::append(args, r.tail().to_owned()))
} }
} }
} }
@ -687,14 +687,14 @@ pub fn specialize(cx: @MatchCheckCtxt,
single => true, single => true,
_ => fail!("type error") _ => fail!("type error")
}; };
if match_ { Some(vec::to_owned(r.tail())) } else { None } if match_ { Some(r.tail().to_owned()) } else { None }
} }
pat_range(lo, hi) => { pat_range(lo, hi) => {
let (c_lo, c_hi) = match *ctor_id { let (c_lo, c_hi) = match *ctor_id {
val(ref v) => ((/*bad*/copy *v), (/*bad*/copy *v)), val(ref v) => ((/*bad*/copy *v), (/*bad*/copy *v)),
range(ref lo, ref hi) => range(ref lo, ref hi) =>
((/*bad*/copy *lo), (/*bad*/copy *hi)), ((/*bad*/copy *lo), (/*bad*/copy *hi)),
single => return Some(vec::to_owned(r.tail())), single => return Some(r.tail().to_owned()),
_ => fail!("type error") _ => fail!("type error")
}; };
let v_lo = eval_const_expr(cx.tcx, lo); let v_lo = eval_const_expr(cx.tcx, lo);
@ -704,7 +704,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
let m2 = compare_const_vals(&c_hi, &v_hi); let m2 = compare_const_vals(&c_hi, &v_hi);
match (m1, m2) { match (m1, m2) {
(Some(val1), Some(val2)) if val1 >= 0 && val2 <= 0 => { (Some(val1), Some(val2)) if val1 >= 0 && val2 <= 0 => {
Some(vec::to_owned(r.tail())) Some(r.tail().to_owned())
}, },
(Some(_), Some(_)) => None, (Some(_), Some(_)) => None,
_ => { _ => {
@ -745,7 +745,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
} }
pub fn default(cx: @MatchCheckCtxt, r: &[@pat]) -> Option<~[@pat]> { pub fn default(cx: @MatchCheckCtxt, r: &[@pat]) -> Option<~[@pat]> {
if is_wild(cx, r[0]) { Some(vec::to_owned(r.tail())) } if is_wild(cx, r[0]) { Some(r.tail().to_owned()) }
else { None } else { None }
} }


@ -23,7 +23,6 @@ use core::i16;
use core::i32; use core::i32;
use core::i64; use core::i64;
use core::i8; use core::i8;
use core::str;
use core::u16; use core::u16;
use core::u32; use core::u32;
use core::u64; use core::u64;
@ -375,7 +374,7 @@ impl Context {
fmt!("%s [-%c %s%s]", msg, match level { fmt!("%s [-%c %s%s]", msg, match level {
warn => 'W', deny => 'D', forbid => 'F', warn => 'W', deny => 'D', forbid => 'F',
allow => fail!() allow => fail!()
}, str::replace(self.lint_to_str(lint), "_", "-"), }, self.lint_to_str(lint).replace("_", "-"),
if src == Default { " (default)" } else { "" }) if src == Default { " (default)" } else { "" })
}, },
Node(src) => { Node(src) => {
@ -842,7 +841,7 @@ fn check_item_non_camel_case_types(cx: &Context, it: @ast::item) {
fn is_camel_case(cx: ty::ctxt, ident: ast::ident) -> bool { fn is_camel_case(cx: ty::ctxt, ident: ast::ident) -> bool {
let ident = cx.sess.str_of(ident); let ident = cx.sess.str_of(ident);
assert!(!ident.is_empty()); assert!(!ident.is_empty());
let ident = ident.trim_chars(&['_']); let ident = ident.trim_chars(&'_');
char::is_uppercase(ident.char_at(0)) && char::is_uppercase(ident.char_at(0)) &&
!ident.contains_char('_') !ident.contains_char('_')
} }


@ -2678,14 +2678,14 @@ impl Resolver {
match module_prefix_result { match module_prefix_result {
Failed => { Failed => {
let mpath = self.idents_to_str(module_path); let mpath = self.idents_to_str(module_path);
match self.idents_to_str(module_path).rfind(':') { match mpath.rfind(':') {
Some(idx) => { Some(idx) => {
self.session.span_err(span, fmt!("unresolved import: could not find `%s` \ self.session.span_err(span, fmt!("unresolved import: could not find `%s` \
in `%s`", mpath.substr(idx, in `%s`",
mpath.len() - idx), // idx +- 1 to account for the colons
// idx - 1 to account for the extra // on either side
// colon mpath.slice_from(idx + 1),
mpath.substr(0, idx - 1))); mpath.slice_to(idx - 1)));
}, },
None => (), None => (),
}; };


@ -48,7 +48,6 @@ use core::iterator::IteratorUtil;
use core::container::Map; use core::container::Map;
use core::libc::c_ulonglong; use core::libc::c_ulonglong;
use core::option::{Option, Some, None}; use core::option::{Option, Some, None};
use core::vec;
use lib::llvm::{ValueRef, TypeRef, True, IntEQ, IntNE}; use lib::llvm::{ValueRef, TypeRef, True, IntEQ, IntNE};
use middle::trans::_match; use middle::trans::_match;
@ -218,7 +217,7 @@ fn mk_struct(cx: @CrateContext, tys: &[ty::t], packed: bool) -> Struct {
size: machine::llsize_of_alloc(cx, llty_rec) /*bad*/as u64, size: machine::llsize_of_alloc(cx, llty_rec) /*bad*/as u64,
align: machine::llalign_of_min(cx, llty_rec) /*bad*/as u64, align: machine::llalign_of_min(cx, llty_rec) /*bad*/as u64,
packed: packed, packed: packed,
fields: vec::to_owned(tys) fields: tys.to_owned()
} }
} }


@ -114,7 +114,7 @@ impl get_insn_ctxt for @CrateContext {
fn insn_ctxt(&self, s: &str) -> icx_popper { fn insn_ctxt(&self, s: &str) -> icx_popper {
debug!("new insn_ctxt: %s", s); debug!("new insn_ctxt: %s", s);
if self.sess.count_llvm_insns() { if self.sess.count_llvm_insns() {
self.stats.llvm_insn_ctxt.push(str::to_owned(s)); self.stats.llvm_insn_ctxt.push(s.to_owned());
} }
icx_popper(*self) icx_popper(*self)
} }


@ -885,9 +885,9 @@ pub fn add_comment(bcx: block, text: &str) {
unsafe { unsafe {
let ccx = bcx.ccx(); let ccx = bcx.ccx();
if ccx.sess.asm_comments() { if ccx.sess.asm_comments() {
let sanitized = str::replace(text, "$", ""); let sanitized = text.replace("$", "");
let comment_text = ~"# " + let comment_text = ~"# " +
str::replace(sanitized, "\n", "\n\t# "); sanitized.replace("\n", "\n\t# ");
let asm = str::as_c_str(comment_text, |c| { let asm = str::as_c_str(comment_text, |c| {
str::as_c_str("", |e| { str::as_c_str("", |e| {
count_insn(bcx, "inlineasm"); count_insn(bcx, "inlineasm");


@ -1704,5 +1704,5 @@ fn trans_assign_op(bcx: block,
} }
fn shorten(x: ~str) -> ~str { fn shorten(x: ~str) -> ~str {
if x.len() > 60 { x.substr(0, 60).to_owned() } else { x } if x.char_len() > 60 { x.slice_chars(0, 60).to_owned() } else { x }
} }


@ -3898,7 +3898,7 @@ pub fn item_path(cx: ctxt, id: ast::def_id) -> ast_map::path {
} }
ast_map::node_variant(ref variant, _, path) => { ast_map::node_variant(ref variant, _, path) => {
vec::append_one(vec::to_owned(vec::init(*path)), vec::append_one(path.init().to_owned(),
ast_map::path_name((*variant).node.name)) ast_map::path_name((*variant).node.name))
} }


@ -209,7 +209,7 @@ Available lint options:
io::println(fmt!(" %s %7.7s %s\n", io::println(fmt!(" %s %7.7s %s\n",
padded(max_key, "----"), "-------", "-------")); padded(max_key, "----"), "-------", "-------"));
for lint_dict.each |k, v| { for lint_dict.each |k, v| {
let k = str::replace(*k, "_", "-"); let k = k.replace("_", "-");
io::println(fmt!(" %s %7.7s %s", io::println(fmt!(" %s %7.7s %s",
padded(max_key, k), padded(max_key, k),
match v.default { match v.default {


@ -17,7 +17,6 @@ an AST's attributes.
use core::prelude::*; use core::prelude::*;
use core::str;
use syntax::ast; use syntax::ast;
use syntax::attr; use syntax::attr;


@ -262,7 +262,7 @@ mod test {
.. default_config(&Path("test")) .. default_config(&Path("test"))
}; };
let mock_process_output: ~fn(&str, &[~str]) -> ProcessOutput = |_, _| { let mock_process_output: ~fn(&str, &[~str]) -> ProcessOutput = |_, _| {
ProcessOutput { status: 0, output: "pandoc 1.8.2.1".to_bytes(), error: ~[] } ProcessOutput { status: 0, output: "pandoc 1.8.2.1".as_bytes().to_owned(), error: ~[] }
}; };
let result = maybe_find_pandoc(&config, None, mock_process_output); let result = maybe_find_pandoc(&config, None, mock_process_output);
assert!(result == result::Ok(Some(~"pandoc"))); assert!(result == result::Ok(Some(~"pandoc")));


@ -108,7 +108,7 @@ fn first_sentence(s: ~str) -> Option<~str> {
let paras = paragraphs(s); let paras = paragraphs(s);
if !paras.is_empty() { if !paras.is_empty() {
let first_para = paras.head(); let first_para = paras.head();
Some(str::replace(first_sentence_(*first_para), "\n", " ")) Some(first_sentence_(*first_para).replace("\n", " "))
} else { } else {
None None
} }
@ -131,13 +131,13 @@ fn first_sentence_(s: &str) -> ~str {
}); });
match idx { match idx {
Some(idx) if idx > 2u => { Some(idx) if idx > 2u => {
str::to_owned(s.slice(0, idx - 1)) s.slice_to(idx - 1).to_owned()
} }
_ => { _ => {
if s.ends_with(".") { if s.ends_with(".") {
str::to_owned(s) s.to_owned()
} else { } else {
str::to_owned(s) s.to_owned()
} }
} }
} }


@ -13,14 +13,12 @@
use pass::Pass; use pass::Pass;
use text_pass; use text_pass;
use core::str;
pub fn mk_pass() -> Pass { pub fn mk_pass() -> Pass {
text_pass::mk_pass(~"escape", escape) text_pass::mk_pass(~"escape", escape)
} }
fn escape(s: &str) -> ~str { fn escape(s: &str) -> ~str {
str::replace(s, "\\", "\\\\") s.replace("\\", "\\\\")
} }
#[test] #[test]


@ -22,8 +22,6 @@ use markdown_pass;
use markdown_writer; use markdown_writer;
use pass::Pass; use pass::Pass;
use core::str;
pub fn mk_pass(config: config::Config) -> Pass { pub fn mk_pass(config: config::Config) -> Pass {
Pass { Pass {
name: ~"markdown_index", name: ~"markdown_index",
@ -128,24 +126,24 @@ pub fn pandoc_header_id(header: &str) -> ~str {
return header; return header;
fn remove_formatting(s: &str) -> ~str { fn remove_formatting(s: &str) -> ~str {
str::replace(s, "`", "") s.replace("`", "")
} }
fn remove_punctuation(s: &str) -> ~str { fn remove_punctuation(s: &str) -> ~str {
let s = str::replace(s, "<", ""); let s = s.replace("<", "");
let s = str::replace(s, ">", ""); let s = s.replace(">", "");
let s = str::replace(s, "[", ""); let s = s.replace("[", "");
let s = str::replace(s, "]", ""); let s = s.replace("]", "");
let s = str::replace(s, "(", ""); let s = s.replace("(", "");
let s = str::replace(s, ")", ""); let s = s.replace(")", "");
let s = str::replace(s, "@~", ""); let s = s.replace("@~", "");
let s = str::replace(s, "~", ""); let s = s.replace("~", "");
let s = str::replace(s, "/", ""); let s = s.replace("/", "");
let s = str::replace(s, ":", ""); let s = s.replace(":", "");
let s = str::replace(s, "&", ""); let s = s.replace("&", "");
let s = str::replace(s, "^", ""); let s = s.replace("^", "");
let s = str::replace(s, ",", ""); let s = s.replace(",", "");
let s = str::replace(s, "'", ""); let s = s.replace("'", "");
let s = str::replace(s, "+", ""); let s = s.replace("+", "");
return s; return s;
} }
fn replace_with_hyphens(s: &str) -> ~str { fn replace_with_hyphens(s: &str) -> ~str {
@ -153,8 +151,8 @@ pub fn pandoc_header_id(header: &str) -> ~str {
// XXX: Hacky implementation here that only covers // XXX: Hacky implementation here that only covers
// one or two spaces. // one or two spaces.
let s = s.trim(); let s = s.trim();
let s = str::replace(s, " ", "-"); let s = s.replace(" ", "-");
let s = str::replace(s, " ", "-"); let s = s.replace(" ", "-");
return s; return s;
} }
// FIXME: #4318 Instead of to_ascii and to_str_ascii, could use // FIXME: #4318 Instead of to_ascii and to_str_ascii, could use


@ -114,7 +114,7 @@ fn make_title(page: doc::Page) -> ~str {
} }
}; };
let title = markdown_pass::header_text(item); let title = markdown_pass::header_text(item);
let title = str::replace(title, "`", ""); let title = title.replace("`", "");
return title; return title;
} }


@ -284,7 +284,7 @@ fn run_cmd(repl: &mut Repl, _in: @io::Reader, _out: @io::Writer,
for args.each |arg| { for args.each |arg| {
let (crate, filename) = let (crate, filename) =
if arg.ends_with(".rs") || arg.ends_with(".rc") { if arg.ends_with(".rs") || arg.ends_with(".rc") {
(arg.substr(0, arg.len() - 3).to_owned(), copy *arg) (arg.slice_to(arg.len() - 3).to_owned(), copy *arg)
} else { } else {
(copy *arg, arg + ".rs") (copy *arg, arg + ".rs")
}; };
@ -342,7 +342,8 @@ pub fn run_line(repl: &mut Repl, in: @io::Reader, out: @io::Writer, line: ~str,
// FIXME #5898: conflicts with Cell.take(), so can't be at the top level // FIXME #5898: conflicts with Cell.take(), so can't be at the top level
use core::iterator::IteratorUtil; use core::iterator::IteratorUtil;
let full = line.substr(1, line.len() - 1); // drop the : and the \n (one byte each)
let full = line.slice(1, line.len() - 1);
let split: ~[~str] = full.word_iter().transform(|s| s.to_owned()).collect(); let split: ~[~str] = full.word_iter().transform(|s| s.to_owned()).collect();
let len = split.len(); let len = split.len();


@ -12,7 +12,7 @@
use core::path::Path; use core::path::Path;
use core::option::Some; use core::option::Some;
use core::{hash, str}; use core::hash;
use core::rt::io::Writer; use core::rt::io::Writer;
use core::hash::Streaming; use core::hash::Streaming;
@ -32,7 +32,7 @@ pub fn normalize(p_: RemotePath) -> LocalPath {
match p.filestem() { match p.filestem() {
None => LocalPath(p), None => LocalPath(p),
Some(st) => { Some(st) => {
let replaced = str::replace(st, "-", "_"); let replaced = st.replace("-", "_");
if replaced != st { if replaced != st {
LocalPath(p.with_filestem(replaced)) LocalPath(p.with_filestem(replaced))
} }
@ -44,8 +44,7 @@ pub fn normalize(p_: RemotePath) -> LocalPath {
} }
pub fn write<W: Writer>(writer: &mut W, string: &str) { pub fn write<W: Writer>(writer: &mut W, string: &str) {
let buffer = str::as_bytes_slice(string); writer.write(string.as_bytes());
writer.write(buffer);
} }
pub fn hash(data: ~str) -> ~str { pub fn hash(data: ~str) -> ~str {


@ -183,7 +183,7 @@ impl PkgSrc {
if self.libs.is_empty() && self.mains.is_empty() if self.libs.is_empty() && self.mains.is_empty()
&& self.tests.is_empty() && self.benchs.is_empty() { && self.tests.is_empty() && self.benchs.is_empty() {
note(~"Couldn't infer any crates to build.\n\ note("Couldn't infer any crates to build.\n\
Try naming a crate `main.rs`, `lib.rs`, \ Try naming a crate `main.rs`, `lib.rs`, \
`test.rs`, or `bench.rs`."); `test.rs`, or `bench.rs`.");
cond.raise(copy self.id); cond.raise(copy self.id);


@ -866,6 +866,23 @@ mod test_map {
assert_eq!(m.len(), i); assert_eq!(m.len(), i);
assert!(!m.is_empty()); assert!(!m.is_empty());
} }
#[test]
fn test_find_equiv() {
let mut m = HashMap::new();
let (foo, bar, baz) = (1,2,3);
m.insert(~"foo", foo);
m.insert(~"bar", bar);
m.insert(~"baz", baz);
assert_eq!(m.find_equiv(&("foo")), Some(&foo));
assert_eq!(m.find_equiv(&("bar")), Some(&bar));
assert_eq!(m.find_equiv(&("baz")), Some(&baz));
assert_eq!(m.find_equiv(&("qux")), None);
}
} }
#[cfg(test)] #[cfg(test)]


@ -761,7 +761,7 @@ impl<T:Reader> ReaderUtil for T {
fn read_lines(&self) -> ~[~str] { fn read_lines(&self) -> ~[~str] {
do vec::build |push| { do vec::build |push| {
for self.each_line |line| { for self.each_line |line| {
push(str::to_owned(line)); push(line.to_owned());
} }
} }
} }
@ -1091,7 +1091,7 @@ pub fn with_bytes_reader<T>(bytes: &[u8], f: &fn(@Reader) -> T) -> T {
} }
pub fn with_str_reader<T>(s: &str, f: &fn(@Reader) -> T) -> T { pub fn with_str_reader<T>(s: &str, f: &fn(@Reader) -> T) -> T {
str::byte_slice(s, |bytes| with_bytes_reader(bytes, f)) with_bytes_reader(s.as_bytes(), f)
} }
// Writing // Writing
@ -1462,7 +1462,7 @@ impl<T:Writer> WriterUtil for T {
self.write_str(str::from_char(ch)); self.write_str(str::from_char(ch));
} }
} }
fn write_str(&self, s: &str) { str::byte_slice(s, |v| self.write(v)) } fn write_str(&self, s: &str) { self.write(s.as_bytes()) }
fn write_line(&self, s: &str) { fn write_line(&self, s: &str) {
self.write_str(s); self.write_str(s);
self.write_str(&"\n"); self.write_str(&"\n");


@ -793,27 +793,27 @@ mod tests {
#[test] #[test]
fn test_parse_bytes() { fn test_parse_bytes() {
use str::to_bytes; use str::StrSlice;
assert_eq!(parse_bytes(to_bytes("123"), 10u), Some(123 as $T)); assert_eq!(parse_bytes("123".as_bytes(), 10u), Some(123 as $T));
assert_eq!(parse_bytes(to_bytes("1001"), 2u), Some(9 as $T)); assert_eq!(parse_bytes("1001".as_bytes(), 2u), Some(9 as $T));
assert_eq!(parse_bytes(to_bytes("123"), 8u), Some(83 as $T)); assert_eq!(parse_bytes("123".as_bytes(), 8u), Some(83 as $T));
assert_eq!(i32::parse_bytes(to_bytes("123"), 16u), Some(291 as i32)); assert_eq!(i32::parse_bytes("123".as_bytes(), 16u), Some(291 as i32));
assert_eq!(i32::parse_bytes(to_bytes("ffff"), 16u), Some(65535 as i32)); assert_eq!(i32::parse_bytes("ffff".as_bytes(), 16u), Some(65535 as i32));
assert_eq!(i32::parse_bytes(to_bytes("FFFF"), 16u), Some(65535 as i32)); assert_eq!(i32::parse_bytes("FFFF".as_bytes(), 16u), Some(65535 as i32));
assert_eq!(parse_bytes(to_bytes("z"), 36u), Some(35 as $T)); assert_eq!(parse_bytes("z".as_bytes(), 36u), Some(35 as $T));
assert_eq!(parse_bytes(to_bytes("Z"), 36u), Some(35 as $T)); assert_eq!(parse_bytes("Z".as_bytes(), 36u), Some(35 as $T));
assert_eq!(parse_bytes(to_bytes("-123"), 10u), Some(-123 as $T)); assert_eq!(parse_bytes("-123".as_bytes(), 10u), Some(-123 as $T));
assert_eq!(parse_bytes(to_bytes("-1001"), 2u), Some(-9 as $T)); assert_eq!(parse_bytes("-1001".as_bytes(), 2u), Some(-9 as $T));
assert_eq!(parse_bytes(to_bytes("-123"), 8u), Some(-83 as $T)); assert_eq!(parse_bytes("-123".as_bytes(), 8u), Some(-83 as $T));
assert_eq!(i32::parse_bytes(to_bytes("-123"), 16u), Some(-291 as i32)); assert_eq!(i32::parse_bytes("-123".as_bytes(), 16u), Some(-291 as i32));
assert_eq!(i32::parse_bytes(to_bytes("-ffff"), 16u), Some(-65535 as i32)); assert_eq!(i32::parse_bytes("-ffff".as_bytes(), 16u), Some(-65535 as i32));
assert_eq!(i32::parse_bytes(to_bytes("-FFFF"), 16u), Some(-65535 as i32)); assert_eq!(i32::parse_bytes("-FFFF".as_bytes(), 16u), Some(-65535 as i32));
assert_eq!(parse_bytes(to_bytes("-z"), 36u), Some(-35 as $T)); assert_eq!(parse_bytes("-z".as_bytes(), 36u), Some(-35 as $T));
assert_eq!(parse_bytes(to_bytes("-Z"), 36u), Some(-35 as $T)); assert_eq!(parse_bytes("-Z".as_bytes(), 36u), Some(-35 as $T));
assert!(parse_bytes(to_bytes("Z"), 35u).is_none()); assert!(parse_bytes("Z".as_bytes(), 35u).is_none());
assert!(parse_bytes(to_bytes("-9"), 2u).is_none()); assert!(parse_bytes("-9".as_bytes(), 2u).is_none());
} }
#[test] #[test]

@ -16,6 +16,7 @@ use ops::{Add, Sub, Mul, Div, Rem, Neg};
use option::{None, Option, Some}; use option::{None, Option, Some};
use char; use char;
use str; use str;
use str::{StrSlice};
use kinds::Copy; use kinds::Copy;
use vec; use vec;
use vec::{CopyableVector, ImmutableVector}; use vec::{CopyableVector, ImmutableVector};
@ -189,18 +190,18 @@ pub fn to_str_bytes_common<T:NumCast+Zero+One+Eq+Ord+NumStrConv+Copy+
let _1: T = One::one(); let _1: T = One::one();
if is_NaN(num) { if is_NaN(num) {
return (str::to_bytes("NaN"), true); return ("NaN".as_bytes().to_owned(), true);
} }
else if is_inf(num){ else if is_inf(num){
return match sign { return match sign {
SignAll => (str::to_bytes("+inf"), true), SignAll => ("+inf".as_bytes().to_owned(), true),
_ => (str::to_bytes("inf"), true) _ => ("inf".as_bytes().to_owned(), true)
} }
} }
else if is_neg_inf(num) { else if is_neg_inf(num) {
return match sign { return match sign {
SignNone => (str::to_bytes("inf"), true), SignNone => ("inf".as_bytes().to_owned(), true),
_ => (str::to_bytes("-inf"), true), _ => ("-inf".as_bytes().to_owned(), true),
} }
} }
@ -638,7 +639,7 @@ pub fn from_str_common<T:NumCast+Zero+One+Eq+Ord+Copy+Div<T,T>+Mul<T,T>+
special: bool, exponent: ExponentFormat, empty_zero: bool, special: bool, exponent: ExponentFormat, empty_zero: bool,
ignore_underscores: bool ignore_underscores: bool
) -> Option<T> { ) -> Option<T> {
from_str_bytes_common(str::to_bytes(buf), radix, negative, from_str_bytes_common(buf.as_bytes(), radix, negative,
fractional, special, exponent, empty_zero, fractional, special, exponent, empty_zero,
ignore_underscores) ignore_underscores)
} }

@ -538,16 +538,16 @@ mod tests {
#[test] #[test]
pub fn test_parse_bytes() { pub fn test_parse_bytes() {
use str::to_bytes; use str::StrSlice;
assert_eq!(parse_bytes(to_bytes("123"), 10u), Some(123u as $T)); assert_eq!(parse_bytes("123".as_bytes(), 10u), Some(123u as $T));
assert_eq!(parse_bytes(to_bytes("1001"), 2u), Some(9u as $T)); assert_eq!(parse_bytes("1001".as_bytes(), 2u), Some(9u as $T));
assert_eq!(parse_bytes(to_bytes("123"), 8u), Some(83u as $T)); assert_eq!(parse_bytes("123".as_bytes(), 8u), Some(83u as $T));
assert_eq!(u16::parse_bytes(to_bytes("123"), 16u), Some(291u as u16)); assert_eq!(u16::parse_bytes("123".as_bytes(), 16u), Some(291u as u16));
assert_eq!(u16::parse_bytes(to_bytes("ffff"), 16u), Some(65535u as u16)); assert_eq!(u16::parse_bytes("ffff".as_bytes(), 16u), Some(65535u as u16));
assert_eq!(parse_bytes(to_bytes("z"), 36u), Some(35u as $T)); assert_eq!(parse_bytes("z".as_bytes(), 36u), Some(35u as $T));
assert!(parse_bytes(to_bytes("Z"), 10u).is_none()); assert!(parse_bytes("Z".as_bytes(), 10u).is_none());
assert!(parse_bytes(to_bytes("_"), 2u).is_none()); assert!(parse_bytes("_".as_bytes(), 2u).is_none());
} }
#[test] #[test]

@ -1448,9 +1448,9 @@ mod tests {
use rand::RngUtil; use rand::RngUtil;
use rand; use rand;
use run; use run;
use str;
use str::StrSlice; use str::StrSlice;
use vec; use vec;
use vec::CopyableVector;
use libc::consts::os::posix88::{S_IRUSR, S_IWUSR, S_IXUSR}; use libc::consts::os::posix88::{S_IRUSR, S_IWUSR, S_IXUSR};
@ -1684,7 +1684,7 @@ mod tests {
}; };
assert!((ostream as uint != 0u)); assert!((ostream as uint != 0u));
let s = ~"hello"; let s = ~"hello";
let mut buf = str::to_bytes(s) + [0 as u8]; let mut buf = s.as_bytes_with_null().to_owned();
do vec::as_mut_buf(buf) |b, _len| { do vec::as_mut_buf(buf) |b, _len| {
assert!((libc::fwrite(b as *c_void, 1u as size_t, assert!((libc::fwrite(b as *c_void, 1u as size_t,
(s.len() + 1u) as size_t, ostream) (s.len() + 1u) as size_t, ostream)

@ -515,7 +515,7 @@ impl GenericPath for PosixPath {
fn with_filestem(&self, s: &str) -> PosixPath { fn with_filestem(&self, s: &str) -> PosixPath {
match self.filetype() { match self.filetype() {
None => self.with_filename(s), None => self.with_filename(s),
Some(ref t) => self.with_filename(str::to_owned(s) + *t), Some(ref t) => self.with_filename(s.to_owned() + *t),
} }
} }
@ -657,7 +657,7 @@ impl GenericPath for WindowsPath {
(None, None) => { (None, None) => {
host = None; host = None;
device = None; device = None;
rest = str::to_owned(s); rest = s.to_owned();
} }
} }
@ -729,7 +729,7 @@ impl GenericPath for WindowsPath {
fn with_filestem(&self, s: &str) -> WindowsPath { fn with_filestem(&self, s: &str) -> WindowsPath {
match self.filetype() { match self.filetype() {
None => self.with_filename(s), None => self.with_filename(s),
Some(ref t) => self.with_filename(str::to_owned(s) + *t), Some(ref t) => self.with_filename(s.to_owned() + *t),
} }
} }
@ -947,7 +947,6 @@ pub mod windows {
mod tests { mod tests {
use option::{None, Some}; use option::{None, Some};
use path::{PosixPath, WindowsPath, windows}; use path::{PosixPath, WindowsPath, windows};
use str;
#[test] #[test]
fn test_double_slash_collapsing() { fn test_double_slash_collapsing() {
@ -984,7 +983,7 @@ mod tests {
fn test_posix_paths() { fn test_posix_paths() {
fn t(wp: &PosixPath, s: &str) { fn t(wp: &PosixPath, s: &str) {
let ss = wp.to_str(); let ss = wp.to_str();
let sss = str::to_owned(s); let sss = s.to_owned();
if (ss != sss) { if (ss != sss) {
debug!("got %s", ss); debug!("got %s", ss);
debug!("expected %s", sss); debug!("expected %s", sss);
@ -1042,7 +1041,7 @@ mod tests {
fn test_normalize() { fn test_normalize() {
fn t(wp: &PosixPath, s: &str) { fn t(wp: &PosixPath, s: &str) {
let ss = wp.to_str(); let ss = wp.to_str();
let sss = str::to_owned(s); let sss = s.to_owned();
if (ss != sss) { if (ss != sss) {
debug!("got %s", ss); debug!("got %s", ss);
debug!("expected %s", sss); debug!("expected %s", sss);
@ -1105,7 +1104,7 @@ mod tests {
fn test_windows_paths() { fn test_windows_paths() {
fn t(wp: &WindowsPath, s: &str) { fn t(wp: &WindowsPath, s: &str) {
let ss = wp.to_str(); let ss = wp.to_str();
let sss = str::to_owned(s); let sss = s.to_owned();
if (ss != sss) { if (ss != sss) {
debug!("got %s", ss); debug!("got %s", ss);
debug!("expected %s", sss); debug!("expected %s", sss);

@ -64,7 +64,7 @@ pub use path::PosixPath;
pub use path::WindowsPath; pub use path::WindowsPath;
pub use ptr::RawPtr; pub use ptr::RawPtr;
pub use ascii::{Ascii, AsciiCast, OwnedAsciiCast, AsciiStr}; pub use ascii::{Ascii, AsciiCast, OwnedAsciiCast, AsciiStr};
pub use str::{StrVector, StrSlice, OwnedStr, StrUtil}; pub use str::{StrVector, StrSlice, OwnedStr, StrUtil, NullTerminatedStr};
pub use from_str::{FromStr}; pub use from_str::{FromStr};
pub use to_bytes::IterBytes; pub use to_bytes::IterBytes;
pub use to_str::{ToStr, ToStrConsume}; pub use to_str::{ToStr, ToStrConsume};

@ -577,7 +577,7 @@ impl<R: Rng> RngUtil for R {
/// Shuffle a vec /// Shuffle a vec
fn shuffle<T:Copy>(&mut self, values: &[T]) -> ~[T] { fn shuffle<T:Copy>(&mut self, values: &[T]) -> ~[T] {
let mut m = vec::to_owned(values); let mut m = values.to_owned();
self.shuffle_mut(m); self.shuffle_mut(m);
m m
} }

@ -75,5 +75,5 @@ fn super_simple_smoke_test_lets_go_read_some_files_and_have_a_good_time() {
let message = "it's alright. have a good time"; let message = "it's alright. have a good time";
let filename = &Path("test.txt"); let filename = &Path("test.txt");
let mut outstream = FileStream::open(filename, Create, Read).unwrap(); let mut outstream = FileStream::open(filename, Create, Read).unwrap();
outstream.write(message.to_bytes()); outstream.write(message.as_bytes());
} }

@ -108,7 +108,7 @@ mod test {
let mem_writer = MemWriter::new(); let mem_writer = MemWriter::new();
let mut deflate_writer = DeflateWriter::new(mem_writer); let mut deflate_writer = DeflateWriter::new(mem_writer);
let in_msg = "test"; let in_msg = "test";
let in_bytes = in_msg.to_bytes(); let in_bytes = in_msg.as_bytes();
deflate_writer.write(in_bytes); deflate_writer.write(in_bytes);
deflate_writer.flush(); deflate_writer.flush();
let buf = deflate_writer.inner().inner(); let buf = deflate_writer.inner().inner();

@ -741,8 +741,7 @@ fn with_envp<T>(env: Option<&[(~str, ~str)]>, cb: &fn(*mut c_void) -> T) -> T {
let mut blk = ~[]; let mut blk = ~[];
for es.each |&(k, v)| { for es.each |&(k, v)| {
let kv = fmt!("%s=%s", k, v); let kv = fmt!("%s=%s", k, v);
blk.push_all(str::as_bytes_slice(kv)); blk.push_all(kv.as_bytes_with_null_consume());
blk.push(0);
} }
blk.push(0); blk.push(0);
vec::as_imm_buf(blk, |p, _len| vec::as_imm_buf(blk, |p, _len|
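
The old loop pushed each `k=v` string's bytes and then a separate `0`; the replacement leans on `as_bytes_with_null_consume` handing back the `~str`'s own buffer, terminator included, so no per-entry `push(0)` is needed. A minimal sketch of that equivalence (illustrative values, assuming the consume variant returns the string's bytes plus the trailing null, as this commit describes):

~~~
let kv = ~"A=B";
// 65, 61, 66 are the ASCII codes for 'A', '=', 'B'; the final 0 is the
// terminator that previously had to be pushed by hand.
assert_eq!(kv.as_bytes_with_null_consume(), ~[65u8, 61u8, 66u8, 0u8]);
~~~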

File diff suppressed because it is too large.

@ -18,7 +18,7 @@ use io;
use io::Writer; use io::Writer;
use option::{None, Option, Some}; use option::{None, Option, Some};
use old_iter::BaseIter; use old_iter::BaseIter;
use str; use str::StrSlice;
pub type Cb<'self> = &'self fn(buf: &[u8]) -> bool; pub type Cb<'self> = &'self fn(buf: &[u8]) -> bool;
@ -239,27 +239,25 @@ impl<A:IterBytes> IterBytes for @[A] {
impl<'self> IterBytes for &'self str { impl<'self> IterBytes for &'self str {
#[inline(always)] #[inline(always)]
fn iter_bytes(&self, _lsb0: bool, f: Cb) -> bool { fn iter_bytes(&self, _lsb0: bool, f: Cb) -> bool {
do str::byte_slice(*self) |bytes| { f(self.as_bytes())
f(bytes)
}
} }
} }
impl IterBytes for ~str { impl IterBytes for ~str {
#[inline(always)] #[inline(always)]
fn iter_bytes(&self, _lsb0: bool, f: Cb) -> bool { fn iter_bytes(&self, _lsb0: bool, f: Cb) -> bool {
do str::byte_slice(*self) |bytes| { // this should possibly include the null terminator, but that
f(bytes) // breaks .find_equiv on hashmaps.
} f(self.as_bytes())
} }
} }
impl IterBytes for @str { impl IterBytes for @str {
#[inline(always)] #[inline(always)]
fn iter_bytes(&self, _lsb0: bool, f: Cb) -> bool { fn iter_bytes(&self, _lsb0: bool, f: Cb) -> bool {
do str::byte_slice(*self) |bytes| { // this should possibly include the null terminator, but that
f(bytes) // breaks .find_equiv on hashmaps.
} f(self.as_bytes())
} }
} }
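
The comments added to the `~str` and `@str` impls above record why the trailing null is left out of the hashed bytes: `&str` has no terminator, so including it for the owned variants would make equivalent strings hash differently and defeat `HashMap::find_equiv`. A small fragment showing the lookup that depends on all three string types hashing the same byte sequence (adapted from the `test_find_equiv` test earlier in this diff):

~~~
let mut m = HashMap::new();
let foo = 1;
m.insert(~"foo", foo);                           // key stored as ~str
assert_eq!(m.find_equiv(&("foo")), Some(&foo));  // looked up as &str
assert_eq!(m.find_equiv(&("qux")), None);
~~~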

@ -325,7 +325,7 @@ pub mod ct {
'o' => TyOctal, 'o' => TyOctal,
'f' => TyFloat, 'f' => TyFloat,
'?' => TyPoly, '?' => TyPoly,
_ => err(~"unknown type in conversion: " + s.substr(i, 1)) _ => err(fmt!("unknown type in conversion: %c", s.char_at(i)))
}; };
Parsed::new(t, i + 1) Parsed::new(t, i + 1)
@ -546,7 +546,7 @@ pub mod rt {
// displayed // displayed
let unpadded = match cv.precision { let unpadded = match cv.precision {
CountImplied => s, CountImplied => s,
CountIs(max) => if (max as uint) < str::char_len(s) { CountIs(max) => if (max as uint) < s.char_len() {
s.slice(0, max as uint) s.slice(0, max as uint)
} else { } else {
s s
@ -584,7 +584,7 @@ pub mod rt {
~"" ~""
} else { } else {
let s = uint::to_str_radix(num, radix); let s = uint::to_str_radix(num, radix);
let len = str::char_len(s); let len = s.char_len();
if len < prec { if len < prec {
let diff = prec - len; let diff = prec - len;
let pad = str::from_chars(vec::from_elem(diff, '0')); let pad = str::from_chars(vec::from_elem(diff, '0'));
@ -614,7 +614,7 @@ pub mod rt {
} }
CountIs(width) => { width as uint } CountIs(width) => { width as uint }
}; };
let strlen = str::char_len(s) + headsize; let strlen = s.char_len() + headsize;
if uwidth <= strlen { if uwidth <= strlen {
for head.iter().advance |&c| { for head.iter().advance |&c| {
buf.push_char(c); buf.push_char(c);

@ -171,11 +171,6 @@ pub fn from_elem<T:Copy>(n_elts: uint, t: T) -> ~[T] {
} }
} }
/// Creates a new unique vector with the same contents as the slice
pub fn to_owned<T:Copy>(t: &[T]) -> ~[T] {
from_fn(t.len(), |i| t[i])
}
/// Creates a new vector with a capacity of `capacity` /// Creates a new vector with a capacity of `capacity`
pub fn with_capacity<T>(capacity: uint) -> ~[T] { pub fn with_capacity<T>(capacity: uint) -> ~[T] {
let mut vec = ~[]; let mut vec = ~[];
@ -1787,7 +1782,7 @@ pub trait CopyableVector<T> {
/// Extension methods for vectors /// Extension methods for vectors
impl<'self,T:Copy> CopyableVector<T> for &'self [T] { impl<'self,T:Copy> CopyableVector<T> for &'self [T] {
/// Returns a copy of `v`. /// Creates a new unique vector with the same contents as the slice
#[inline] #[inline]
fn to_owned(&self) -> ~[T] { fn to_owned(&self) -> ~[T] {
let mut result = ~[]; let mut result = ~[];
@ -1796,7 +1791,6 @@ impl<'self,T:Copy> CopyableVector<T> for &'self [T] {
result.push(copy *e); result.push(copy *e);
} }
result result
} }
} }
@ -3361,19 +3355,19 @@ mod tests {
let mut results: ~[~[int]]; let mut results: ~[~[int]];
results = ~[]; results = ~[];
for each_permutation([]) |v| { results.push(to_owned(v)); } for each_permutation([]) |v| { results.push(v.to_owned()); }
assert_eq!(results, ~[~[]]); assert_eq!(results, ~[~[]]);
results = ~[]; results = ~[];
for each_permutation([7]) |v| { results.push(to_owned(v)); } for each_permutation([7]) |v| { results.push(v.to_owned()); }
assert_eq!(results, ~[~[7]]); assert_eq!(results, ~[~[7]]);
results = ~[]; results = ~[];
for each_permutation([1,1]) |v| { results.push(to_owned(v)); } for each_permutation([1,1]) |v| { results.push(v.to_owned()); }
assert_eq!(results, ~[~[1,1],~[1,1]]); assert_eq!(results, ~[~[1,1],~[1,1]]);
results = ~[]; results = ~[];
for each_permutation([5,2,0]) |v| { results.push(to_owned(v)); } for each_permutation([5,2,0]) |v| { results.push(v.to_owned()); }
assert!(results == assert!(results ==
~[~[5,2,0],~[5,0,2],~[2,5,0],~[2,0,5],~[0,5,2],~[0,2,5]]); ~[~[5,2,0],~[5,0,2],~[2,5,0],~[2,0,5],~[0,5,2],~[0,2,5]]);
} }

@ -259,7 +259,7 @@ pub fn last_meta_item_list_by_name(items: ~[@ast::meta_item], name: &str)
pub fn sort_meta_items(items: &[@ast::meta_item]) -> ~[@ast::meta_item] { pub fn sort_meta_items(items: &[@ast::meta_item]) -> ~[@ast::meta_item] {
// This is sort of stupid here, converting to a vec of mutables and back // This is sort of stupid here, converting to a vec of mutables and back
let mut v = vec::to_owned(items); let mut v = items.to_owned();
do extra::sort::quick_sort(v) |ma, mb| { do extra::sort::quick_sort(v) |ma, mb| {
get_meta_item_name(*ma) <= get_meta_item_name(*mb) get_meta_item_name(*ma) <= get_meta_item_name(*mb)
} }

@ -21,8 +21,6 @@ use ext::base::*;
use parse; use parse;
use parse::token; use parse::token;
use core::vec;
enum State { enum State {
Asm, Asm,
Outputs, Outputs,
@ -45,7 +43,7 @@ pub fn expand_asm(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult { -> base::MacResult {
let p = parse::new_parser_from_tts(cx.parse_sess(), let p = parse::new_parser_from_tts(cx.parse_sess(),
cx.cfg(), cx.cfg(),
vec::to_owned(tts)); tts.to_owned());
let mut asm = ~""; let mut asm = ~"";
let mut outputs = ~[]; let mut outputs = ~[];

@ -22,7 +22,6 @@ use parse::token;
use parse::token::{ident_to_str, intern, str_to_ident}; use parse::token::{ident_to_str, intern, str_to_ident};
use core::hashmap::HashMap; use core::hashmap::HashMap;
use core::vec;
// new-style macro! tt code: // new-style macro! tt code:
// //
@ -367,7 +366,7 @@ pub fn get_exprs_from_tts(cx: @ExtCtxt, tts: &[ast::token_tree])
-> ~[@ast::expr] { -> ~[@ast::expr] {
let p = parse::new_parser_from_tts(cx.parse_sess(), let p = parse::new_parser_from_tts(cx.parse_sess(),
cx.cfg(), cx.cfg(),
vec::to_owned(tts)); tts.to_owned());
let mut es = ~[]; let mut es = ~[];
while *p.token != token::EOF { while *p.token != token::EOF {
if es.len() != 0 { if es.len() != 0 {

@ -18,7 +18,6 @@ use print;
use parse::token::{get_ident_interner}; use parse::token::{get_ident_interner};
use core::io; use core::io;
use core::vec;
pub fn expand_syntax_ext(cx: @ExtCtxt, pub fn expand_syntax_ext(cx: @ExtCtxt,
sp: codemap::span, sp: codemap::span,
@ -28,7 +27,7 @@ pub fn expand_syntax_ext(cx: @ExtCtxt,
cx.print_backtrace(); cx.print_backtrace();
io::stdout().write_line( io::stdout().write_line(
print::pprust::tt_to_str( print::pprust::tt_to_str(
ast::tt_delim(vec::to_owned(tt)), ast::tt_delim(tt.to_owned()),
get_ident_interner())); get_ident_interner()));
//trivial expression //trivial expression

@ -19,8 +19,6 @@ use parse::token::*;
use parse::token; use parse::token;
use parse; use parse;
use core::vec;
/** /**
* *
* Quasiquoting works via token trees. * Quasiquoting works via token trees.
@ -40,8 +38,6 @@ pub mod rt {
use parse; use parse;
use print::pprust; use print::pprust;
use core::str;
pub use ast::*; pub use ast::*;
pub use parse::token::*; pub use parse::token::*;
pub use parse::new_parser_from_tts; pub use parse::new_parser_from_tts;
@ -128,7 +124,7 @@ pub mod rt {
impl<'self> ToSource for &'self str { impl<'self> ToSource for &'self str {
fn to_source(&self) -> ~str { fn to_source(&self) -> ~str {
let lit = dummy_spanned(ast::lit_str(@str::to_owned(*self))); let lit = dummy_spanned(ast::lit_str(@self.to_owned()));
pprust::lit_to_str(@lit) pprust::lit_to_str(@lit)
} }
} }
@ -661,7 +657,7 @@ fn expand_tts(cx: @ExtCtxt,
let p = parse::new_parser_from_tts( let p = parse::new_parser_from_tts(
cx.parse_sess(), cx.parse_sess(),
cx.cfg(), cx.cfg(),
vec::to_owned(tts) tts.to_owned()
); );
*p.quote_depth += 1u; *p.quote_depth += 1u;
let tts = p.parse_all_token_trees(); let tts = p.parse_all_token_trees();

@ -18,8 +18,6 @@ use parse::lexer::{new_tt_reader, reader};
use parse::parser::Parser; use parse::parser::Parser;
use parse::token::keywords; use parse::token::keywords;
use core::vec;
pub fn expand_trace_macros(cx: @ExtCtxt, pub fn expand_trace_macros(cx: @ExtCtxt,
sp: span, sp: span,
tt: &[ast::token_tree]) tt: &[ast::token_tree])
@ -29,7 +27,7 @@ pub fn expand_trace_macros(cx: @ExtCtxt,
let tt_rdr = new_tt_reader( let tt_rdr = new_tt_reader(
copy cx.parse_sess().span_diagnostic, copy cx.parse_sess().span_diagnostic,
None, None,
vec::to_owned(tt) tt.to_owned()
); );
let rdr = tt_rdr as @reader; let rdr = tt_rdr as @reader;
let rust_parser = Parser( let rust_parser = Parser(

@ -26,7 +26,6 @@ use parse::token::{FAT_ARROW, SEMI, nt_matchers, nt_tt};
use print; use print;
use core::io; use core::io;
use core::vec;
pub fn add_new_extension(cx: @ExtCtxt, pub fn add_new_extension(cx: @ExtCtxt,
sp: span, sp: span,
@ -84,7 +83,7 @@ pub fn add_new_extension(cx: @ExtCtxt,
io::println(fmt!("%s! { %s }", io::println(fmt!("%s! { %s }",
cx.str_of(name), cx.str_of(name),
print::pprust::tt_to_str( print::pprust::tt_to_str(
ast::tt_delim(vec::to_owned(arg)), ast::tt_delim(arg.to_owned()),
get_ident_interner()))); get_ident_interner())));
} }
@ -101,7 +100,7 @@ pub fn add_new_extension(cx: @ExtCtxt,
let arg_rdr = new_tt_reader( let arg_rdr = new_tt_reader(
s_d, s_d,
None, None,
vec::to_owned(arg) arg.to_owned()
) as @reader; ) as @reader;
match parse(cx.parse_sess(), cx.cfg(), arg_rdr, *mtcs) { match parse(cx.parse_sess(), cx.cfg(), arg_rdr, *mtcs) {
success(named_matches) => { success(named_matches) => {

@ -193,7 +193,7 @@ pub fn to_str(in: @ident_interner, t: &Token) -> ~str {
} }
body body
} }
LIT_STR(ref s) => { ~"\"" + str::escape_default(*ident_to_str(s)) + "\"" } LIT_STR(ref s) => { ~"\"" + ident_to_str(s).escape_default() + "\"" }
/* Name components */ /* Name components */
IDENT(s, _) => copy *in.get(s.name), IDENT(s, _) => copy *in.get(s.name),

@ -31,7 +31,6 @@ use print::pprust;
use core::char; use core::char;
use core::io; use core::io;
use core::str;
use core::u64; use core::u64;
use core::uint; use core::uint;
use core::iterator::IteratorUtil; use core::iterator::IteratorUtil;
@ -2113,7 +2112,7 @@ pub fn print_comment(s: @ps, cmnt: &comments::cmnt) {
pub fn print_string(s: @ps, st: &str) { pub fn print_string(s: @ps, st: &str) {
word(s.s, "\""); word(s.s, "\"");
word(s.s, str::escape_default(st)); word(s.s, st.escape_default());
word(s.s, "\""); word(s.s, "\"");
} }

@ -93,7 +93,7 @@ impl RepeatFasta {
let stdout = self.stdout; let stdout = self.stdout;
let alu_len = self.alu.len(); let alu_len = self.alu.len();
let mut buf = vec::from_elem(alu_len + LINE_LEN, 0u8); let mut buf = vec::from_elem(alu_len + LINE_LEN, 0u8);
let alu: &[u8] = str::byte_slice_no_callback(self.alu); let alu: &[u8] = self.alu.as_bytes_with_null();
copy_memory(buf, alu, alu_len); copy_memory(buf, alu, alu_len);
copy_memory(vec::mut_slice(buf, alu_len, buf.len()), copy_memory(vec::mut_slice(buf, alu_len, buf.len()),

@ -81,7 +81,8 @@ fn sort_and_fmt(mm: &HashMap<~[u8], uint>, total: uint) -> ~str {
fn find(mm: &HashMap<~[u8], uint>, key: ~str) -> uint { fn find(mm: &HashMap<~[u8], uint>, key: ~str) -> uint {
// FIXME: #4318 Instead of to_ascii and to_str_ascii, could use // FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
// to_ascii_consume and to_str_consume to not do a unnecessary copy. // to_ascii_consume and to_str_consume to not do a unnecessary copy.
match mm.find(&str::to_bytes(key.to_ascii().to_lower().to_str_ascii())) { let key = key.to_ascii().to_lower().to_str_ascii();
match mm.find_equiv(&key.as_bytes()) {
option::None => { return 0u; } option::None => { return 0u; }
option::Some(&num) => { return num; } option::Some(&num) => { return num; }
} }
@ -208,10 +209,10 @@ fn main() {
// process the sequence for k-mers // process the sequence for k-mers
(_, true) => { (_, true) => {
let line_bytes = str::to_bytes(line); let line_bytes = line.as_bytes();
for sizes.eachi |ii, _sz| { for sizes.eachi |ii, _sz| {
let mut lb = copy line_bytes; let mut lb = line_bytes.to_owned();
to_child[ii].send(lb); to_child[ii].send(lb);
} }
} }

@ -218,8 +218,7 @@ fn read_stdin() -> ~[u8] {
fstat(fileno(stdin), &mut st); fstat(fileno(stdin), &mut st);
let mut buf = vec::from_elem(st.st_size as uint, 0); let mut buf = vec::from_elem(st.st_size as uint, 0);
let header = str::byte_slice_no_callback(">THREE"); let header = ">THREE".as_bytes();
let header = vec::slice(header, 0, 6);
{ {
let mut window: &mut [u8] = buf; let mut window: &mut [u8] = buf;
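
The dropped `vec::slice(header, 0, 6)` existed only to trim the null byte that the old `byte_slice_no_callback` exposed; `.as_bytes()` is documented by this commit as never including a terminator, so the extra slice becomes redundant. A quick sanity check of that distinction (a sketch, relying on the behaviour stated in the commit message):

~~~
assert_eq!(">THREE".as_bytes().len(), 6);            // no terminator
assert_eq!(">THREE".as_bytes_with_null().len(), 7);  // static strs keep theirs
~~~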

@ -111,8 +111,7 @@ fn main() {
if opts.stress { if opts.stress {
stress(2); stress(2);
} else { } else {
let max = uint::parse_bytes(str::to_bytes(args[1]), let max = uint::parse_bytes(args[1].as_bytes(), 10u).get() as int;
10u).get() as int;
let num_trials = 10; let num_trials = 10;

@ -26,7 +26,7 @@ mod libc {
fn strlen(str: ~str) -> uint { fn strlen(str: ~str) -> uint {
unsafe { unsafe {
// C string is terminated with a zero // C string is terminated with a zero
let bytes = str::to_bytes(str) + ~[0u8]; let bytes = str.as_bytes_with_null_consume();
return libc::my_strlen(vec::raw::to_ptr(bytes)); return libc::my_strlen(vec::raw::to_ptr(bytes));
} }
} }

@ -48,7 +48,7 @@ mod map_reduce {
} }
let (pp, cc) = stream(); let (pp, cc) = stream();
error!("sending find_reducer"); error!("sending find_reducer");
ctrl.send(find_reducer(str::to_bytes(key), cc)); ctrl.send(find_reducer(key.as_bytes().to_owned(), cc));
error!("receiving"); error!("receiving");
let c = pp.recv(); let c = pp.recv();
error!(c); error!(c);

@ -15,6 +15,6 @@ use std::str;
pub fn main() { pub fn main() {
let mut m = HashMap::new(); let mut m = HashMap::new();
m.insert(str::to_bytes(~"foo"), str::to_bytes(~"bar")); m.insert("foo".as_bytes().to_owned(), "bar".as_bytes().to_owned());
error!(m); error!(m);
} }

@ -14,5 +14,5 @@ struct S { f0: ~str, f1: int }
pub fn main() { pub fn main() {
let s = ~"Hello, world!"; let s = ~"Hello, world!";
let _s = S { f0: str::to_owned(s), ..S { f0: s, f1: 23 } }; let _s = S { f0: s.to_owned(), ..S { f0: s, f1: 23 } };
} }

@ -14,5 +14,5 @@ struct S { f0: ~str, f1: ~str }
pub fn main() { pub fn main() {
let s = ~"Hello, world!"; let s = ~"Hello, world!";
let _s = S { f1: str::to_owned(s), f0: s }; let _s = S { f1: s.to_owned(), f0: s };
} }

@ -21,24 +21,24 @@ pub fn main() {
let schs: ~[char] = s.iter().collect(); let schs: ~[char] = s.iter().collect();
assert!(s.len() == 10u); assert!(s.len() == 10u);
assert!(str::char_len(s) == 4u); assert!(s.char_len() == 4u);
assert!(schs.len() == 4u); assert!(schs.len() == 4u);
assert!(str::from_chars(schs) == s); assert!(str::from_chars(schs) == s);
assert!(s.char_at(0u) == 'e'); assert!(s.char_at(0u) == 'e');
assert!(s.char_at(1u) == 'é'); assert!(s.char_at(1u) == 'é');
assert!((str::is_utf8(str::to_bytes(s)))); assert!((str::is_utf8(s.as_bytes())));
assert!((!str::is_utf8(~[0x80_u8]))); assert!((!str::is_utf8(~[0x80_u8])));
assert!((!str::is_utf8(~[0xc0_u8]))); assert!((!str::is_utf8(~[0xc0_u8])));
assert!((!str::is_utf8(~[0xc0_u8, 0x10_u8]))); assert!((!str::is_utf8(~[0xc0_u8, 0x10_u8])));
let mut stack = ~"a×c€"; let mut stack = ~"a×c€";
assert_eq!(str::pop_char(&mut stack), '€'); assert_eq!(stack.pop_char(), '€');
assert_eq!(str::pop_char(&mut stack), 'c'); assert_eq!(stack.pop_char(), 'c');
stack.push_char('u'); stack.push_char('u');
assert!(stack == ~"a×u"); assert!(stack == ~"a×u");
assert_eq!(str::shift_char(&mut stack), 'a'); assert_eq!(stack.shift_char(), 'a');
assert_eq!(str::shift_char(&mut stack), '×'); assert_eq!(stack.shift_char(), '×');
str::unshift_char(&mut stack, 'ß'); stack.unshift_char('ß');
assert!(stack == ~"ßu"); assert!(stack == ~"ßu");
} }