auto merge of #12768 : pnkfelix/rust/fsk-devecing, r=pnkfelix

Change `~[T]` to `Vec<T>` in librustc. Rebased and amended version of PR #12716.
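
The rewrite is mechanical, and the same few substitutions repeat through the whole diff: `~[x, y]` literals become `vec!(x, y)`, empty `~[]` becomes `Vec::new()`, and signatures change from `~[T]` to `Vec<T>`. A minimal sketch of the pattern, using a hypothetical helper (not code from this PR) written against the pre-1.0 `std::vec_ng::Vec` that librustc uses here:

```rust
use std::vec_ng::Vec;

// Hypothetical example of the ~[T] -> Vec<T> rewrite; the function name and
// flag values are made up for illustration.
//
// Before:
//     pub fn rpath_flags(prefix: &str) -> ~[~str] {
//         let mut flags = ~[prefix.to_owned()];
//         flags.push(~"-Wl,-rpath,/usr/local/lib");
//         flags
//     }
//
// After:
pub fn rpath_flags(prefix: &str) -> Vec<~str> {
    let mut flags = vec!(prefix.to_owned());    // ~[x] literal -> vec!(x)
    flags.push(~"-Wl,-rpath,/usr/local/lib");   // push() is unchanged
    flags
}
```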

Original author (or perhaps I should say meta-author) was @pcwalton, as is reflected in the commits.

I clean up!  :)
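
The other recurring change is at API boundaries that still traffic in `~[T]` or `&[T]`: a `Vec<T>` is borrowed as a slice with `.as_slice()` before being passed on, and an owned `~[T]` coming out of an older API is converted with `.move_iter().collect()`. A hedged sketch of that bridging, again with made-up names rather than code from this PR:

```rust
use std::vec_ng::Vec;

// Hypothetical old-style API that still hands back an owned ~[~str].
fn old_api() -> ~[~str] {
    ~[~"-L/usr/lib", ~"-lfoo"]
}

// Hypothetical slice-taking consumer, unchanged by the migration.
fn log_args(args: &[~str]) {
    for a in args.iter() {
        println!("{}", *a);
    }
}

fn main() {
    // ~[T] -> Vec<T>: move the owned elements across with move_iter().
    let args: Vec<~str> = old_api().move_iter().collect();
    // Vec<T> -> &[T]: borrow a slice view where a slice is expected.
    log_args(args.as_slice());
}
```
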
bors 2014-03-08 13:01:55 -08:00
commit 0017056105
106 changed files with 2053 additions and 1514 deletions

View File

@@ -16,6 +16,7 @@ use metadata::filesearch;
 use lib::llvm::{ArchiveRef, llvm};

 use std::cast;
+use std::vec_ng::Vec;
 use std::io::fs;
 use std::io;
 use std::libc;
@@ -41,7 +42,7 @@ fn run_ar(sess: Session, args: &str, cwd: Option<&Path>,
           paths: &[&Path]) -> ProcessOutput {
     let ar = get_ar_prog(sess);
-    let mut args = ~[args.to_owned()];
+    let mut args = vec!(args.to_owned());
     let mut paths = paths.iter().map(|p| p.as_str().unwrap().to_owned());
     args.extend(&mut paths);
     debug!("{} {}", ar, args.connect(" "));
@@ -89,7 +90,7 @@ impl Archive {
     }

     /// Read a file in the archive
-    pub fn read(&self, file: &str) -> ~[u8] {
+    pub fn read(&self, file: &str) -> Vec<u8> {
         // Apparently if "ar p" is used on windows, it generates a corrupt file
         // which has bad headers and LLVM will immediately choke on it
         if cfg!(windows) && cfg!(windows) { // FIXME(#10734) double-and
@@ -97,9 +98,17 @@ impl Archive {
             let archive = os::make_absolute(&self.dst);
             run_ar(self.sess, "x", Some(loc.path()), [&archive,
                                                       &Path::new(file)]);
-            fs::File::open(&loc.path().join(file)).read_to_end().unwrap()
+            let result: Vec<u8> =
+                fs::File::open(&loc.path().join(file)).read_to_end()
+                                                       .unwrap()
+                                                       .move_iter()
+                                                       .collect();
+            result
         } else {
-            run_ar(self.sess, "p", None, [&self.dst, &Path::new(file)]).output
+            run_ar(self.sess,
+                   "p",
+                   None,
+                   [&self.dst, &Path::new(file)]).output.move_iter().collect()
         }
     }
@@ -119,11 +128,11 @@ impl Archive {
                            lto: bool) -> io::IoResult<()> {
         let object = format!("{}.o", name);
         let bytecode = format!("{}.bc", name);
-        let mut ignore = ~[METADATA_FILENAME, bytecode.as_slice()];
+        let mut ignore = vec!(METADATA_FILENAME, bytecode.as_slice());
         if lto {
             ignore.push(object.as_slice());
         }
-        self.add_archive(rlib, name, ignore)
+        self.add_archive(rlib, name, ignore.as_slice())
     }

     /// Adds an arbitrary file to this archive
@@ -143,7 +152,7 @@ impl Archive {
     }

     /// Lists all files in an archive
-    pub fn files(&self) -> ~[~str] {
+    pub fn files(&self) -> Vec<~str> {
         let output = run_ar(self.sess, "t", None, [&self.dst]);
         let output = str::from_utf8(output.output).unwrap();
         // use lines_any because windows delimits output with `\r\n` instead of
@@ -168,7 +177,7 @@ impl Archive {
         // all SYMDEF files as these are just magical placeholders which get
         // re-created when we make a new archive anyway.
         let files = try!(fs::readdir(loc.path()));
-        let mut inputs = ~[];
+        let mut inputs = Vec::new();
         for file in files.iter() {
             let filename = file.filename_str().unwrap();
             if skip.iter().any(|s| *s == filename) { continue }
@@ -182,7 +191,7 @@ impl Archive {
         if inputs.len() == 0 { return Ok(()) }

         // Finally, add all the renamed files to this archive
-        let mut args = ~[&self.dst];
+        let mut args = vec!(&self.dst);
         args.extend(&mut inputs.iter());
         run_ar(self.sess, "r", None, args.as_slice());
         Ok(())

View File

@@ -15,9 +15,9 @@ use syntax::abi;
 pub fn get_target_strs(target_triple: ~str, target_os: abi::Os) -> target_strs::t {
     let cc_args = if target_triple.contains("thumb") {
-        ~[~"-mthumb"]
+        vec!(~"-mthumb")
     } else {
-        ~[~"-marm"]
+        vec!(~"-marm")
     };
     return target_strs::t {
         module_asm: ~"",

View File

@@ -34,6 +34,7 @@ use std::str;
 use std::io;
 use std::io::Process;
 use std::io::fs;
+use std::vec_ng::Vec;
 use flate;
 use serialize::hex::ToHex;
 use extra::tempfile::TempDir;
@@ -106,6 +107,7 @@ pub mod write {
     use std::io::Process;
     use std::libc::{c_uint, c_int};
     use std::str;
+    use std::vec_ng::Vec;

     // On android, we by default compile for armv7 processors. This enables
     // things like double word CAS instructions (rather than emulating them)
@@ -222,7 +224,7 @@ pub mod write {
         if sess.lto() {
             time(sess.time_passes(), "all lto passes", (), |()|
-                 lto::run(sess, llmod, tm, trans.reachable));
+                 lto::run(sess, llmod, tm, trans.reachable.as_slice()));

             if sess.opts.cg.save_temps {
                 output.with_extension("lto.bc").with_c_str(|buf| {
@@ -363,8 +365,8 @@ pub mod write {
         let vectorize_slp = !sess.opts.cg.no_vectorize_slp &&
                             sess.opts.optimize == session::Aggressive;

-        let mut llvm_c_strs = ~[];
-        let mut llvm_args = ~[];
+        let mut llvm_c_strs = Vec::new();
+        let mut llvm_args = Vec::new();
         {
             let add = |arg: &str| {
                 let s = arg.to_c_str();
@@ -781,8 +783,8 @@ fn remove(sess: Session, path: &Path) {
 pub fn link_binary(sess: Session,
                    trans: &CrateTranslation,
                    outputs: &OutputFilenames,
-                   id: &CrateId) -> ~[Path] {
-    let mut out_filenames = ~[];
+                   id: &CrateId) -> Vec<Path> {
+    let mut out_filenames = Vec::new();
     let crate_types = sess.crate_types.borrow();
     for &crate_type in crate_types.get().iter() {
         let out_file = link_binary_output(sess, trans, crate_type, outputs, id);
@@ -931,7 +933,8 @@ fn link_rlib(sess: Session,
     // the same filename for metadata (stomping over one another)
     let tmpdir = TempDir::new("rustc").expect("needs a temp dir");
     let metadata = tmpdir.path().join(METADATA_FILENAME);
-    match fs::File::create(&metadata).write(trans.metadata) {
+    match fs::File::create(&metadata).write(trans.metadata
+                                                 .as_slice()) {
         Ok(..) => {}
         Err(e) => {
             sess.err(format!("failed to write {}: {}",
@@ -1035,7 +1038,7 @@ fn link_natively(sess: Session, dylib: bool, obj_filename: &Path,
     // Invoke the system linker
     debug!("{} {}", cc_prog, cc_args.connect(" "));
     let prog = time(sess.time_passes(), "running linker", (), |()|
-                    Process::output(cc_prog, cc_args));
+                    Process::output(cc_prog, cc_args.as_slice()));
     match prog {
         Ok(prog) => {
             if !prog.status.success() {
@@ -1071,7 +1074,7 @@ fn link_args(sess: Session,
              dylib: bool,
              tmpdir: &Path,
              obj_filename: &Path,
-             out_filename: &Path) -> ~[~str] {
+             out_filename: &Path) -> Vec<~str> {

     // The default library location, we need this to find the runtime.
     // The location of crates will be determined as needed.
@@ -1079,7 +1082,7 @@ fn link_args(sess: Session,
     let lib_path = sess.filesearch.get_target_lib_path();
     let stage: ~str = ~"-L" + lib_path.as_str().unwrap();

-    let mut args = ~[stage];
+    let mut args = vec!(stage);

     // FIXME (#9639): This needs to handle non-utf8 paths
     args.push_all([
@@ -1198,7 +1201,7 @@ fn link_args(sess: Session,
     // where extern libraries might live, based on the
     // addl_lib_search_paths
     if !sess.opts.cg.no_rpath {
-        args.push_all(rpath::get_rpath_flags(sess, out_filename));
+        args.push_all(rpath::get_rpath_flags(sess, out_filename).as_slice());
     }

     // Stack growth requires statically linking a __morestack function
@@ -1210,7 +1213,7 @@ fn link_args(sess: Session,
     // Finally add all the linker arguments provided on the command line along
     // with any #[link_args] attributes found inside the crate
-    args.push_all(sess.opts.cg.link_args);
+    args.push_all(sess.opts.cg.link_args.as_slice());
     let used_link_args = sess.cstore.get_used_link_args();
     let used_link_args = used_link_args.borrow();
     for arg in used_link_args.get().iter() {
@@ -1230,7 +1233,7 @@ fn link_args(sess: Session,
 // Also note that the native libraries linked here are only the ones located
 // in the current crate. Upstream crates with native library dependencies
 // may have their native library pulled in above.
-fn add_local_native_libraries(args: &mut ~[~str], sess: Session) {
+fn add_local_native_libraries(args: &mut Vec<~str> , sess: Session) {
     let addl_lib_search_paths = sess.opts.addl_lib_search_paths.borrow();
     for path in addl_lib_search_paths.get().iter() {
         // FIXME (#9639): This needs to handle non-utf8 paths
@@ -1263,7 +1266,7 @@ fn add_local_native_libraries(args: &mut ~[~str], sess: Session) {
 // Rust crates are not considered at all when creating an rlib output. All
 // dependencies will be linked when producing the final output (instead of
 // the intermediate rlib version)
-fn add_upstream_rust_crates(args: &mut ~[~str], sess: Session,
+fn add_upstream_rust_crates(args: &mut Vec<~str> , sess: Session,
                             dylib: bool, tmpdir: &Path) {

     // As a limitation of the current implementation, we require that everything
@@ -1347,7 +1350,7 @@ fn add_upstream_rust_crates(args: &mut ~[~str], sess: Session,
     // returning `None` if not all libraries could be found with that
     // preference.
     fn get_deps(cstore: &cstore::CStore, preference: cstore::LinkagePreference)
-                -> Option<~[(ast::CrateNum, Path)]>
+                -> Option<Vec<(ast::CrateNum, Path)> >
     {
         let crates = cstore.get_used_crates(preference);
         if crates.iter().all(|&(_, ref p)| p.is_some()) {
@@ -1358,8 +1361,8 @@ fn add_upstream_rust_crates(args: &mut ~[~str], sess: Session,
     }

     // Adds the static "rlib" versions of all crates to the command line.
-    fn add_static_crates(args: &mut ~[~str], sess: Session, tmpdir: &Path,
-                         crates: ~[(ast::CrateNum, Path)]) {
+    fn add_static_crates(args: &mut Vec<~str> , sess: Session, tmpdir: &Path,
+                         crates: Vec<(ast::CrateNum, Path)> ) {
         for (cnum, cratepath) in crates.move_iter() {
             // When performing LTO on an executable output, all of the
             // bytecode from the upstream libraries has already been
@@ -1405,8 +1408,8 @@ fn add_upstream_rust_crates(args: &mut ~[~str], sess: Session,
     }

     // Same thing as above, but for dynamic crates instead of static crates.
-    fn add_dynamic_crates(args: &mut ~[~str], sess: Session,
-                          crates: ~[(ast::CrateNum, Path)]) {
+    fn add_dynamic_crates(args: &mut Vec<~str> , sess: Session,
+                          crates: Vec<(ast::CrateNum, Path)> ) {
         // If we're performing LTO, then it should have been previously required
         // that all upstream rust dependencies were available in an rlib format.
         assert!(!sess.lto());
@@ -1440,7 +1443,7 @@ fn add_upstream_rust_crates(args: &mut ~[~str], sess: Session,
 // generic function calls a native function, then the generic function must
 // be instantiated in the target crate, meaning that the native symbol must
 // also be resolved in the target crate.
-fn add_upstream_native_libraries(args: &mut ~[~str], sess: Session) {
+fn add_upstream_native_libraries(args: &mut Vec<~str> , sess: Session) {
     let cstore = sess.cstore;
     cstore.iter_crate_data(|cnum, _| {
         let libs = csearch::get_native_libraries(cstore, cnum);

View File

@@ -11,6 +11,7 @@
 use back::target_strs;
 use driver::session::sess_os_to_meta_os;
 use metadata::loader::meta_section_name;
+use std::vec_ng::Vec;
 use syntax::abi;

 pub fn get_target_strs(target_triple: ~str, target_os: abi::Os) -> target_strs::t {
@@ -63,6 +64,6 @@ pub fn get_target_strs(target_triple: ~str, target_os: abi::Os) -> target_strs::
         target_triple: target_triple,

-        cc_args: ~[],
+        cc_args: Vec::new(),
     };
 }

View File

@@ -15,21 +15,22 @@ use metadata::filesearch;
 use collections::HashSet;
 use std::{os, vec};
+use std::vec_ng::Vec;
 use syntax::abi;

 fn not_win32(os: abi::Os) -> bool {
     os != abi::OsWin32
 }

-pub fn get_rpath_flags(sess: session::Session, out_filename: &Path) -> ~[~str] {
+pub fn get_rpath_flags(sess: session::Session, out_filename: &Path) -> Vec<~str> {
     let os = sess.targ_cfg.os;

     // No rpath on windows
     if os == abi::OsWin32 {
-        return ~[];
+        return Vec::new();
     }

-    let mut flags = ~[];
+    let mut flags = Vec::new();

     if sess.targ_cfg.os == abi::OsFreebsd {
         flags.push_all([~"-Wl,-rpath,/usr/local/lib/gcc46",
@@ -49,7 +50,7 @@ pub fn get_rpath_flags(sess: session::Session, out_filename: &Path) -> ~[~str] {
     let rpaths = get_rpaths(os, sysroot, output, libs,
                             sess.opts.target_triple);
-    flags.push_all(rpaths_to_flags(rpaths));
+    flags.push_all(rpaths_to_flags(rpaths.as_slice()).as_slice());
     flags
 }
@@ -60,8 +61,8 @@ fn get_sysroot_absolute_rt_lib(sess: session::Session) -> Path {
     p
 }

-pub fn rpaths_to_flags(rpaths: &[~str]) -> ~[~str] {
-    let mut ret = ~[];
+pub fn rpaths_to_flags(rpaths: &[~str]) -> Vec<~str> {
+    let mut ret = Vec::new();
     for rpath in rpaths.iter() {
         ret.push("-Wl,-rpath," + *rpath);
     }
@@ -72,7 +73,7 @@ fn get_rpaths(os: abi::Os,
               sysroot: &Path,
               output: &Path,
               libs: &[Path],
-              target_triple: &str) -> ~[~str] {
+              target_triple: &str) -> Vec<~str> {
     debug!("sysroot: {}", sysroot.display());
     debug!("output: {}", output.display());
     debug!("libs:");
@@ -91,7 +92,7 @@ fn get_rpaths(os: abi::Os,
     let abs_rpaths = get_absolute_rpaths(libs);

     // And a final backup rpath to the global library location.
-    let fallback_rpaths = ~[get_install_prefix_rpath(target_triple)];
+    let fallback_rpaths = vec!(get_install_prefix_rpath(target_triple));

     fn log_rpaths(desc: &str, rpaths: &[~str]) {
         debug!("{} rpaths:", desc);
@@ -100,22 +101,22 @@ fn get_rpaths(os: abi::Os,
         }
     }

-    log_rpaths("relative", rel_rpaths);
-    log_rpaths("absolute", abs_rpaths);
-    log_rpaths("fallback", fallback_rpaths);
+    log_rpaths("relative", rel_rpaths.as_slice());
+    log_rpaths("absolute", abs_rpaths.as_slice());
+    log_rpaths("fallback", fallback_rpaths.as_slice());

     let mut rpaths = rel_rpaths;
-    rpaths.push_all(abs_rpaths);
-    rpaths.push_all(fallback_rpaths);
+    rpaths.push_all(abs_rpaths.as_slice());
+    rpaths.push_all(fallback_rpaths.as_slice());

     // Remove duplicates
-    let rpaths = minimize_rpaths(rpaths);
+    let rpaths = minimize_rpaths(rpaths.as_slice());
     return rpaths;
 }

 fn get_rpaths_relative_to_output(os: abi::Os,
                                  output: &Path,
-                                 libs: &[Path]) -> ~[~str] {
+                                 libs: &[Path]) -> Vec<~str> {
     libs.iter().map(|a| get_rpath_relative_to_output(os, output, a)).collect()
 }
@@ -145,7 +146,7 @@ pub fn get_rpath_relative_to_output(os: abi::Os,
     prefix+"/"+relative.as_str().expect("non-utf8 component in path")
 }

-fn get_absolute_rpaths(libs: &[Path]) -> ~[~str] {
+fn get_absolute_rpaths(libs: &[Path]) -> Vec<~str> {
     libs.iter().map(|a| get_absolute_rpath(a)).collect()
 }
@@ -167,9 +168,9 @@ pub fn get_install_prefix_rpath(target_triple: &str) -> ~str {
     path.as_str().expect("non-utf8 component in rpath").to_owned()
 }

-pub fn minimize_rpaths(rpaths: &[~str]) -> ~[~str] {
+pub fn minimize_rpaths(rpaths: &[~str]) -> Vec<~str> {
     let mut set = HashSet::new();
-    let mut minimized = ~[];
+    let mut minimized = Vec::new();
     for rpath in rpaths.iter() {
         if set.insert(rpath.as_slice()) {
             minimized.push(rpath.clone());
@@ -190,7 +191,7 @@ mod test {
     #[test]
     fn test_rpaths_to_flags() {
         let flags = rpaths_to_flags([~"path1", ~"path2"]);
-        assert_eq!(flags, ~[~"-Wl,-rpath,path1", ~"-Wl,-rpath,path2"]);
+        assert_eq!(flags, vec!(~"-Wl,-rpath,path1", ~"-Wl,-rpath,path2"));
     }

     #[test]

View File

@@ -10,10 +10,12 @@
 #[allow(non_camel_case_types)];

+use std::vec_ng::Vec;
+
 pub struct t {
     module_asm: ~str,
     meta_sect_name: ~str,
     data_layout: ~str,
     target_triple: ~str,
-    cc_args: ~[~str],
+    cc_args: Vec<~str> ,
 }

View File

@@ -46,6 +46,6 @@ pub fn get_target_strs(target_triple: ~str, target_os: abi::Os) -> target_strs::
         target_triple: target_triple,

-        cc_args: ~[~"-m32"],
+        cc_args: vec!(~"-m32"),
     };
 }

View File

@@ -54,6 +54,6 @@ pub fn get_target_strs(target_triple: ~str, target_os: abi::Os) -> target_strs::
         target_triple: target_triple,

-        cc_args: ~[~"-m64"],
+        cc_args: vec!(~"-m64"),
     };
 }

View File

@@ -11,8 +11,8 @@
 use back::link;
 use back::{arm, x86, x86_64, mips};
-use driver::session::{Aggressive, CrateTypeExecutable, FullDebugInfo, LimitedDebugInfo,
-                      NoDebugInfo};
+use driver::session::{Aggressive, CrateTypeExecutable, CrateType,
+                      FullDebugInfo, LimitedDebugInfo, NoDebugInfo};
 use driver::session::{Session, Session_, No, Less, Default};
 use driver::session;
 use front;
@@ -36,7 +36,6 @@ use std::io;
 use std::io::fs;
 use std::io::MemReader;
 use std::os;
-use std::vec;
 use std::vec_ng::Vec;
 use std::vec_ng;
 use collections::HashMap;
@@ -145,7 +144,7 @@ pub fn build_configuration(sess: Session) -> ast::CrateConfig {
 }

 // Convert strings provided as --cfg [cfgspec] into a crate_cfg
-fn parse_cfgspecs(cfgspecs: ~[~str])
+fn parse_cfgspecs(cfgspecs: Vec<~str> )
                   -> ast::CrateConfig {
     cfgspecs.move_iter().map(|s| {
         let sess = parse::new_parse_sess();
@@ -399,8 +398,8 @@ pub struct CrateTranslation {
     module: ModuleRef,
     metadata_module: ModuleRef,
     link: LinkMeta,
-    metadata: ~[u8],
-    reachable: ~[~str],
+    metadata: Vec<u8> ,
+    reachable: Vec<~str> ,
 }

 /// Run the translation phase to LLVM, after which the AST and analysis can
@@ -434,7 +433,7 @@ pub fn phase_5_run_llvm_passes(sess: Session,
         time(sess.time_passes(), "LLVM passes", (), |_|
             link::write::run_passes(sess,
                                     trans,
-                                    sess.opts.output_types,
+                                    sess.opts.output_types.as_slice(),
                                     outputs));
     }
 }
@@ -489,7 +488,7 @@ fn write_out_deps(sess: Session,
                   krate: &ast::Crate) -> io::IoResult<()> {
     let id = link::find_crate_id(krate.attrs.as_slice(), outputs);

-    let mut out_filenames = ~[];
+    let mut out_filenames = Vec::new();
     for output_type in sess.opts.output_types.iter() {
         let file = outputs.path(*output_type);
         match *output_type {
@@ -524,7 +523,7 @@ fn write_out_deps(sess: Session,
     // Build a list of files used to compile the output and
     // write Makefile-compatible dependency rules
-    let files: ~[~str] = {
+    let files: Vec<~str> = {
         let files = sess.codemap.files.borrow();
         files.get()
              .iter()
@@ -767,18 +766,21 @@ pub fn host_triple() -> ~str {
 pub fn build_session_options(matches: &getopts::Matches)
                              -> @session::Options {
-    let crate_types = matches.opt_strs("crate-type").flat_map(|s| {
-        s.split(',').map(|part| {
-            match part {
+    let mut crate_types: Vec<CrateType> = Vec::new();
+    let unparsed_crate_types = matches.opt_strs("crate-type");
+    for unparsed_crate_type in unparsed_crate_types.iter() {
+        for part in unparsed_crate_type.split(',') {
+            let new_part = match part {
                 "lib" => session::default_lib_output(),
                 "rlib" => session::CrateTypeRlib,
                 "staticlib" => session::CrateTypeStaticlib,
                 "dylib" => session::CrateTypeDylib,
                 "bin" => session::CrateTypeExecutable,
                 _ => early_error(format!("unknown crate type: `{}`", part))
-            }
-        }).collect()
-    });
+            };
+            crate_types.push(new_part)
+        }
+    }

     let parse_only = matches.opt_present("parse-only");
     let no_trans = matches.opt_present("no-trans");
@@ -786,15 +788,17 @@ pub fn build_session_options(matches: &getopts::Matches)
     let lint_levels = [lint::allow, lint::warn,
                        lint::deny, lint::forbid];
-    let mut lint_opts = ~[];
+    let mut lint_opts = Vec::new();
     let lint_dict = lint::get_lint_dict();
     for level in lint_levels.iter() {
         let level_name = lint::level_to_str(*level);

         let level_short = level_name.slice_chars(0, 1);
         let level_short = level_short.to_ascii().to_upper().into_str();
-        let flags = vec::append(matches.opt_strs(level_short),
-                                matches.opt_strs(level_name));
+        let flags = vec_ng::append(matches.opt_strs(level_short)
                                           .move_iter()
                                           .collect(),
+                                   matches.opt_strs(level_name));
         for lint_name in flags.iter() {
             let lint_name = lint_name.replace("-", "_");
             match lint_dict.find_equiv(&lint_name) {
@@ -828,23 +832,24 @@ pub fn build_session_options(matches: &getopts::Matches)
         unsafe { llvm::LLVMSetDebug(1); }
     }

-    let mut output_types = if parse_only || no_trans {
-        ~[]
-    } else {
-        matches.opt_strs("emit").flat_map(|s| {
-            s.split(',').map(|part| {
-                match part.as_slice() {
+    let mut output_types = Vec::new();
+    if !parse_only && !no_trans {
+        let unparsed_output_types = matches.opt_strs("emit");
+        for unparsed_output_type in unparsed_output_types.iter() {
+            for part in unparsed_output_type.split(',') {
+                let output_type = match part.as_slice() {
                     "asm" => link::OutputTypeAssembly,
                     "ir" => link::OutputTypeLlvmAssembly,
                     "bc" => link::OutputTypeBitcode,
                     "obj" => link::OutputTypeObject,
                     "link" => link::OutputTypeExe,
                     _ => early_error(format!("unknown emission type: `{}`", part))
-                }
-            }).collect()
-        })
+                };
+                output_types.push(output_type)
+            }
+        }
     };
-    output_types.sort();
+    output_types.as_mut_slice().sort();
     output_types.dedup();
     if output_types.len() == 0 {
         output_types.push(link::OutputTypeExe);
@@ -890,7 +895,7 @@ pub fn build_session_options(matches: &getopts::Matches)
         Path::new(s.as_slice())
     }).move_iter().collect();

-    let cfg = parse_cfgspecs(matches.opt_strs("cfg"));
+    let cfg = parse_cfgspecs(matches.opt_strs("cfg").move_iter().collect());
     let test = matches.opt_present("test");
     let write_dependency_info = (matches.opt_present("dep-info"),
                                  matches.opt_str("dep-info").map(|p| Path::new(p)));
@@ -1005,7 +1010,7 @@ pub fn build_session_(sopts: @session::Options,
         working_dir: os::getcwd(),
         lints: RefCell::new(HashMap::new()),
         node_id: Cell::new(1),
-        crate_types: @RefCell::new(~[]),
+        crate_types: @RefCell::new(Vec::new()),
         features: front::feature_gate::Features::new()
     }
 }
@@ -1026,8 +1031,8 @@ pub fn parse_pretty(sess: Session, name: &str) -> PpMode {
 }

 // rustc command line options
-pub fn optgroups() -> ~[getopts::OptGroup] {
-    ~[
+pub fn optgroups() -> Vec<getopts::OptGroup> {
+    vec!(
         optflag("h", "help", "Display this message"),
         optmulti("", "cfg", "Configure the compilation environment", "SPEC"),
         optmulti("L", "", "Add a directory to the library search path", "PATH"),
@@ -1071,8 +1076,7 @@ pub fn optgroups() -> ~[getopts::OptGroup] {
         optmulti("F", "forbid", "Set lint forbidden", "OPT"),
         optmulti("C", "codegen", "Set a codegen option", "OPT[=VALUE]"),
         optmulti("Z", "", "Set internal debugging options", "FLAG"),
-        optflag( "v", "version", "Print version info and exit"),
-    ]
+        optflag( "v", "version", "Print version info and exit"))
 }

 pub struct OutputFilenames {
@@ -1188,7 +1192,7 @@ mod test {
     #[test]
     fn test_switch_implies_cfg_test() {
         let matches =
-            &match getopts([~"--test"], optgroups()) {
+            &match getopts([~"--test"], optgroups().as_slice()) {
               Ok(m) => m,
               Err(f) => fail!("test_switch_implies_cfg_test: {}", f.to_err_msg())
             };
@@ -1203,7 +1207,8 @@ mod test {
     #[test]
     fn test_switch_implies_cfg_test_unless_cfg_test() {
         let matches =
-            &match getopts([~"--test", ~"--cfg=test"], optgroups()) {
+            &match getopts([~"--test", ~"--cfg=test"],
+                           optgroups().as_slice()) {
               Ok(m) => m,
               Err(f) => {
                 fail!("test_switch_implies_cfg_test_unless_cfg_test: {}",

View File

@@ -74,8 +74,8 @@ debugging_opts!(
     0
 )

-pub fn debugging_opts_map() -> ~[(&'static str, &'static str, u64)] {
-    ~[("verbose", "in general, enable more debug printouts", VERBOSE),
+pub fn debugging_opts_map() -> Vec<(&'static str, &'static str, u64)> {
+    vec!(("verbose", "in general, enable more debug printouts", VERBOSE),
      ("time-passes", "measure time of each rustc pass", TIME_PASSES),
      ("count-llvm-insns", "count where LLVM \
                            instrs originate", COUNT_LLVM_INSNS),
@@ -102,8 +102,7 @@ pub fn debugging_opts_map() -> ~[(&'static str, &'static str, u64)] {
                            PRINT_LLVM_PASSES),
      ("lto", "Perform LLVM link-time optimizations", LTO),
      ("ast-json", "Print the AST as JSON and halt", AST_JSON),
-     ("ast-json-noexpand", "Print the pre-expansion AST as JSON and halt", AST_JSON_NOEXPAND),
-    ]
+     ("ast-json-noexpand", "Print the pre-expansion AST as JSON and halt", AST_JSON_NOEXPAND))
 }

 #[deriving(Clone, Eq)]
@@ -125,13 +124,13 @@ pub enum DebugInfoLevel {
 pub struct Options {
     // The crate config requested for the session, which may be combined
     // with additional crate configurations during the compile process
-    crate_types: ~[CrateType],
+    crate_types: Vec<CrateType> ,

     gc: bool,
     optimize: OptLevel,
     debuginfo: DebugInfoLevel,
-    lint_opts: ~[(lint::Lint, lint::level)],
-    output_types: ~[back::link::OutputType],
+    lint_opts: Vec<(lint::Lint, lint::level)> ,
+    output_types: Vec<back::link::OutputType> ,
     // This was mutable for rustpkg, which updates search paths based on the
     // parsed code. It remains mutable in case its replacements wants to use
     // this.
@@ -192,9 +191,9 @@ pub struct Session_ {
     local_crate_source_file: Option<Path>,
     working_dir: Path,
     lints: RefCell<HashMap<ast::NodeId,
-                           ~[(lint::Lint, codemap::Span, ~str)]>>,
+                           Vec<(lint::Lint, codemap::Span, ~str)> >>,
     node_id: Cell<ast::NodeId>,
-    crate_types: @RefCell<~[CrateType]>,
+    crate_types: @RefCell<Vec<CrateType> >,
     features: front::feature_gate::Features
 }
@@ -259,7 +258,7 @@ impl Session_ {
             Some(arr) => { arr.push((lint, sp, msg)); return; }
             None => {}
         }
-        lints.get().insert(id, ~[(lint, sp, msg)]);
+        lints.get().insert(id, vec!((lint, sp, msg)));
     }
     pub fn next_node_id(&self) -> ast::NodeId {
         self.reserve_node_ids(1)
@@ -318,12 +317,12 @@ impl Session_ {
 /// Some reasonable defaults
 pub fn basic_options() -> @Options {
     @Options {
-        crate_types: ~[],
+        crate_types: Vec::new(),
         gc: false,
         optimize: No,
         debuginfo: NoDebugInfo,
-        lint_opts: ~[],
-        output_types: ~[],
+        lint_opts: Vec::new(),
+        output_types: Vec::new(),
         addl_lib_search_paths: @RefCell::new(HashSet::new()),
         maybe_sysroot: None,
         target_triple: host_triple(),
@@ -394,7 +393,8 @@ macro_rules! cgoptions(
         }
     }

-    fn parse_list(slot: &mut ~[~str], v: Option<&str>) -> bool {
+    fn parse_list(slot: &mut ::std::vec_ng::Vec<~str>, v: Option<&str>)
+                  -> bool {
         match v {
             Some(s) => {
                 for s in s.words() {
@@ -414,15 +414,15 @@ cgoptions!(
         "tool to assemble archives with"),
     linker: Option<~str> = (None, parse_opt_string,
         "system linker to link outputs with"),
-    link_args: ~[~str] = (~[], parse_list,
+    link_args: Vec<~str> = (Vec::new(), parse_list,
         "extra arguments to pass to the linker (space separated)"),
     target_cpu: ~str = (~"generic", parse_string,
         "select target processor (llc -mcpu=help for details)"),
     target_feature: ~str = (~"", parse_string,
         "target specific attributes (llc -mattr=help for details)"),
-    passes: ~[~str] = (~[], parse_list,
+    passes: Vec<~str> = (Vec::new(), parse_list,
         "a list of extra LLVM passes to run (space separated)"),
-    llvm_args: ~[~str] = (~[], parse_list,
+    llvm_args: Vec<~str> = (Vec::new(), parse_list,
         "a list of arguments to pass to llvm (space separated)"),
     save_temps: bool = (false, parse_bool,
         "save all temporary output files during compilation"),
@@ -476,11 +476,11 @@ pub fn default_lib_output() -> CrateType {
 }

 pub fn collect_crate_types(session: &Session,
-                           attrs: &[ast::Attribute]) -> ~[CrateType] {
+                           attrs: &[ast::Attribute]) -> Vec<CrateType> {
     // If we're generating a test executable, then ignore all other output
     // styles at all other locations
     if session.opts.test {
-        return ~[CrateTypeExecutable];
+        return vec!(CrateTypeExecutable)
     }
     let mut base = session.opts.crate_types.clone();
     let mut iter = attrs.iter().filter_map(|a| {
@@ -516,7 +516,7 @@ pub fn collect_crate_types(session: &Session,
     if base.len() == 0 {
         base.push(CrateTypeExecutable);
     }
-    base.sort();
+    base.as_mut_slice().sort();
     base.dedup();
     return base;
 }

View File

@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

+use std::vec_ng::Vec;
 use syntax::fold::Folder;
 use syntax::{ast, fold, attr};
 use syntax::codemap;
@@ -58,7 +58,7 @@ fn filter_view_item<'r>(cx: &Context, view_item: &'r ast::ViewItem)
 }

 fn fold_mod(cx: &mut Context, m: &ast::Mod) -> ast::Mod {
-    let filtered_items: ~[&@ast::Item] = m.items.iter()
+    let filtered_items: Vec<&@ast::Item> = m.items.iter()
             .filter(|&a| item_in_cfg(cx, *a))
             .collect();
     let flattened_items = filtered_items.move_iter()
@@ -170,7 +170,7 @@ fn retain_stmt(cx: &Context, stmt: @ast::Stmt) -> bool {
 }

 fn fold_block(cx: &mut Context, b: ast::P<ast::Block>) -> ast::P<ast::Block> {
-    let resulting_stmts: ~[&@ast::Stmt] =
+    let resulting_stmts: Vec<&@ast::Stmt> =
         b.stmts.iter().filter(|&a| retain_stmt(cx, *a)).collect();
     let resulting_stmts = resulting_stmts.move_iter()
         .flat_map(|&stmt| cx.fold_stmt(stmt).move_iter())

View File

@@ -31,6 +31,7 @@ use syntax::parse::token;
 use driver::session::Session;

 use std::cell::Cell;
+use std::vec_ng::Vec;

 /// This is a list of all known features since the beginning of time. This list
 /// can never shrink, it may only be expanded (in order to prevent old programs
@@ -85,7 +86,7 @@ impl Features {
 }

 struct Context {
-    features: ~[&'static str],
+    features: Vec<&'static str> ,
     sess: Session,
 }
@@ -280,7 +281,7 @@ impl Visitor<()> for Context {
 pub fn check_crate(sess: Session, krate: &ast::Crate) {
     let mut cx = Context {
-        features: ~[],
+        features: Vec::new(),
         sess: sess,
     };

View File

@@ -40,7 +40,7 @@ use syntax::util::small_vector::SmallVector;
 struct Test {
     span: Span,
-    path: ~[ast::Ident],
+    path: Vec<ast::Ident> ,
     bench: bool,
     ignore: bool,
     should_fail: bool
@@ -48,9 +48,9 @@ struct Test {
 struct TestCtxt<'a> {
     sess: session::Session,
-    path: RefCell<~[ast::Ident]>,
+    path: RefCell<Vec<ast::Ident> >,
     ext_cx: ExtCtxt<'a>,
-    testfns: RefCell<~[Test]>,
+    testfns: RefCell<Vec<Test> >,
     is_test_crate: bool,
     config: ast::CrateConfig,
 }
@@ -93,7 +93,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> {
             path.get().push(i.ident);
         }
         debug!("current path: {}",
-               ast_util::path_name_i(self.cx.path.get()));
+               ast_util::path_name_i(self.cx.path.get().as_slice()));

         if is_test_fn(&self.cx, i) || is_bench_fn(i) {
             match i.node {
@@ -171,8 +171,8 @@ fn generate_test_harness(sess: session::Session, krate: ast::Crate)
             loader: loader,
             deriving_hash_type_parameter: false,
         }),
-        path: RefCell::new(~[]),
-        testfns: RefCell::new(~[]),
+        path: RefCell::new(Vec::new()),
+        testfns: RefCell::new(Vec::new()),
         is_test_crate: is_test_crate(&krate),
         config: krate.config.clone(),
     };
@@ -303,7 +303,7 @@ fn mk_std(cx: &TestCtxt) -> ast::ViewItem {
     let vi = if cx.is_test_crate {
         ast::ViewItemUse(
             vec!(@nospan(ast::ViewPathSimple(id_test,
-                                             path_node(~[id_test]),
+                                             path_node(vec!(id_test)),
                                              ast::DUMMY_NODE_ID))))
     } else {
         ast::ViewItemExternCrate(id_test,
@@ -363,7 +363,7 @@ fn nospan<T>(t: T) -> codemap::Spanned<T> {
     codemap::Spanned { node: t, span: DUMMY_SP }
 }

-fn path_node(ids: ~[ast::Ident]) -> ast::Path {
+fn path_node(ids: Vec<ast::Ident> ) -> ast::Path {
     ast::Path {
         span: DUMMY_SP,
         global: false,
@@ -375,7 +375,7 @@ fn path_node(ids: ~[ast::Ident]) -> ast::Path {
     }
 }

-fn path_node_global(ids: ~[ast::Ident]) -> ast::Path {
+fn path_node_global(ids: Vec<ast::Ident> ) -> ast::Path {
     ast::Path {
         span: DUMMY_SP,
         global: true,
@@ -432,11 +432,12 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::Expr {
     let span = test.span;
     let path = test.path.clone();

-    debug!("encoding {}", ast_util::path_name_i(path));
+    debug!("encoding {}", ast_util::path_name_i(path.as_slice()));

     let name_lit: ast::Lit =
         nospan(ast::LitStr(token::intern_and_get_ident(
-            ast_util::path_name_i(path)), ast::CookedStr));
+            ast_util::path_name_i(path.as_slice())),
+            ast::CookedStr));

     let name_expr = @ast::Expr {
         id: ast::DUMMY_NODE_ID,

View File

@@ -53,8 +53,8 @@ use std::io;
 use std::os;
 use std::str;
 use std::task;
-use std::vec;
 use std::vec_ng::Vec;
+use std::vec_ng;
 use syntax::ast;
 use syntax::diagnostic::Emitter;
 use syntax::diagnostic;
@@ -149,7 +149,7 @@ Additional help:
     -C help             Print codegen options
     -W help             Print 'lint' options and default settings
     -Z help             Print internal options for debugging rustc\n",
-              getopts::usage(message, d::optgroups()));
+              getopts::usage(message, d::optgroups().as_slice()));
 }

 pub fn describe_warnings() {
@@ -164,8 +164,8 @@ Available lint options:
     let lint_dict = lint::get_lint_dict();
     let mut lint_dict = lint_dict.move_iter()
                                  .map(|(k, v)| (v, k))
-                                 .collect::<~[(lint::LintSpec, &'static str)]>();
-    lint_dict.sort();
+                                 .collect::<Vec<(lint::LintSpec, &'static str)> >();
+    lint_dict.as_mut_slice().sort();

     let mut max_key = 0;
     for &(_, name) in lint_dict.iter() {
@@ -224,7 +224,7 @@ pub fn run_compiler(args: &[~str]) {
     if args.is_empty() { usage(binary); return; }

     let matches =
-        &match getopts::getopts(args, d::optgroups()) {
+        &match getopts::getopts(args, d::optgroups().as_slice()) {
           Ok(m) => m,
           Err(f) => {
             d::early_error(f.to_err_msg());
@@ -236,8 +236,10 @@ pub fn run_compiler(args: &[~str]) {
         return;
     }

-    let lint_flags = vec::append(matches.opt_strs("W"),
-                                 matches.opt_strs("warn"));
+    let lint_flags = vec_ng::append(matches.opt_strs("W")
                                            .move_iter()
                                            .collect(),
+                                    matches.opt_strs("warn"));
     if lint_flags.iter().any(|x| x == &~"help") {
         describe_warnings();
         return;
@@ -312,8 +314,8 @@ pub fn run_compiler(args: &[~str]) {
     if crate_id || crate_name || crate_file_name {
         let attrs = parse_crate_attrs(sess, &input);
         let t_outputs = d::build_output_filenames(&input, &odir, &ofile,
-                                                  attrs, sess);
-        let id = link::find_crate_id(attrs, &t_outputs);
+                                                  attrs.as_slice(), sess);
+        let id = link::find_crate_id(attrs.as_slice(), &t_outputs);

         if crate_id {
             println!("{}", id.to_str());
@@ -322,7 +324,8 @@ pub fn run_compiler(args: &[~str]) {
             println!("{}", id.name);
         }
         if crate_file_name {
-            let crate_types = session::collect_crate_types(&sess, attrs);
+            let crate_types = session::collect_crate_types(&sess,
+                                                           attrs.as_slice());
             for &style in crate_types.iter() {
                 let fname = link::filename_for_input(&sess, style, &id,
                                                      &t_outputs.with_extension(""));
@@ -337,7 +340,7 @@ pub fn run_compiler(args: &[~str]) {
 }

 fn parse_crate_attrs(sess: session::Session, input: &d::Input) ->
-                     ~[ast::Attribute] {
+                     Vec<ast::Attribute> {
     let result = match *input {
         d::FileInput(ref ifile) => {
             parse::parse_crate_attrs_from_file(ifile,

View File

@@ -46,7 +46,7 @@ pub fn read_crates(sess: Session,
     let mut e = Env {
         sess: sess,
         os: os,
-        crate_cache: @RefCell::new(~[]),
+        crate_cache: @RefCell::new(Vec::new()),
         next_crate_num: 1,
         intr: intr
     };
@@ -58,8 +58,10 @@ pub fn read_crates(sess: Session,
         visit::walk_crate(&mut v, krate, ());
     }
     let crate_cache = e.crate_cache.borrow();
-    dump_crates(*crate_cache.get());
-    warn_if_multiple_versions(&mut e, sess.diagnostic(), *crate_cache.get());
+    dump_crates(crate_cache.get().as_slice());
+    warn_if_multiple_versions(&mut e,
+                              sess.diagnostic(),
+                              crate_cache.get().as_slice());
 }

 struct ReadCrateVisitor<'a> {
@@ -121,7 +123,7 @@ fn warn_if_multiple_versions(e: &mut Env,
 struct Env {
     sess: Session,
     os: loader::Os,
-    crate_cache: @RefCell<~[cache_entry]>,
+    crate_cache: @RefCell<Vec<cache_entry>>,
     next_crate_num: ast::CrateNum,
     intr: @IdentInterner
 }
@@ -401,7 +403,7 @@ impl Loader {
             env: Env {
                 sess: sess,
                 os: os,
-                crate_cache: @RefCell::new(~[]),
+                crate_cache: @RefCell::new(Vec::new()),
                 next_crate_num: 1,
                 intr: token::get_ident_interner(),
             }

View File

@@ -18,9 +18,10 @@ use metadata::decoder;
 use middle::ty;
 use middle::typeck;

-use std::vec;
 use reader = serialize::ebml::reader;
 use std::rc::Rc;
+use std::vec_ng::Vec;
+use std::vec_ng;
 use syntax::ast;
 use syntax::ast_map;
 use syntax::diagnostic::expect;
@@ -86,14 +87,15 @@ pub fn each_top_level_item_of_crate(cstore: @cstore::CStore,
                                         callback)
 }

-pub fn get_item_path(tcx: ty::ctxt, def: ast::DefId) -> ~[ast_map::PathElem] {
+pub fn get_item_path(tcx: ty::ctxt, def: ast::DefId) -> Vec<ast_map::PathElem> {
     let cstore = tcx.cstore;
     let cdata = cstore.get_crate_data(def.krate);
     let path = decoder::get_item_path(cdata, def.node);

     // FIXME #1920: This path is not always correct if the crate is not linked
     // into the root namespace.
-    vec::append(~[ast_map::PathMod(token::intern(cdata.name))], path)
+    vec_ng::append(vec!(ast_map::PathMod(token::intern(cdata.name))),
+                   path.as_slice())
 }

 pub enum found_ast {
@@ -114,7 +116,7 @@ pub fn maybe_get_item_ast(tcx: ty::ctxt, def: ast::DefId,
 }

 pub fn get_enum_variants(tcx: ty::ctxt, def: ast::DefId)
-                      -> ~[@ty::VariantInfo] {
+                      -> Vec<@ty::VariantInfo> {
     let cstore = tcx.cstore;
     let cdata = cstore.get_crate_data(def.krate);
     return decoder::get_enum_variants(cstore.intr, cdata, def.node, tcx)
@@ -141,7 +143,7 @@ pub fn get_method_name_and_explicit_self(cstore: @cstore::CStore,
 }

 pub fn get_trait_method_def_ids(cstore: @cstore::CStore,
-                                def: ast::DefId) -> ~[ast::DefId] {
+                                def: ast::DefId) -> Vec<ast::DefId> {
     let cdata = cstore.get_crate_data(def.krate);
     decoder::get_trait_method_def_ids(cdata, def.node)
 }
@@ -154,13 +156,13 @@ pub fn get_item_variances(cstore: @cstore::CStore,
 pub fn get_provided_trait_methods(tcx: ty::ctxt,
                                   def: ast::DefId)
-                               -> ~[@ty::Method] {
+                               -> Vec<@ty::Method> {
     let cstore = tcx.cstore;
     let cdata = cstore.get_crate_data(def.krate);
     decoder::get_provided_trait_methods(cstore.intr, cdata, def.node, tcx)
 }

-pub fn get_supertraits(tcx: ty::ctxt, def: ast::DefId) -> ~[@ty::TraitRef] {
+pub fn get_supertraits(tcx: ty::ctxt, def: ast::DefId) -> Vec<@ty::TraitRef> {
     let cstore = tcx.cstore;
     let cdata = cstore.get_crate_data(def.krate);
     decoder::get_supertraits(cdata, def.node, tcx)
@@ -174,21 +176,21 @@ pub fn get_type_name_if_impl(cstore: @cstore::CStore, def: ast::DefId)
 pub fn get_static_methods_if_impl(cstore: @cstore::CStore,
                                   def: ast::DefId)
-                               -> Option<~[StaticMethodInfo]> {
+                               -> Option<Vec<StaticMethodInfo> > {
     let cdata = cstore.get_crate_data(def.krate);
     decoder::get_static_methods_if_impl(cstore.intr, cdata, def.node)
 }

 pub fn get_item_attrs(cstore: @cstore::CStore,
                       def_id: ast::DefId,
-                      f: |~[@ast::MetaItem]|) {
+                      f: |Vec<@ast::MetaItem> |) {
     let cdata = cstore.get_crate_data(def_id.krate);
     decoder::get_item_attrs(cdata, def_id.node, f)
 }

 pub fn get_struct_fields(cstore: @cstore::CStore,
                          def: ast::DefId)
-                      -> ~[ty::field_ty] {
+                      -> Vec<ty::field_ty> {
     let cdata = cstore.get_crate_data(def.krate);
     decoder::get_struct_fields(cstore.intr, cdata, def.node)
 }
@@ -222,8 +224,8 @@ pub fn get_field_type(tcx: ty::ctxt, class_id: ast::DefId,
                    class_id, def) );
     let ty = decoder::item_type(def, the_field, tcx, cdata);
     ty::ty_param_bounds_and_ty {
-        generics: ty::Generics {type_param_defs: Rc::new(~[]),
-                                region_param_defs: Rc::new(~[])},
+        generics: ty::Generics {type_param_defs: Rc::new(Vec::new()),
+                                region_param_defs: Rc::new(Vec::new())},
         ty: ty
     }
 }
@@ -262,7 +264,7 @@ pub fn get_item_visibility(cstore: @cstore::CStore,
 pub fn get_native_libraries(cstore: @cstore::CStore,
                             crate_num: ast::CrateNum)
-                            -> ~[(cstore::NativeLibaryKind, ~str)] {
+                            -> Vec<(cstore::NativeLibaryKind, ~str)> {
     let cdata = cstore.get_crate_data(crate_num);
     decoder::get_native_libraries(cdata)
 }
@@ -308,7 +310,7 @@ pub fn get_macro_registrar_fn(cstore: @cstore::CStore,
 pub fn get_exported_macros(cstore: @cstore::CStore,
                            crate_num: ast::CrateNum)
-                           -> ~[~str] {
+                           -> Vec<~str> {
     let cdata = cstore.get_crate_data(crate_num);
     decoder::get_exported_macros(cdata)
 }

View File

@ -18,6 +18,7 @@ use metadata::decoder;
use metadata::loader; use metadata::loader;
use std::cell::RefCell; use std::cell::RefCell;
use std::vec_ng::Vec;
use collections::HashMap; use collections::HashMap;
use extra::c_vec::CVec; use extra::c_vec::CVec;
use syntax::ast; use syntax::ast;
@ -67,9 +68,9 @@ pub struct CrateSource {
pub struct CStore { pub struct CStore {
priv metas: RefCell<HashMap<ast::CrateNum, @crate_metadata>>, priv metas: RefCell<HashMap<ast::CrateNum, @crate_metadata>>,
priv extern_mod_crate_map: RefCell<extern_mod_crate_map>, priv extern_mod_crate_map: RefCell<extern_mod_crate_map>,
priv used_crate_sources: RefCell<~[CrateSource]>, priv used_crate_sources: RefCell<Vec<CrateSource> >,
priv used_libraries: RefCell<~[(~str, NativeLibaryKind)]>, priv used_libraries: RefCell<Vec<(~str, NativeLibaryKind)> >,
priv used_link_args: RefCell<~[~str]>, priv used_link_args: RefCell<Vec<~str> >,
intr: @IdentInterner intr: @IdentInterner
} }
@ -81,9 +82,9 @@ impl CStore {
CStore { CStore {
metas: RefCell::new(HashMap::new()), metas: RefCell::new(HashMap::new()),
extern_mod_crate_map: RefCell::new(HashMap::new()), extern_mod_crate_map: RefCell::new(HashMap::new()),
used_crate_sources: RefCell::new(~[]), used_crate_sources: RefCell::new(Vec::new()),
used_libraries: RefCell::new(~[]), used_libraries: RefCell::new(Vec::new()),
used_link_args: RefCell::new(~[]), used_link_args: RefCell::new(Vec::new()),
intr: intr intr: intr
} }
} }
@ -143,7 +144,7 @@ impl CStore {
} }
pub fn get_used_crates(&self, prefer: LinkagePreference) pub fn get_used_crates(&self, prefer: LinkagePreference)
-> ~[(ast::CrateNum, Option<Path>)] { -> Vec<(ast::CrateNum, Option<Path>)> {
let used_crate_sources = self.used_crate_sources.borrow(); let used_crate_sources = self.used_crate_sources.borrow();
used_crate_sources.get() used_crate_sources.get()
.iter() .iter()
@ -161,7 +162,7 @@ impl CStore {
} }
pub fn get_used_libraries<'a>(&'a self) pub fn get_used_libraries<'a>(&'a self)
-> &'a RefCell<~[(~str, NativeLibaryKind)]> { -> &'a RefCell<Vec<(~str, NativeLibaryKind)> > {
&self.used_libraries &self.used_libraries
} }
@ -172,7 +173,7 @@ impl CStore {
} }
} }
pub fn get_used_link_args<'a>(&'a self) -> &'a RefCell<~[~str]> { pub fn get_used_link_args<'a>(&'a self) -> &'a RefCell<Vec<~str> > {
&self.used_link_args &self.used_link_args
} }
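The CStore fields above keep the same shape after the conversion: growable lists behind `RefCell<Vec<T>>`, with `&self` methods that append and accessors that hand out the whole cell. A minimal sketch of that pattern in present-day Rust follows; `LinkArgs` and `add_used_link_args` are illustrative names standing in for the real CStore methods, not quoted from the PR.

```rust
use std::cell::RefCell;

// Interior mutability: &self methods can push while callers borrow the list.
struct LinkArgs {
    used_link_args: RefCell<Vec<String>>,
}

impl LinkArgs {
    fn new() -> LinkArgs {
        LinkArgs { used_link_args: RefCell::new(Vec::new()) }
    }

    // Accumulate link args one crate at a time, roughly as CStore does.
    fn add_used_link_args(&self, args: &str) {
        for a in args.split_whitespace() {
            self.used_link_args.borrow_mut().push(a.to_string());
        }
    }

    // Hand the whole cell back, mirroring get_used_link_args above.
    fn get_used_link_args(&self) -> &RefCell<Vec<String>> {
        &self.used_link_args
    }
}

fn main() {
    let store = LinkArgs::new();
    store.add_used_link_args("-lfoo -lbar");
    assert_eq!(store.get_used_link_args().borrow().len(), 2);
}
```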

View File

@ -33,7 +33,7 @@ use std::io;
use std::io::extensions::u64_from_be_bytes; use std::io::extensions::u64_from_be_bytes;
use std::option; use std::option;
use std::rc::Rc; use std::rc::Rc;
use std::vec; use std::vec_ng::Vec;
use serialize::ebml::reader; use serialize::ebml::reader;
use serialize::ebml; use serialize::ebml;
use serialize::Decodable; use serialize::Decodable;
@ -250,8 +250,8 @@ fn item_ty_param_defs(item: ebml::Doc,
tcx: ty::ctxt, tcx: ty::ctxt,
cdata: Cmd, cdata: Cmd,
tag: uint) tag: uint)
-> Rc<~[ty::TypeParameterDef]> { -> Rc<Vec<ty::TypeParameterDef> > {
let mut bounds = ~[]; let mut bounds = Vec::new();
reader::tagged_docs(item, tag, |p| { reader::tagged_docs(item, tag, |p| {
let bd = parse_type_param_def_data( let bd = parse_type_param_def_data(
p.data, p.start, cdata.cnum, tcx, p.data, p.start, cdata.cnum, tcx,
@ -263,8 +263,8 @@ fn item_ty_param_defs(item: ebml::Doc,
} }
fn item_region_param_defs(item_doc: ebml::Doc, cdata: Cmd) fn item_region_param_defs(item_doc: ebml::Doc, cdata: Cmd)
-> Rc<~[ty::RegionParameterDef]> { -> Rc<Vec<ty::RegionParameterDef> > {
let mut v = ~[]; let mut v = Vec::new();
reader::tagged_docs(item_doc, tag_region_param_def, |rp_doc| { reader::tagged_docs(item_doc, tag_region_param_def, |rp_doc| {
let ident_str_doc = reader::get_doc(rp_doc, let ident_str_doc = reader::get_doc(rp_doc,
tag_region_param_def_ident); tag_region_param_def_ident);
@ -287,8 +287,8 @@ fn item_ty_param_count(item: ebml::Doc) -> uint {
n n
} }
fn enum_variant_ids(item: ebml::Doc, cdata: Cmd) -> ~[ast::DefId] { fn enum_variant_ids(item: ebml::Doc, cdata: Cmd) -> Vec<ast::DefId> {
let mut ids: ~[ast::DefId] = ~[]; let mut ids: Vec<ast::DefId> = Vec::new();
let v = tag_items_data_item_variant; let v = tag_items_data_item_variant;
reader::tagged_docs(item, v, |p| { reader::tagged_docs(item, v, |p| {
let ext = reader::with_doc_data(p, parse_def_id); let ext = reader::with_doc_data(p, parse_def_id);
@ -298,13 +298,13 @@ fn enum_variant_ids(item: ebml::Doc, cdata: Cmd) -> ~[ast::DefId] {
return ids; return ids;
} }
fn item_path(item_doc: ebml::Doc) -> ~[ast_map::PathElem] { fn item_path(item_doc: ebml::Doc) -> Vec<ast_map::PathElem> {
let path_doc = reader::get_doc(item_doc, tag_path); let path_doc = reader::get_doc(item_doc, tag_path);
let len_doc = reader::get_doc(path_doc, tag_path_len); let len_doc = reader::get_doc(path_doc, tag_path_len);
let len = reader::doc_as_u32(len_doc) as uint; let len = reader::doc_as_u32(len_doc) as uint;
let mut result = vec::with_capacity(len); let mut result = Vec::with_capacity(len);
reader::docs(path_doc, |tag, elt_doc| { reader::docs(path_doc, |tag, elt_doc| {
if tag == tag_path_elem_mod { if tag == tag_path_elem_mod {
let s = elt_doc.as_str_slice(); let s = elt_doc.as_str_slice();
@ -667,22 +667,22 @@ pub fn each_top_level_item_of_crate(intr: @IdentInterner,
callback) callback)
} }
pub fn get_item_path(cdata: Cmd, id: ast::NodeId) -> ~[ast_map::PathElem] { pub fn get_item_path(cdata: Cmd, id: ast::NodeId) -> Vec<ast_map::PathElem> {
item_path(lookup_item(id, cdata.data())) item_path(lookup_item(id, cdata.data()))
} }
pub type DecodeInlinedItem<'a> = 'a |cdata: @cstore::crate_metadata, pub type DecodeInlinedItem<'a> = 'a |cdata: @cstore::crate_metadata,
tcx: ty::ctxt, tcx: ty::ctxt,
path: ~[ast_map::PathElem], path: Vec<ast_map::PathElem> ,
par_doc: ebml::Doc| par_doc: ebml::Doc|
-> Result<ast::InlinedItem, ~[ast_map::PathElem]>; -> Result<ast::InlinedItem, Vec<ast_map::PathElem> >;
pub fn maybe_get_item_ast(cdata: Cmd, tcx: ty::ctxt, id: ast::NodeId, pub fn maybe_get_item_ast(cdata: Cmd, tcx: ty::ctxt, id: ast::NodeId,
decode_inlined_item: DecodeInlinedItem) decode_inlined_item: DecodeInlinedItem)
-> csearch::found_ast { -> csearch::found_ast {
debug!("Looking up item: {}", id); debug!("Looking up item: {}", id);
let item_doc = lookup_item(id, cdata.data()); let item_doc = lookup_item(id, cdata.data());
let path = item_path(item_doc).init().to_owned(); let path = Vec::from_slice(item_path(item_doc).init());
match decode_inlined_item(cdata, tcx, path, item_doc) { match decode_inlined_item(cdata, tcx, path, item_doc) {
Ok(ref ii) => csearch::found(*ii), Ok(ref ii) => csearch::found(*ii),
Err(path) => { Err(path) => {
@ -702,11 +702,11 @@ pub fn maybe_get_item_ast(cdata: Cmd, tcx: ty::ctxt, id: ast::NodeId,
} }
pub fn get_enum_variants(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId, pub fn get_enum_variants(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId,
tcx: ty::ctxt) -> ~[@ty::VariantInfo] { tcx: ty::ctxt) -> Vec<@ty::VariantInfo> {
let data = cdata.data(); let data = cdata.data();
let items = reader::get_doc(reader::Doc(data), tag_items); let items = reader::get_doc(reader::Doc(data), tag_items);
let item = find_item(id, items); let item = find_item(id, items);
let mut infos: ~[@ty::VariantInfo] = ~[]; let mut infos: Vec<@ty::VariantInfo> = Vec::new();
let variant_ids = enum_variant_ids(item, cdata); let variant_ids = enum_variant_ids(item, cdata);
let mut disr_val = 0; let mut disr_val = 0;
for did in variant_ids.iter() { for did in variant_ids.iter() {
@ -716,7 +716,7 @@ pub fn get_enum_variants(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId,
let name = item_name(intr, item); let name = item_name(intr, item);
let arg_tys = match ty::get(ctor_ty).sty { let arg_tys = match ty::get(ctor_ty).sty {
ty::ty_bare_fn(ref f) => f.sig.inputs.clone(), ty::ty_bare_fn(ref f) => f.sig.inputs.clone(),
_ => ~[], // Nullary enum variant. _ => Vec::new(), // Nullary enum variant.
}; };
match variant_disr_val(item) { match variant_disr_val(item) {
Some(val) => { disr_val = val; } Some(val) => { disr_val = val; }
@ -761,8 +761,8 @@ fn get_explicit_self(item: ebml::Doc) -> ast::ExplicitSelf_ {
} }
fn item_impl_methods(intr: @IdentInterner, cdata: Cmd, item: ebml::Doc, fn item_impl_methods(intr: @IdentInterner, cdata: Cmd, item: ebml::Doc,
tcx: ty::ctxt) -> ~[@ty::Method] { tcx: ty::ctxt) -> Vec<@ty::Method> {
let mut rslt = ~[]; let mut rslt = Vec::new();
reader::tagged_docs(item, tag_item_impl_method, |doc| { reader::tagged_docs(item, tag_item_impl_method, |doc| {
let m_did = reader::with_doc_data(doc, parse_def_id); let m_did = reader::with_doc_data(doc, parse_def_id);
rslt.push(@get_method(intr, cdata, m_did.node, tcx)); rslt.push(@get_method(intr, cdata, m_did.node, tcx));
@ -838,10 +838,10 @@ pub fn get_method(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId,
} }
pub fn get_trait_method_def_ids(cdata: Cmd, pub fn get_trait_method_def_ids(cdata: Cmd,
id: ast::NodeId) -> ~[ast::DefId] { id: ast::NodeId) -> Vec<ast::DefId> {
let data = cdata.data(); let data = cdata.data();
let item = lookup_item(id, data); let item = lookup_item(id, data);
let mut result = ~[]; let mut result = Vec::new();
reader::tagged_docs(item, tag_item_trait_method, |mth| { reader::tagged_docs(item, tag_item_trait_method, |mth| {
result.push(item_def_id(mth, cdata)); result.push(item_def_id(mth, cdata));
true true
@ -859,10 +859,10 @@ pub fn get_item_variances(cdata: Cmd, id: ast::NodeId) -> ty::ItemVariances {
pub fn get_provided_trait_methods(intr: @IdentInterner, cdata: Cmd, pub fn get_provided_trait_methods(intr: @IdentInterner, cdata: Cmd,
id: ast::NodeId, tcx: ty::ctxt) -> id: ast::NodeId, tcx: ty::ctxt) ->
~[@ty::Method] { Vec<@ty::Method> {
let data = cdata.data(); let data = cdata.data();
let item = lookup_item(id, data); let item = lookup_item(id, data);
let mut result = ~[]; let mut result = Vec::new();
reader::tagged_docs(item, tag_item_trait_method, |mth_id| { reader::tagged_docs(item, tag_item_trait_method, |mth_id| {
let did = item_def_id(mth_id, cdata); let did = item_def_id(mth_id, cdata);
@ -879,8 +879,8 @@ pub fn get_provided_trait_methods(intr: @IdentInterner, cdata: Cmd,
/// Returns the supertraits of the given trait. /// Returns the supertraits of the given trait.
pub fn get_supertraits(cdata: Cmd, id: ast::NodeId, tcx: ty::ctxt) pub fn get_supertraits(cdata: Cmd, id: ast::NodeId, tcx: ty::ctxt)
-> ~[@ty::TraitRef] { -> Vec<@ty::TraitRef> {
let mut results = ~[]; let mut results = Vec::new();
let item_doc = lookup_item(id, cdata.data()); let item_doc = lookup_item(id, cdata.data());
reader::tagged_docs(item_doc, tag_item_super_trait_ref, |trait_doc| { reader::tagged_docs(item_doc, tag_item_super_trait_ref, |trait_doc| {
// NB. Only reads the ones that *aren't* builtin-bounds. See also // NB. Only reads the ones that *aren't* builtin-bounds. See also
@ -914,7 +914,7 @@ pub fn get_type_name_if_impl(cdata: Cmd,
pub fn get_static_methods_if_impl(intr: @IdentInterner, pub fn get_static_methods_if_impl(intr: @IdentInterner,
cdata: Cmd, cdata: Cmd,
node_id: ast::NodeId) node_id: ast::NodeId)
-> Option<~[StaticMethodInfo]> { -> Option<Vec<StaticMethodInfo> > {
let item = lookup_item(node_id, cdata.data()); let item = lookup_item(node_id, cdata.data());
if item_family(item) != Impl { if item_family(item) != Impl {
return None; return None;
@ -927,13 +927,13 @@ pub fn get_static_methods_if_impl(intr: @IdentInterner,
if !ret { return None } if !ret { return None }
let mut impl_method_ids = ~[]; let mut impl_method_ids = Vec::new();
reader::tagged_docs(item, tag_item_impl_method, |impl_method_doc| { reader::tagged_docs(item, tag_item_impl_method, |impl_method_doc| {
impl_method_ids.push(reader::with_doc_data(impl_method_doc, parse_def_id)); impl_method_ids.push(reader::with_doc_data(impl_method_doc, parse_def_id));
true true
}); });
let mut static_impl_methods = ~[]; let mut static_impl_methods = Vec::new();
for impl_method_id in impl_method_ids.iter() { for impl_method_id in impl_method_ids.iter() {
let impl_method_doc = lookup_item(impl_method_id.node, cdata.data()); let impl_method_doc = lookup_item(impl_method_id.node, cdata.data());
let family = item_family(impl_method_doc); let family = item_family(impl_method_doc);
@ -975,7 +975,7 @@ pub fn get_tuple_struct_definition_if_ctor(cdata: Cmd,
pub fn get_item_attrs(cdata: Cmd, pub fn get_item_attrs(cdata: Cmd,
node_id: ast::NodeId, node_id: ast::NodeId,
f: |~[@ast::MetaItem]|) { f: |Vec<@ast::MetaItem> |) {
// The attributes for a tuple struct are attached to the definition, not the ctor; // The attributes for a tuple struct are attached to the definition, not the ctor;
// we assume that someone passing in a tuple struct ctor is actually wanting to // we assume that someone passing in a tuple struct ctor is actually wanting to
// look at the definition // look at the definition
@ -1000,10 +1000,10 @@ fn struct_field_family_to_visibility(family: Family) -> ast::Visibility {
} }
pub fn get_struct_fields(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId) pub fn get_struct_fields(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId)
-> ~[ty::field_ty] { -> Vec<ty::field_ty> {
let data = cdata.data(); let data = cdata.data();
let item = lookup_item(id, data); let item = lookup_item(id, data);
let mut result = ~[]; let mut result = Vec::new();
reader::tagged_docs(item, tag_item_field, |an_item| { reader::tagged_docs(item, tag_item_field, |an_item| {
let f = item_family(an_item); let f = item_family(an_item);
if f == PublicField || f == PrivateField || f == InheritedField { if f == PublicField || f == PrivateField || f == InheritedField {
@ -1035,8 +1035,8 @@ pub fn get_item_visibility(cdata: Cmd, id: ast::NodeId)
item_visibility(lookup_item(id, cdata.data())) item_visibility(lookup_item(id, cdata.data()))
} }
fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] { fn get_meta_items(md: ebml::Doc) -> Vec<@ast::MetaItem> {
let mut items: ~[@ast::MetaItem] = ~[]; let mut items: Vec<@ast::MetaItem> = Vec::new();
reader::tagged_docs(md, tag_meta_item_word, |meta_item_doc| { reader::tagged_docs(md, tag_meta_item_word, |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name); let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let n = token::intern_and_get_ident(nd.as_str_slice()); let n = token::intern_and_get_ident(nd.as_str_slice());
@ -1063,8 +1063,8 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] {
return items; return items;
} }
fn get_attributes(md: ebml::Doc) -> ~[ast::Attribute] { fn get_attributes(md: ebml::Doc) -> Vec<ast::Attribute> {
let mut attrs: ~[ast::Attribute] = ~[]; let mut attrs: Vec<ast::Attribute> = Vec::new();
match reader::maybe_get_doc(md, tag_attributes) { match reader::maybe_get_doc(md, tag_attributes) {
option::Some(attrs_d) => { option::Some(attrs_d) => {
reader::tagged_docs(attrs_d, tag_attribute, |attr_doc| { reader::tagged_docs(attrs_d, tag_attribute, |attr_doc| {
@ -1072,7 +1072,7 @@ fn get_attributes(md: ebml::Doc) -> ~[ast::Attribute] {
// Currently it's only possible to have a single meta item on // Currently it's only possible to have a single meta item on
// an attribute // an attribute
assert_eq!(meta_items.len(), 1u); assert_eq!(meta_items.len(), 1u);
let meta_item = meta_items[0]; let meta_item = *meta_items.get(0);
attrs.push( attrs.push(
codemap::Spanned { codemap::Spanned {
node: ast::Attribute_ { node: ast::Attribute_ {
@ -1102,7 +1102,7 @@ fn list_crate_attributes(md: ebml::Doc, hash: &Svh,
write!(out, "\n\n") write!(out, "\n\n")
} }
pub fn get_crate_attributes(data: &[u8]) -> ~[ast::Attribute] { pub fn get_crate_attributes(data: &[u8]) -> Vec<ast::Attribute> {
return get_attributes(reader::Doc(data)); return get_attributes(reader::Doc(data));
} }
@ -1113,8 +1113,8 @@ pub struct CrateDep {
hash: Svh, hash: Svh,
} }
pub fn get_crate_deps(data: &[u8]) -> ~[CrateDep] { pub fn get_crate_deps(data: &[u8]) -> Vec<CrateDep> {
let mut deps: ~[CrateDep] = ~[]; let mut deps: Vec<CrateDep> = Vec::new();
let cratedoc = reader::Doc(data); let cratedoc = reader::Doc(data);
let depsdoc = reader::get_doc(cratedoc, tag_crate_deps); let depsdoc = reader::get_doc(cratedoc, tag_crate_deps);
let mut crate_num = 1; let mut crate_num = 1;
@ -1255,10 +1255,10 @@ pub fn get_trait_of_method(cdata: Cmd, id: ast::NodeId, tcx: ty::ctxt)
} }
pub fn get_native_libraries(cdata: Cmd) -> ~[(cstore::NativeLibaryKind, ~str)] { pub fn get_native_libraries(cdata: Cmd) -> Vec<(cstore::NativeLibaryKind, ~str)> {
let libraries = reader::get_doc(reader::Doc(cdata.data()), let libraries = reader::get_doc(reader::Doc(cdata.data()),
tag_native_libraries); tag_native_libraries);
let mut result = ~[]; let mut result = Vec::new();
reader::tagged_docs(libraries, tag_native_libraries_lib, |lib_doc| { reader::tagged_docs(libraries, tag_native_libraries_lib, |lib_doc| {
let kind_doc = reader::get_doc(lib_doc, tag_native_libraries_kind); let kind_doc = reader::get_doc(lib_doc, tag_native_libraries_kind);
let name_doc = reader::get_doc(lib_doc, tag_native_libraries_name); let name_doc = reader::get_doc(lib_doc, tag_native_libraries_name);
@ -1276,10 +1276,10 @@ pub fn get_macro_registrar_fn(cdata: Cmd) -> Option<ast::DefId> {
.map(|doc| item_def_id(doc, cdata)) .map(|doc| item_def_id(doc, cdata))
} }
pub fn get_exported_macros(cdata: Cmd) -> ~[~str] { pub fn get_exported_macros(cdata: Cmd) -> Vec<~str> {
let macros = reader::get_doc(reader::Doc(cdata.data()), let macros = reader::get_doc(reader::Doc(cdata.data()),
tag_exported_macros); tag_exported_macros);
let mut result = ~[]; let mut result = Vec::new();
reader::tagged_docs(macros, tag_macro_def, |macro_doc| { reader::tagged_docs(macros, tag_macro_def, |macro_doc| {
result.push(macro_doc.as_str()); result.push(macro_doc.as_str());
true true

View File

@ -32,6 +32,7 @@ use std::hash;
use std::hash::Hash; use std::hash::Hash;
use std::io::MemWriter; use std::io::MemWriter;
use std::str; use std::str;
use std::vec_ng::Vec;
use collections::HashMap; use collections::HashMap;
use syntax::abi::AbiSet; use syntax::abi::AbiSet;
use syntax::ast::*; use syntax::ast::*;
@ -324,7 +325,7 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
id: NodeId, id: NodeId,
variants: &[P<Variant>], variants: &[P<Variant>],
index: @RefCell<~[entry<i64>]>, index: @RefCell<Vec<entry<i64>> >,
generics: &ast::Generics) { generics: &ast::Generics) {
debug!("encode_enum_variant_info(id={:?})", id); debug!("encode_enum_variant_info(id={:?})", id);
@ -367,9 +368,9 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
encode_index(ebml_w, bkts, write_i64); encode_index(ebml_w, bkts, write_i64);
} }
} }
if vi[i].disr_val != disr_val { if vi.get(i).disr_val != disr_val {
encode_disr_val(ecx, ebml_w, vi[i].disr_val); encode_disr_val(ecx, ebml_w, vi.get(i).disr_val);
disr_val = vi[i].disr_val; disr_val = vi.get(i).disr_val;
} }
encode_bounds_and_type(ebml_w, ecx, encode_bounds_and_type(ebml_w, ecx,
&lookup_item_type(ecx.tcx, def_id)); &lookup_item_type(ecx.tcx, def_id));
@ -687,11 +688,11 @@ fn encode_provided_source(ebml_w: &mut writer::Encoder,
fn encode_info_for_struct(ecx: &EncodeContext, fn encode_info_for_struct(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
fields: &[StructField], fields: &[StructField],
global_index: @RefCell<~[entry<i64>]>) global_index: @RefCell<Vec<entry<i64>> >)
-> ~[entry<i64>] { -> Vec<entry<i64>> {
/* Each class has its own index, since different classes /* Each class has its own index, since different classes
may have fields with the same name */ may have fields with the same name */
let mut index = ~[]; let mut index = Vec::new();
let tcx = ecx.tcx; let tcx = ecx.tcx;
/* We encode both private and public fields -- need to include /* We encode both private and public fields -- need to include
private fields to get the offsets right */ private fields to get the offsets right */
@ -726,7 +727,7 @@ fn encode_info_for_struct_ctor(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
name: ast::Ident, name: ast::Ident,
ctor_id: NodeId, ctor_id: NodeId,
index: @RefCell<~[entry<i64>]>, index: @RefCell<Vec<entry<i64>> >,
struct_id: NodeId) { struct_id: NodeId) {
{ {
let mut index = index.borrow_mut(); let mut index = index.borrow_mut();
@ -888,13 +889,13 @@ fn encode_extension_implementations(ecx: &EncodeContext,
fn encode_info_for_item(ecx: &EncodeContext, fn encode_info_for_item(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
item: &Item, item: &Item,
index: @RefCell<~[entry<i64>]>, index: @RefCell<Vec<entry<i64>> >,
path: PathElems, path: PathElems,
vis: ast::Visibility) { vis: ast::Visibility) {
let tcx = ecx.tcx; let tcx = ecx.tcx;
fn add_to_index(item: &Item, ebml_w: &writer::Encoder, fn add_to_index(item: &Item, ebml_w: &writer::Encoder,
index: @RefCell<~[entry<i64>]>) { index: @RefCell<Vec<entry<i64>> >) {
let mut index = index.borrow_mut(); let mut index = index.borrow_mut();
index.get().push(entry { index.get().push(entry {
val: item.id as i64, val: item.id as i64,
@ -1239,7 +1240,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
fn encode_info_for_foreign_item(ecx: &EncodeContext, fn encode_info_for_foreign_item(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
nitem: &ForeignItem, nitem: &ForeignItem,
index: @RefCell<~[entry<i64>]>, index: @RefCell<Vec<entry<i64>> >,
path: PathElems, path: PathElems,
abi: AbiSet) { abi: AbiSet) {
{ {
@ -1284,7 +1285,7 @@ fn my_visit_expr(_e: &Expr) { }
fn my_visit_item(i: &Item, fn my_visit_item(i: &Item,
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
ecx_ptr: *int, ecx_ptr: *int,
index: @RefCell<~[entry<i64>]>) { index: @RefCell<Vec<entry<i64>> >) {
let mut ebml_w = unsafe { ebml_w.unsafe_clone() }; let mut ebml_w = unsafe { ebml_w.unsafe_clone() };
// See above // See above
let ecx: &EncodeContext = unsafe { cast::transmute(ecx_ptr) }; let ecx: &EncodeContext = unsafe { cast::transmute(ecx_ptr) };
@ -1296,7 +1297,7 @@ fn my_visit_item(i: &Item,
fn my_visit_foreign_item(ni: &ForeignItem, fn my_visit_foreign_item(ni: &ForeignItem,
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
ecx_ptr:*int, ecx_ptr:*int,
index: @RefCell<~[entry<i64>]>) { index: @RefCell<Vec<entry<i64>> >) {
// See above // See above
let ecx: &EncodeContext = unsafe { cast::transmute(ecx_ptr) }; let ecx: &EncodeContext = unsafe { cast::transmute(ecx_ptr) };
debug!("writing foreign item {}::{}", debug!("writing foreign item {}::{}",
@ -1317,7 +1318,7 @@ fn my_visit_foreign_item(ni: &ForeignItem,
struct EncodeVisitor<'a,'b> { struct EncodeVisitor<'a,'b> {
ebml_w_for_visit_item: &'a mut writer::Encoder<'b>, ebml_w_for_visit_item: &'a mut writer::Encoder<'b>,
ecx_ptr:*int, ecx_ptr:*int,
index: @RefCell<~[entry<i64>]>, index: @RefCell<Vec<entry<i64>> >,
} }
impl<'a,'b> visit::Visitor<()> for EncodeVisitor<'a,'b> { impl<'a,'b> visit::Visitor<()> for EncodeVisitor<'a,'b> {
@ -1344,8 +1345,8 @@ impl<'a,'b> visit::Visitor<()> for EncodeVisitor<'a,'b> {
fn encode_info_for_items(ecx: &EncodeContext, fn encode_info_for_items(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
krate: &Crate) krate: &Crate)
-> ~[entry<i64>] { -> Vec<entry<i64>> {
let index = @RefCell::new(~[]); let index = @RefCell::new(Vec::new());
ebml_w.start_tag(tag_items_data); ebml_w.start_tag(tag_items_data);
{ {
let mut index = index.borrow_mut(); let mut index = index.borrow_mut();
@ -1382,19 +1383,19 @@ fn encode_info_for_items(ecx: &EncodeContext,
// Path and definition ID indexing // Path and definition ID indexing
fn create_index<T:Clone + Hash + 'static>( fn create_index<T:Clone + Hash + 'static>(
index: ~[entry<T>]) index: Vec<entry<T>> )
-> ~[@~[entry<T>]] { -> Vec<@Vec<entry<T>> > {
let mut buckets: ~[@RefCell<~[entry<T>]>] = ~[]; let mut buckets: Vec<@RefCell<Vec<entry<T>> >> = Vec::new();
for _ in range(0u, 256u) { for _ in range(0u, 256u) {
buckets.push(@RefCell::new(~[])); buckets.push(@RefCell::new(Vec::new()));
} }
for elt in index.iter() { for elt in index.iter() {
let h = hash::hash(&elt.val) as uint; let h = hash::hash(&elt.val) as uint;
let mut bucket = buckets[h % 256].borrow_mut(); let mut bucket = buckets.get_mut(h % 256).borrow_mut();
bucket.get().push((*elt).clone()); bucket.get().push((*elt).clone());
} }
let mut buckets_frozen = ~[]; let mut buckets_frozen = Vec::new();
for bucket in buckets.iter() { for bucket in buckets.iter() {
buckets_frozen.push(@/*bad*/(**bucket).get()); buckets_frozen.push(@/*bad*/(**bucket).get());
} }
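`create_index` hashes each entry's value into one of 256 buckets so that a later lookup only scans a single bucket; this hunk keeps that algorithm and only swaps the owned slices for `Vec`. Below is a minimal sketch of the same bucketing in present-day Rust, with the assumed simplification that buckets are plain `Vec<Vec<_>>` rather than the `@RefCell`-wrapped buckets the 2014 code needs.

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Stand-in for the encoder's entry<T>: a keyed value plus its byte position.
#[derive(Clone)]
struct Entry<T> {
    val: T,
    pos: u64,
}

// 256-way bucketing: hash each entry's value and append it to buckets[h % 256].
fn create_index<T: Clone + Hash>(index: Vec<Entry<T>>) -> Vec<Vec<Entry<T>>> {
    let mut buckets: Vec<Vec<Entry<T>>> = (0..256).map(|_| Vec::new()).collect();
    for elt in index.iter() {
        let mut h = DefaultHasher::new();
        elt.val.hash(&mut h);
        let bucket = (h.finish() % 256) as usize;
        buckets[bucket].push(elt.clone());
    }
    buckets
}

fn main() {
    let idx = vec![Entry { val: 1u32, pos: 10 }, Entry { val: 257u32, pos: 20 }];
    let buckets = create_index(idx);
    // Every entry lands in exactly one bucket.
    assert_eq!(buckets.iter().map(|b| b.len()).sum::<usize>(), 2);
}
```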
@ -1403,10 +1404,10 @@ fn create_index<T:Clone + Hash + 'static>(
fn encode_index<T:'static>( fn encode_index<T:'static>(
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
buckets: ~[@~[entry<T>]], buckets: Vec<@Vec<entry<T>> > ,
write_fn: |&mut MemWriter, &T|) { write_fn: |&mut MemWriter, &T|) {
ebml_w.start_tag(tag_index); ebml_w.start_tag(tag_index);
let mut bucket_locs = ~[]; let mut bucket_locs = Vec::new();
ebml_w.start_tag(tag_index_buckets); ebml_w.start_tag(tag_index_buckets);
for bucket in buckets.iter() { for bucket in buckets.iter() {
bucket_locs.push(ebml_w.writer.tell().unwrap()); bucket_locs.push(ebml_w.writer.tell().unwrap());
@ -1491,7 +1492,7 @@ fn encode_attributes(ebml_w: &mut writer::Encoder, attrs: &[Attribute]) {
// metadata that Rust cares about for linking crates. If the user didn't // metadata that Rust cares about for linking crates. If the user didn't
// provide it we will throw it in anyway with a default value. // provide it we will throw it in anyway with a default value.
fn synthesize_crate_attrs(ecx: &EncodeContext, fn synthesize_crate_attrs(ecx: &EncodeContext,
krate: &Crate) -> ~[Attribute] { krate: &Crate) -> Vec<Attribute> {
fn synthesize_crateid_attr(ecx: &EncodeContext) -> Attribute { fn synthesize_crateid_attr(ecx: &EncodeContext) -> Attribute {
assert!(!ecx.link_meta.crateid.name.is_empty()); assert!(!ecx.link_meta.crateid.name.is_empty());
@ -1502,7 +1503,7 @@ fn synthesize_crate_attrs(ecx: &EncodeContext,
token::intern_and_get_ident(ecx.link_meta.crateid.to_str()))) token::intern_and_get_ident(ecx.link_meta.crateid.to_str())))
} }
let mut attrs = ~[]; let mut attrs = Vec::new();
for attr in krate.attrs.iter() { for attr in krate.attrs.iter() {
if !attr.name().equiv(&("crate_id")) { if !attr.name().equiv(&("crate_id")) {
attrs.push(*attr); attrs.push(*attr);
@ -1514,9 +1515,9 @@ fn synthesize_crate_attrs(ecx: &EncodeContext,
} }
fn encode_crate_deps(ebml_w: &mut writer::Encoder, cstore: &cstore::CStore) { fn encode_crate_deps(ebml_w: &mut writer::Encoder, cstore: &cstore::CStore) {
fn get_ordered_deps(cstore: &cstore::CStore) -> ~[decoder::CrateDep] { fn get_ordered_deps(cstore: &cstore::CStore) -> Vec<decoder::CrateDep> {
// Pull the cnums and name,vers,hash out of cstore // Pull the cnums and name,vers,hash out of cstore
let mut deps = ~[]; let mut deps = Vec::new();
cstore.iter_crate_data(|key, val| { cstore.iter_crate_data(|key, val| {
let dep = decoder::CrateDep { let dep = decoder::CrateDep {
cnum: key, cnum: key,
@ -1767,10 +1768,10 @@ pub static metadata_encoding_version : &'static [u8] =
0x74, //'t' as u8, 0x74, //'t' as u8,
0, 0, 0, 1 ]; 0, 0, 0, 1 ];
pub fn encode_metadata(parms: EncodeParams, krate: &Crate) -> ~[u8] { pub fn encode_metadata(parms: EncodeParams, krate: &Crate) -> Vec<u8> {
let mut wr = MemWriter::new(); let mut wr = MemWriter::new();
encode_metadata_inner(&mut wr, parms, krate); encode_metadata_inner(&mut wr, parms, krate);
wr.unwrap() wr.unwrap().move_iter().collect()
} }
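A recurring idiom in this diff, visible in `encode_metadata` just above, is bridging an API that still produces the old owned slice by draining it through an owning iterator and collecting into the `Vec` the caller now expects. A minimal sketch, in present-day Rust where `into_iter()` replaces the pre-1.0 `move_iter()`; the `serialize` helper and its buffer are invented stand-ins for the MemWriter plumbing.

```rust
// Drain an owned buffer through an owning iterator and collect it into the
// container the new signature promises. With Vec on both sides this is a
// no-op conversion, which is exactly why it was a safe transitional idiom.
fn serialize() -> Vec<u8> {
    // Stand-in for the encoder writing into a MemWriter ("rust" in ASCII).
    let buf: Vec<u8> = vec![0x72, 0x75, 0x73, 0x74];
    buf.into_iter().collect()
}

fn main() {
    assert_eq!(serialize(), b"rust".to_vec());
}
```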
fn encode_metadata_inner(wr: &mut MemWriter, parms: EncodeParams, krate: &Crate) { fn encode_metadata_inner(wr: &mut MemWriter, parms: EncodeParams, krate: &Crate) {
@ -1822,7 +1823,7 @@ fn encode_metadata_inner(wr: &mut MemWriter, parms: EncodeParams, krate: &Crate)
let mut i = ebml_w.writer.tell().unwrap(); let mut i = ebml_w.writer.tell().unwrap();
let crate_attrs = synthesize_crate_attrs(&ecx, krate); let crate_attrs = synthesize_crate_attrs(&ecx, krate);
encode_attributes(&mut ebml_w, crate_attrs); encode_attributes(&mut ebml_w, crate_attrs.as_slice());
ecx.stats.attr_bytes.set(ebml_w.writer.tell().unwrap() - i); ecx.stats.attr_bytes.set(ebml_w.writer.tell().unwrap() - i);
i = ebml_w.writer.tell().unwrap(); i = ebml_w.writer.tell().unwrap();

View File

@ -14,6 +14,7 @@ use std::cell::RefCell;
use std::option; use std::option;
use std::os; use std::os;
use std::io::fs; use std::io::fs;
use std::vec_ng::Vec;
use collections::HashSet; use collections::HashSet;
pub enum FileMatch { FileMatches, FileDoesntMatch } pub enum FileMatch { FileMatches, FileDoesntMatch }
@ -205,14 +206,14 @@ pub fn get_rust_path() -> Option<~str> {
/// $HOME/.rust /// $HOME/.rust
/// DIR/.rust for any DIR that's the current working directory /// DIR/.rust for any DIR that's the current working directory
/// or an ancestor of it /// or an ancestor of it
pub fn rust_path() -> ~[Path] { pub fn rust_path() -> Vec<Path> {
let mut env_rust_path: ~[Path] = match get_rust_path() { let mut env_rust_path: Vec<Path> = match get_rust_path() {
Some(env_path) => { Some(env_path) => {
let env_path_components: ~[&str] = let env_path_components: Vec<&str> =
env_path.split_str(PATH_ENTRY_SEPARATOR).collect(); env_path.split_str(PATH_ENTRY_SEPARATOR).collect();
env_path_components.map(|&s| Path::new(s)) env_path_components.map(|&s| Path::new(s))
} }
None => ~[] None => Vec::new()
}; };
let mut cwd = os::getcwd(); let mut cwd = os::getcwd();
// now add in default entries // now add in default entries
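The `rust_path` hunk above builds the search list by splitting an environment variable on the platform's path-list separator and falling back to an empty list before the default `$HOME/.rust` and ancestor-directory entries are appended. A minimal sketch of that first step in present-day Rust; reading `RUST_PATH` directly and using `env::split_paths` are assumptions for the example, whereas the real code goes through `get_rust_path()` and `PATH_ENTRY_SEPARATOR`.

```rust
use std::env;
use std::path::PathBuf;

// Split the environment variable into candidate paths, or start empty.
fn rust_path_sketch() -> Vec<PathBuf> {
    match env::var("RUST_PATH") {
        Ok(env_path) => env::split_paths(&env_path).collect(),
        Err(_) => Vec::new(),
    }
}

fn main() {
    // Default entries (home dir, ancestors of cwd) would be appended here.
    println!("{} search paths from the environment", rust_path_sketch().len());
}
```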

View File

@ -31,6 +31,7 @@ use std::io;
use std::os::consts::{macos, freebsd, linux, android, win32}; use std::os::consts::{macos, freebsd, linux, android, win32};
use std::str; use std::str;
use std::vec; use std::vec;
use std::vec_ng::Vec;
use collections::{HashMap, HashSet}; use collections::{HashMap, HashSet};
use flate; use flate;
@ -183,7 +184,7 @@ impl<'a> Context<'a> {
// A Library candidate is created if the metadata for the set of // A Library candidate is created if the metadata for the set of
// libraries corresponds to the crate id and hash criteria that this // libraries corresponds to the crate id and hash criteria that this
// search is being performed for. // search is being performed for.
let mut libraries = ~[]; let mut libraries = Vec::new();
for (_hash, (rlibs, dylibs)) in candidates.move_iter() { for (_hash, (rlibs, dylibs)) in candidates.move_iter() {
let mut metadata = None; let mut metadata = None;
let rlib = self.extract_one(rlibs, "rlib", &mut metadata); let rlib = self.extract_one(rlibs, "rlib", &mut metadata);
@ -205,7 +206,7 @@ impl<'a> Context<'a> {
// libraries or not. // libraries or not.
match libraries.len() { match libraries.len() {
0 => None, 0 => None,
1 => Some(libraries[0]), 1 => Some(libraries.move_iter().next().unwrap()),
_ => { _ => {
self.sess.span_err(self.span, self.sess.span_err(self.span,
format!("multiple matching crates for `{}`", format!("multiple matching crates for `{}`",
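The interesting change in the loader hunk above is `libraries[0]` becoming `libraries.move_iter().next().unwrap()`: indexing a `Vec` only borrows, so taking ownership of the single matching candidate means consuming the vector. A minimal sketch of that selection in present-day Rust, with `String` standing in for the real `Library` type and a `panic!` standing in for the `span_err` path.

```rust
// Pick the unique candidate by ownership: consume the Vec instead of indexing.
fn pick_one(libraries: Vec<String>) -> Option<String> {
    match libraries.len() {
        0 => None,
        1 => Some(libraries.into_iter().next().unwrap()),
        _ => panic!("multiple matching crates"),
    }
}

fn main() {
    assert_eq!(pick_one(vec!["libstd.rlib".to_string()]),
               Some("libstd.rlib".to_string()));
    assert_eq!(pick_one(Vec::new()), None);
}
```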

View File

@ -20,6 +20,7 @@ use middle::ty;
use std::str; use std::str;
use std::uint; use std::uint;
use std::vec_ng::Vec;
use syntax::abi::AbiSet; use syntax::abi::AbiSet;
use syntax::abi; use syntax::abi;
use syntax::ast; use syntax::ast;
@ -177,7 +178,7 @@ fn parse_substs(st: &mut PState, conv: conv_did) -> ty::substs {
let self_ty = parse_opt(st, |st| parse_ty(st, |x,y| conv(x,y)) ); let self_ty = parse_opt(st, |st| parse_ty(st, |x,y| conv(x,y)) );
assert_eq!(next(st), '['); assert_eq!(next(st), '[');
let mut params: ~[ty::t] = ~[]; let mut params: Vec<ty::t> = Vec::new();
while peek(st) != ']' { params.push(parse_ty(st, |x,y| conv(x,y))); } while peek(st) != ']' { params.push(parse_ty(st, |x,y| conv(x,y))); }
st.pos = st.pos + 1u; st.pos = st.pos + 1u;
@ -362,7 +363,7 @@ fn parse_ty(st: &mut PState, conv: conv_did) -> ty::t {
} }
'T' => { 'T' => {
assert_eq!(next(st), '['); assert_eq!(next(st), '[');
let mut params = ~[]; let mut params = Vec::new();
while peek(st) != ']' { params.push(parse_ty(st, |x,y| conv(x,y))); } while peek(st) != ']' { params.push(parse_ty(st, |x,y| conv(x,y))); }
st.pos = st.pos + 1u; st.pos = st.pos + 1u;
return ty::mk_tup(st.tcx, params); return ty::mk_tup(st.tcx, params);
@ -520,7 +521,7 @@ fn parse_sig(st: &mut PState, conv: conv_did) -> ty::FnSig {
assert_eq!(next(st), '['); assert_eq!(next(st), '[');
let id = parse_uint(st) as ast::NodeId; let id = parse_uint(st) as ast::NodeId;
assert_eq!(next(st), '|'); assert_eq!(next(st), '|');
let mut inputs = ~[]; let mut inputs = Vec::new();
while peek(st) != ']' { while peek(st) != ']' {
inputs.push(parse_ty(st, |x,y| conv(x,y))); inputs.push(parse_ty(st, |x,y| conv(x,y)));
} }
@ -583,7 +584,7 @@ fn parse_type_param_def(st: &mut PState, conv: conv_did) -> ty::TypeParameterDef
fn parse_bounds(st: &mut PState, conv: conv_did) -> ty::ParamBounds { fn parse_bounds(st: &mut PState, conv: conv_did) -> ty::ParamBounds {
let mut param_bounds = ty::ParamBounds { let mut param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(), builtin_bounds: ty::EmptyBuiltinBounds(),
trait_bounds: ~[] trait_bounds: Vec::new()
}; };
loop { loop {
match next(st) { match next(st) {

View File

@ -19,6 +19,7 @@ use std::io;
use std::io::MemWriter; use std::io::MemWriter;
use std::str; use std::str;
use std::fmt; use std::fmt;
use std::vec_ng::Vec;
use middle::ty::param_ty; use middle::ty::param_ty;
use middle::ty; use middle::ty;
@ -286,7 +287,7 @@ fn enc_sty(w: &mut MemWriter, cx: @ctxt, st: &ty::sty) {
enc_trait_store(w, cx, store); enc_trait_store(w, cx, store);
enc_mutability(w, mt); enc_mutability(w, mt);
let bounds = ty::ParamBounds {builtin_bounds: bounds, let bounds = ty::ParamBounds {builtin_bounds: bounds,
trait_bounds: ~[]}; trait_bounds: Vec::new()};
enc_bounds(w, cx, &bounds); enc_bounds(w, cx, &bounds);
mywrite!(w, "]"); mywrite!(w, "]");
} }
@ -383,7 +384,7 @@ fn enc_closure_ty(w: &mut MemWriter, cx: @ctxt, ft: &ty::ClosureTy) {
enc_onceness(w, ft.onceness); enc_onceness(w, ft.onceness);
enc_region(w, cx, ft.region); enc_region(w, cx, ft.region);
let bounds = ty::ParamBounds {builtin_bounds: ft.bounds, let bounds = ty::ParamBounds {builtin_bounds: ft.bounds,
trait_bounds: ~[]}; trait_bounds: Vec::new()};
enc_bounds(w, cx, &bounds); enc_bounds(w, cx, &bounds);
enc_fn_sig(w, cx, &ft.sig); enc_fn_sig(w, cx, &ft.sig);
} }

View File

@ -116,9 +116,9 @@ pub fn encode_exported_macro(ebml_w: &mut writer::Encoder, i: &ast::Item) {
pub fn decode_inlined_item(cdata: @cstore::crate_metadata, pub fn decode_inlined_item(cdata: @cstore::crate_metadata,
tcx: ty::ctxt, tcx: ty::ctxt,
maps: Maps, maps: Maps,
path: ~[ast_map::PathElem], path: Vec<ast_map::PathElem> ,
par_doc: ebml::Doc) par_doc: ebml::Doc)
-> Result<ast::InlinedItem, ~[ast_map::PathElem]> { -> Result<ast::InlinedItem, Vec<ast_map::PathElem> > {
let dcx = @DecodeContext { let dcx = @DecodeContext {
cdata: cdata, cdata: cdata,
tcx: tcx, tcx: tcx,
@ -395,7 +395,7 @@ impl ast_map::FoldOps for AstRenumberer {
fn renumber_and_map_ast(xcx: @ExtendedDecodeContext, fn renumber_and_map_ast(xcx: @ExtendedDecodeContext,
map: &ast_map::Map, map: &ast_map::Map,
path: ~[ast_map::PathElem], path: Vec<ast_map::PathElem> ,
ii: ast::InlinedItem) -> ast::InlinedItem { ii: ast::InlinedItem) -> ast::InlinedItem {
ast_map::map_decoded_item(map, ast_map::map_decoded_item(map,
path.move_iter().collect(), path.move_iter().collect(),
@ -654,7 +654,7 @@ pub fn encode_vtable_res(ecx: &e::EncodeContext,
// ty::t doesn't work, and there is no way (atm) to have // ty::t doesn't work, and there is no way (atm) to have
// hand-written encoding routines combine with auto-generated // hand-written encoding routines combine with auto-generated
// ones. perhaps we should fix this. // ones. perhaps we should fix this.
ebml_w.emit_from_vec(*dr, |ebml_w, param_tables| { ebml_w.emit_from_vec(dr.as_slice(), |ebml_w, param_tables| {
encode_vtable_param_res(ecx, ebml_w, *param_tables); encode_vtable_param_res(ecx, ebml_w, *param_tables);
}) })
} }
@ -662,7 +662,7 @@ pub fn encode_vtable_res(ecx: &e::EncodeContext,
pub fn encode_vtable_param_res(ecx: &e::EncodeContext, pub fn encode_vtable_param_res(ecx: &e::EncodeContext,
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
param_tables: typeck::vtable_param_res) { param_tables: typeck::vtable_param_res) {
ebml_w.emit_from_vec(*param_tables, |ebml_w, vtable_origin| { ebml_w.emit_from_vec(param_tables.as_slice(), |ebml_w, vtable_origin| {
encode_vtable_origin(ecx, ebml_w, vtable_origin) encode_vtable_origin(ecx, ebml_w, vtable_origin)
}) })
} }
@ -679,7 +679,7 @@ pub fn encode_vtable_origin(ecx: &e::EncodeContext,
ebml_w.emit_def_id(def_id) ebml_w.emit_def_id(def_id)
}); });
ebml_w.emit_enum_variant_arg(1u, |ebml_w| { ebml_w.emit_enum_variant_arg(1u, |ebml_w| {
ebml_w.emit_tys(ecx, *tys); ebml_w.emit_tys(ecx, tys.as_slice());
}); });
ebml_w.emit_enum_variant_arg(2u, |ebml_w| { ebml_w.emit_enum_variant_arg(2u, |ebml_w| {
encode_vtable_res(ecx, ebml_w, vtable_res); encode_vtable_res(ecx, ebml_w, vtable_res);
@ -718,6 +718,8 @@ impl<'a> vtable_decoder_helpers for reader::Decoder<'a> {
-> typeck::vtable_res { -> typeck::vtable_res {
@self.read_to_vec(|this| @self.read_to_vec(|this|
this.read_vtable_param_res(tcx, cdata)) this.read_vtable_param_res(tcx, cdata))
.move_iter()
.collect()
} }
fn read_vtable_param_res(&mut self, fn read_vtable_param_res(&mut self,
@ -725,6 +727,8 @@ impl<'a> vtable_decoder_helpers for reader::Decoder<'a> {
-> typeck::vtable_param_res { -> typeck::vtable_param_res {
@self.read_to_vec(|this| @self.read_to_vec(|this|
this.read_vtable_origin(tcx, cdata)) this.read_vtable_origin(tcx, cdata))
.move_iter()
.collect()
} }
fn read_vtable_origin(&mut self, fn read_vtable_origin(&mut self,
@ -985,7 +989,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
ebml_w.tag(c::tag_table_node_type_subst, |ebml_w| { ebml_w.tag(c::tag_table_node_type_subst, |ebml_w| {
ebml_w.id(id); ebml_w.id(id);
ebml_w.tag(c::tag_table_val, |ebml_w| { ebml_w.tag(c::tag_table_val, |ebml_w| {
ebml_w.emit_tys(ecx, **tys) ebml_w.emit_tys(ecx, tys.as_slice())
}) })
}) })
} }
@ -998,7 +1002,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
ebml_w.tag(c::tag_table_freevars, |ebml_w| { ebml_w.tag(c::tag_table_freevars, |ebml_w| {
ebml_w.id(id); ebml_w.id(id);
ebml_w.tag(c::tag_table_val, |ebml_w| { ebml_w.tag(c::tag_table_val, |ebml_w| {
ebml_w.emit_from_vec(**fv, |ebml_w, fv_entry| { ebml_w.emit_from_vec(fv.as_slice(), |ebml_w, fv_entry| {
encode_freevar_entry(ebml_w, *fv_entry) encode_freevar_entry(ebml_w, *fv_entry)
}) })
}) })
@ -1077,7 +1081,8 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
ebml_w.tag(c::tag_table_capture_map, |ebml_w| { ebml_w.tag(c::tag_table_capture_map, |ebml_w| {
ebml_w.id(id); ebml_w.id(id);
ebml_w.tag(c::tag_table_val, |ebml_w| { ebml_w.tag(c::tag_table_val, |ebml_w| {
ebml_w.emit_from_vec(*cap_vars.borrow(), |ebml_w, cap_var| { ebml_w.emit_from_vec(cap_vars.borrow().as_slice(),
|ebml_w, cap_var| {
cap_var.encode(ebml_w); cap_var.encode(ebml_w);
}) })
}) })
@ -1100,7 +1105,7 @@ impl<'a> doc_decoder_helpers for ebml::Doc<'a> {
trait ebml_decoder_decoder_helpers { trait ebml_decoder_decoder_helpers {
fn read_ty(&mut self, xcx: @ExtendedDecodeContext) -> ty::t; fn read_ty(&mut self, xcx: @ExtendedDecodeContext) -> ty::t;
fn read_tys(&mut self, xcx: @ExtendedDecodeContext) -> ~[ty::t]; fn read_tys(&mut self, xcx: @ExtendedDecodeContext) -> Vec<ty::t> ;
fn read_type_param_def(&mut self, xcx: @ExtendedDecodeContext) fn read_type_param_def(&mut self, xcx: @ExtendedDecodeContext)
-> ty::TypeParameterDef; -> ty::TypeParameterDef;
fn read_ty_param_bounds_and_ty(&mut self, xcx: @ExtendedDecodeContext) fn read_ty_param_bounds_and_ty(&mut self, xcx: @ExtendedDecodeContext)
@ -1119,7 +1124,7 @@ trait ebml_decoder_decoder_helpers {
tcx: ty::ctxt, cdata: @cstore::crate_metadata) -> ty::t; tcx: ty::ctxt, cdata: @cstore::crate_metadata) -> ty::t;
fn read_tys_noxcx(&mut self, fn read_tys_noxcx(&mut self,
tcx: ty::ctxt, tcx: ty::ctxt,
cdata: @cstore::crate_metadata) -> ~[ty::t]; cdata: @cstore::crate_metadata) -> Vec<ty::t> ;
} }
impl<'a> ebml_decoder_decoder_helpers for reader::Decoder<'a> { impl<'a> ebml_decoder_decoder_helpers for reader::Decoder<'a> {
@ -1137,8 +1142,10 @@ impl<'a> ebml_decoder_decoder_helpers for reader::Decoder<'a> {
fn read_tys_noxcx(&mut self, fn read_tys_noxcx(&mut self,
tcx: ty::ctxt, tcx: ty::ctxt,
cdata: @cstore::crate_metadata) -> ~[ty::t] { cdata: @cstore::crate_metadata) -> Vec<ty::t> {
self.read_to_vec(|this| this.read_ty_noxcx(tcx, cdata) ) self.read_to_vec(|this| this.read_ty_noxcx(tcx, cdata) )
.move_iter()
.collect()
} }
fn read_ty(&mut self, xcx: @ExtendedDecodeContext) -> ty::t { fn read_ty(&mut self, xcx: @ExtendedDecodeContext) -> ty::t {
@ -1169,8 +1176,8 @@ impl<'a> ebml_decoder_decoder_helpers for reader::Decoder<'a> {
} }
} }
fn read_tys(&mut self, xcx: @ExtendedDecodeContext) -> ~[ty::t] { fn read_tys(&mut self, xcx: @ExtendedDecodeContext) -> Vec<ty::t> {
self.read_to_vec(|this| this.read_ty(xcx) ) self.read_to_vec(|this| this.read_ty(xcx)).move_iter().collect()
} }
fn read_type_param_def(&mut self, xcx: @ExtendedDecodeContext) fn read_type_param_def(&mut self, xcx: @ExtendedDecodeContext)
@ -1197,7 +1204,9 @@ impl<'a> ebml_decoder_decoder_helpers for reader::Decoder<'a> {
0, 0,
|this| { |this| {
Rc::new(this.read_to_vec(|this| Rc::new(this.read_to_vec(|this|
this.read_type_param_def(xcx))) this.read_type_param_def(xcx))
.move_iter()
.collect())
}), }),
region_param_defs: region_param_defs:
this.read_struct_field("region_param_defs", this.read_struct_field("region_param_defs",
@ -1357,7 +1366,7 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
c::tag_table_freevars => { c::tag_table_freevars => {
let fv_info = @val_dsr.read_to_vec(|val_dsr| { let fv_info = @val_dsr.read_to_vec(|val_dsr| {
@val_dsr.read_freevar_entry(xcx) @val_dsr.read_freevar_entry(xcx)
}); }).move_iter().collect();
let mut freevars = dcx.tcx.freevars.borrow_mut(); let mut freevars = dcx.tcx.freevars.borrow_mut();
freevars.get().insert(id, fv_info); freevars.get().insert(id, fv_info);
} }
@ -1394,7 +1403,9 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
} }
c::tag_table_capture_map => { c::tag_table_capture_map => {
let cvars = let cvars =
val_dsr.read_to_vec(|val_dsr| val_dsr.read_capture_var(xcx)); val_dsr.read_to_vec(|val_dsr| val_dsr.read_capture_var(xcx))
.move_iter()
.collect();
let mut capture_map = dcx.maps let mut capture_map = dcx.maps
.capture_map .capture_map
.borrow_mut(); .borrow_mut();
@ -1510,14 +1521,14 @@ fn test_simplification() {
let item = quote_item!(cx, let item = quote_item!(cx,
fn new_int_alist<B>() -> alist<int, B> { fn new_int_alist<B>() -> alist<int, B> {
fn eq_int(a: int, b: int) -> bool { a == b } fn eq_int(a: int, b: int) -> bool { a == b }
return alist {eq_fn: eq_int, data: ~[]}; return alist {eq_fn: eq_int, data: Vec::new()};
} }
).unwrap(); ).unwrap();
let item_in = e::IIItemRef(item); let item_in = e::IIItemRef(item);
let item_out = simplify_ast(item_in); let item_out = simplify_ast(item_in);
let item_exp = ast::IIItem(quote_item!(cx, let item_exp = ast::IIItem(quote_item!(cx,
fn new_int_alist<B>() -> alist<int, B> { fn new_int_alist<B>() -> alist<int, B> {
return alist {eq_fn: eq_int, data: ~[]}; return alist {eq_fn: eq_int, data: Vec::new()};
} }
).unwrap()); ).unwrap());
match (item_out, item_exp) { match (item_out, item_exp) {

View File

@ -22,6 +22,7 @@ use mc = middle::mem_categorization;
use middle::borrowck::*; use middle::borrowck::*;
use middle::moves; use middle::moves;
use middle::ty; use middle::ty;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
use syntax::ast_util; use syntax::ast_util;
use syntax::codemap::Span; use syntax::codemap::Span;
@ -143,11 +144,11 @@ impl<'a> CheckLoanCtxt<'a> {
}) })
} }
pub fn loans_generated_by(&self, scope_id: ast::NodeId) -> ~[uint] { pub fn loans_generated_by(&self, scope_id: ast::NodeId) -> Vec<uint> {
//! Returns a vector of the loans that are generated as //! Returns a vector of the loans that are generated as
//! we encounter `scope_id`. //! we encounter `scope_id`.
let mut result = ~[]; let mut result = Vec::new();
self.dfcx_loans.each_gen_bit_frozen(scope_id, |loan_index| { self.dfcx_loans.each_gen_bit_frozen(scope_id, |loan_index| {
result.push(loan_index); result.push(loan_index);
true true
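`loans_generated_by` above shows the accumulate-via-callback shape used throughout the borrow checker: the dataflow context only exposes an `each_*` visitor, so the method collects indices by pushing into a freshly created `Vec` from inside the closure and returning `true` to keep iterating. A minimal sketch in present-day Rust; the `each_gen_bit` visitor below is a hard-coded stand-in, not the real dataflow API.

```rust
// Stand-in visitor: pretend bits 0 and 2 are generated for every scope.
fn each_gen_bit(scope_id: u32, mut f: impl FnMut(usize) -> bool) {
    let _ = scope_id;
    for idx in [0usize, 2] {
        if !f(idx) {
            return;
        }
    }
}

// Collect the visited indices into a Vec, as the hunk above does.
fn loans_generated_by(scope_id: u32) -> Vec<usize> {
    let mut result = Vec::new();
    each_gen_bit(scope_id, |loan_index| {
        result.push(loan_index);
        true // keep iterating
    });
    result
}

fn main() {
    assert_eq!(loans_generated_by(7), vec![0, 2]);
}
```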

View File

@ -27,6 +27,7 @@ use util::common::indenter;
use util::ppaux::{Repr}; use util::ppaux::{Repr};
use std::cell::RefCell; use std::cell::RefCell;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
use syntax::ast_util; use syntax::ast_util;
use syntax::ast_util::IdRange; use syntax::ast_util::IdRange;
@ -70,10 +71,9 @@ struct GatherLoanCtxt<'a> {
bccx: &'a BorrowckCtxt, bccx: &'a BorrowckCtxt,
id_range: IdRange, id_range: IdRange,
move_data: move_data::MoveData, move_data: move_data::MoveData,
all_loans: @RefCell<~[Loan]>, all_loans: @RefCell<Vec<Loan> >,
item_ub: ast::NodeId, item_ub: ast::NodeId,
repeating_ids: ~[ast::NodeId] repeating_ids: Vec<ast::NodeId> }
}
impl<'a> visit::Visitor<()> for GatherLoanCtxt<'a> { impl<'a> visit::Visitor<()> for GatherLoanCtxt<'a> {
fn visit_expr(&mut self, ex: &Expr, _: ()) { fn visit_expr(&mut self, ex: &Expr, _: ()) {
@ -103,13 +103,13 @@ impl<'a> visit::Visitor<()> for GatherLoanCtxt<'a> {
} }
pub fn gather_loans(bccx: &BorrowckCtxt, decl: &ast::FnDecl, body: &ast::Block) pub fn gather_loans(bccx: &BorrowckCtxt, decl: &ast::FnDecl, body: &ast::Block)
-> (IdRange, @RefCell<~[Loan]>, move_data::MoveData) { -> (IdRange, @RefCell<Vec<Loan> >, move_data::MoveData) {
let mut glcx = GatherLoanCtxt { let mut glcx = GatherLoanCtxt {
bccx: bccx, bccx: bccx,
id_range: IdRange::max(), id_range: IdRange::max(),
all_loans: @RefCell::new(~[]), all_loans: @RefCell::new(Vec::new()),
item_ub: body.id, item_ub: body.id,
repeating_ids: ~[body.id], repeating_ids: vec!(body.id),
move_data: MoveData::new() move_data: MoveData::new()
}; };
glcx.gather_fn_arg_patterns(decl, body); glcx.gather_fn_arg_patterns(decl, body);

View File

@ -12,7 +12,8 @@
* Computes the restrictions that result from a borrow. * Computes the restrictions that result from a borrow.
*/ */
use std::vec; use std::vec_ng::Vec;
use std::vec_ng;
use middle::borrowck::*; use middle::borrowck::*;
use mc = middle::mem_categorization; use mc = middle::mem_categorization;
use middle::ty; use middle::ty;
@ -21,7 +22,7 @@ use util::ppaux::Repr;
pub enum RestrictionResult { pub enum RestrictionResult {
Safe, Safe,
SafeIf(@LoanPath, ~[Restriction]) SafeIf(@LoanPath, Vec<Restriction> )
} }
pub fn compute_restrictions(bccx: &BorrowckCtxt, pub fn compute_restrictions(bccx: &BorrowckCtxt,
@ -75,8 +76,8 @@ impl<'a> RestrictionsContext<'a> {
mc::cat_upvar(ty::UpvarId {var_id: local_id, ..}, _) => { mc::cat_upvar(ty::UpvarId {var_id: local_id, ..}, _) => {
// R-Variable // R-Variable
let lp = @LpVar(local_id); let lp = @LpVar(local_id);
SafeIf(lp, ~[Restriction {loan_path: lp, SafeIf(lp, vec!(Restriction {loan_path: lp,
set: restrictions}]) set: restrictions}))
} }
mc::cat_downcast(cmt_base) => { mc::cat_downcast(cmt_base) => {
@ -173,9 +174,11 @@ impl<'a> RestrictionsContext<'a> {
Safe => Safe, Safe => Safe,
SafeIf(base_lp, base_vec) => { SafeIf(base_lp, base_vec) => {
let lp = @LpExtend(base_lp, mc, elem); let lp = @LpExtend(base_lp, mc, elem);
SafeIf(lp, vec::append_one(base_vec, SafeIf(lp, vec_ng::append_one(base_vec,
Restriction {loan_path: lp, Restriction {
set: restrictions})) loan_path: lp,
set: restrictions
}))
} }
} }
} }
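In the restrictions hunk above, extending a loan path either stays `Safe` or appends one more `Restriction` to the accumulated list; `vec_ng::append_one(v, x)` consumed the vector and returned it with `x` appended. A minimal sketch of that result-combining shape in present-day Rust, where a plain `push` plays the role of `append_one`; the string payloads are simplified stand-ins for `@LoanPath` and `Restriction`.

```rust
#[derive(Debug, PartialEq)]
enum RestrictionResult {
    Safe,
    SafeIf(Vec<&'static str>),
}

// Extend the base result: Safe stays Safe, SafeIf gains one more restriction.
fn extend(base: RestrictionResult, restriction: &'static str) -> RestrictionResult {
    match base {
        RestrictionResult::Safe => RestrictionResult::Safe,
        RestrictionResult::SafeIf(mut base_vec) => {
            base_vec.push(restriction);
            RestrictionResult::SafeIf(base_vec)
        }
    }
}

fn main() {
    let r = extend(RestrictionResult::SafeIf(vec!["x"]), "x.f");
    assert_eq!(r, RestrictionResult::SafeIf(vec!["x", "x.f"]));
}
```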

View File

@ -24,6 +24,7 @@ use std::cell::{Cell, RefCell};
use collections::HashMap; use collections::HashMap;
use std::ops::{BitOr, BitAnd}; use std::ops::{BitOr, BitAnd};
use std::result::{Result}; use std::result::{Result};
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
use syntax::ast_map; use syntax::ast_map;
use syntax::ast_util; use syntax::ast_util;
@ -146,7 +147,7 @@ fn borrowck_fn(this: &mut BorrowckCtxt,
body); body);
check_loans::check_loans(this, &loan_dfcx, flowed_moves, check_loans::check_loans(this, &loan_dfcx, flowed_moves,
*all_loans.get(), body); all_loans.get().as_slice(), body);
visit::walk_fn(this, fk, decl, body, sp, id, ()); visit::walk_fn(this, fk, decl, body, sp, id, ());
} }
@ -209,7 +210,7 @@ pub struct Loan {
loan_path: @LoanPath, loan_path: @LoanPath,
cmt: mc::cmt, cmt: mc::cmt,
kind: ty::BorrowKind, kind: ty::BorrowKind,
restrictions: ~[Restriction], restrictions: Vec<Restriction> ,
gen_scope: ast::NodeId, gen_scope: ast::NodeId,
kill_scope: ast::NodeId, kill_scope: ast::NodeId,
span: Span, span: Span,

View File

@ -17,6 +17,7 @@ comments in the section "Moves and initialization" and in `doc.rs`.
use std::cell::RefCell; use std::cell::RefCell;
use std::uint; use std::uint;
use std::vec_ng::Vec;
use collections::{HashMap, HashSet}; use collections::{HashMap, HashSet};
use middle::borrowck::*; use middle::borrowck::*;
use middle::dataflow::DataFlowContext; use middle::dataflow::DataFlowContext;
@ -32,23 +33,23 @@ use util::ppaux::Repr;
pub struct MoveData { pub struct MoveData {
/// Move paths. See section "Move paths" in `doc.rs`. /// Move paths. See section "Move paths" in `doc.rs`.
paths: RefCell<~[MovePath]>, paths: RefCell<Vec<MovePath> >,
/// Cache of loan path to move path index, for easy lookup. /// Cache of loan path to move path index, for easy lookup.
path_map: RefCell<HashMap<@LoanPath, MovePathIndex>>, path_map: RefCell<HashMap<@LoanPath, MovePathIndex>>,
/// Each move or uninitialized variable gets an entry here. /// Each move or uninitialized variable gets an entry here.
moves: RefCell<~[Move]>, moves: RefCell<Vec<Move> >,
/// Assignments to a variable, like `x = foo`. These are assigned /// Assignments to a variable, like `x = foo`. These are assigned
/// bits for dataflow, since we must track them to ensure that /// bits for dataflow, since we must track them to ensure that
/// immutable variables are assigned at most once along each path. /// immutable variables are assigned at most once along each path.
var_assignments: RefCell<~[Assignment]>, var_assignments: RefCell<Vec<Assignment> >,
/// Assignments to a path, like `x.f = foo`. These are not /// Assignments to a path, like `x.f = foo`. These are not
/// assigned dataflow bits, but we track them because they still /// assigned dataflow bits, but we track them because they still
/// kill move bits. /// kill move bits.
path_assignments: RefCell<~[Assignment]>, path_assignments: RefCell<Vec<Assignment> >,
assignee_ids: RefCell<HashSet<ast::NodeId>>, assignee_ids: RefCell<HashSet<ast::NodeId>>,
} }
@ -173,58 +174,58 @@ pub type AssignDataFlow = DataFlowContext<AssignDataFlowOperator>;
impl MoveData { impl MoveData {
pub fn new() -> MoveData { pub fn new() -> MoveData {
MoveData { MoveData {
paths: RefCell::new(~[]), paths: RefCell::new(Vec::new()),
path_map: RefCell::new(HashMap::new()), path_map: RefCell::new(HashMap::new()),
moves: RefCell::new(~[]), moves: RefCell::new(Vec::new()),
path_assignments: RefCell::new(~[]), path_assignments: RefCell::new(Vec::new()),
var_assignments: RefCell::new(~[]), var_assignments: RefCell::new(Vec::new()),
assignee_ids: RefCell::new(HashSet::new()), assignee_ids: RefCell::new(HashSet::new()),
} }
} }
fn path_loan_path(&self, index: MovePathIndex) -> @LoanPath { fn path_loan_path(&self, index: MovePathIndex) -> @LoanPath {
let paths = self.paths.borrow(); let paths = self.paths.borrow();
paths.get()[index.get()].loan_path paths.get().get(index.get()).loan_path
} }
fn path_parent(&self, index: MovePathIndex) -> MovePathIndex { fn path_parent(&self, index: MovePathIndex) -> MovePathIndex {
let paths = self.paths.borrow(); let paths = self.paths.borrow();
paths.get()[index.get()].parent paths.get().get(index.get()).parent
} }
fn path_first_move(&self, index: MovePathIndex) -> MoveIndex { fn path_first_move(&self, index: MovePathIndex) -> MoveIndex {
let paths = self.paths.borrow(); let paths = self.paths.borrow();
paths.get()[index.get()].first_move paths.get().get(index.get()).first_move
} }
fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex { fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex {
let paths = self.paths.borrow(); let paths = self.paths.borrow();
paths.get()[index.get()].first_child paths.get().get(index.get()).first_child
} }
fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex { fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex {
let paths = self.paths.borrow(); let paths = self.paths.borrow();
paths.get()[index.get()].next_sibling paths.get().get(index.get()).next_sibling
} }
fn set_path_first_move(&self, fn set_path_first_move(&self,
index: MovePathIndex, index: MovePathIndex,
first_move: MoveIndex) { first_move: MoveIndex) {
let mut paths = self.paths.borrow_mut(); let mut paths = self.paths.borrow_mut();
paths.get()[index.get()].first_move = first_move paths.get().get_mut(index.get()).first_move = first_move
} }
fn set_path_first_child(&self, fn set_path_first_child(&self,
index: MovePathIndex, index: MovePathIndex,
first_child: MovePathIndex) { first_child: MovePathIndex) {
let mut paths = self.paths.borrow_mut(); let mut paths = self.paths.borrow_mut();
paths.get()[index.get()].first_child = first_child paths.get().get_mut(index.get()).first_child = first_child
} }
fn move_next_move(&self, index: MoveIndex) -> MoveIndex { fn move_next_move(&self, index: MoveIndex) -> MoveIndex {
//! Type safe indexing operator //! Type safe indexing operator
let moves = self.moves.borrow(); let moves = self.moves.borrow();
moves.get()[index.get()].next_move moves.get().get(index.get()).next_move
} }
fn is_var_path(&self, index: MovePathIndex) -> bool { fn is_var_path(&self, index: MovePathIndex) -> bool {
@ -605,7 +606,7 @@ impl FlowedMoveData {
self.dfcx_moves.each_gen_bit_frozen(id, |index| { self.dfcx_moves.each_gen_bit_frozen(id, |index| {
let moves = self.move_data.moves.borrow(); let moves = self.move_data.moves.borrow();
let move = &moves.get()[index]; let move = moves.get().get(index);
let moved_path = move.path; let moved_path = move.path;
f(move, self.move_data.path_loan_path(moved_path)) f(move, self.move_data.path_loan_path(moved_path))
}) })
@ -644,7 +645,7 @@ impl FlowedMoveData {
self.dfcx_moves.each_bit_on_entry_frozen(id, |index| { self.dfcx_moves.each_bit_on_entry_frozen(id, |index| {
let moves = self.move_data.moves.borrow(); let moves = self.move_data.moves.borrow();
let move = &moves.get()[index]; let move = moves.get().get(index);
let moved_path = move.path; let moved_path = move.path;
if base_indices.iter().any(|x| x == &moved_path) { if base_indices.iter().any(|x| x == &moved_path) {
// Scenario 1 or 2: `loan_path` or some base path of // Scenario 1 or 2: `loan_path` or some base path of
@ -702,7 +703,7 @@ impl FlowedMoveData {
self.dfcx_assign.each_bit_on_entry_frozen(id, |index| { self.dfcx_assign.each_bit_on_entry_frozen(id, |index| {
let var_assignments = self.move_data.var_assignments.borrow(); let var_assignments = self.move_data.var_assignments.borrow();
let assignment = &var_assignments.get()[index]; let assignment = var_assignments.get().get(index);
if assignment.path == loan_path_index && !f(assignment) { if assignment.path == loan_path_index && !f(assignment) {
false false
} else { } else {
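The MoveData hunks above all follow one pattern: tables live behind `RefCell<Vec<_>>`, small newtype indices (`MovePathIndex`, `MoveIndex`) are resolved through helper methods, and direct indexing of the borrowed contents becomes `.get(i)` / `.get_mut(i)` because the 2014 `Vec` did not yet implement the index operators (its `get` returned `&T`, unlike today's `Option`-returning `get`). A minimal sketch of that typed-index accessor pattern in present-day Rust, where plain indexing is the equivalent; `Paths` and its `String` payload are invented for the example.

```rust
use std::cell::RefCell;

// Newtype index, as in the move-data tables.
#[derive(Clone, Copy)]
struct MovePathIndex(usize);

struct Paths {
    paths: RefCell<Vec<String>>,
}

impl Paths {
    // Type-safe read accessor: borrow the table, resolve the index.
    fn path_loan_path(&self, index: MovePathIndex) -> String {
        let paths = self.paths.borrow();
        paths[index.0].clone()
    }

    // Type-safe write accessor, the counterpart of set_path_first_move above.
    fn set_path(&self, index: MovePathIndex, value: &str) {
        let mut paths = self.paths.borrow_mut();
        paths[index.0] = value.to_string();
    }
}

fn main() {
    let p = Paths { paths: RefCell::new(vec!["a".into(), "b".into()]) };
    p.set_path(MovePathIndex(1), "x.f");
    assert_eq!(p.path_loan_path(MovePathIndex(1)), "x.f");
}
```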

View File

@ -12,6 +12,7 @@ use middle::cfg::*;
use middle::graph; use middle::graph;
use middle::typeck; use middle::typeck;
use middle::ty; use middle::ty;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
use syntax::ast_util; use syntax::ast_util;
use syntax::opt_vec; use syntax::opt_vec;
@ -22,7 +23,7 @@ struct CFGBuilder {
method_map: typeck::MethodMap, method_map: typeck::MethodMap,
exit_map: NodeMap<CFGIndex>, exit_map: NodeMap<CFGIndex>,
graph: CFGGraph, graph: CFGGraph,
loop_scopes: ~[LoopScope], loop_scopes: Vec<LoopScope> ,
} }
struct LoopScope { struct LoopScope {
@ -39,7 +40,7 @@ pub fn construct(tcx: ty::ctxt,
graph: graph::Graph::new(), graph: graph::Graph::new(),
tcx: tcx, tcx: tcx,
method_map: method_map, method_map: method_map,
loop_scopes: ~[] loop_scopes: Vec::new()
}; };
let entry = cfg_builder.add_node(0, []); let entry = cfg_builder.add_node(0, []);
let exit = cfg_builder.block(blk, entry); let exit = cfg_builder.block(blk, entry);
@ -328,7 +329,7 @@ impl CFGBuilder {
ast::ExprRet(v) => { ast::ExprRet(v) => {
let v_exit = self.opt_expr(v, pred); let v_exit = self.opt_expr(v, pred);
let loop_scope = self.loop_scopes[0]; let loop_scope = *self.loop_scopes.get(0);
self.add_exiting_edge(expr, v_exit, self.add_exiting_edge(expr, v_exit,
loop_scope, loop_scope.break_index); loop_scope, loop_scope.break_index);
self.add_node(expr.id, []) self.add_node(expr.id, [])
@ -375,9 +376,9 @@ impl CFGBuilder {
ast::ExprStruct(_, ref fields, base) => { ast::ExprStruct(_, ref fields, base) => {
let base_exit = self.opt_expr(base, pred); let base_exit = self.opt_expr(base, pred);
let field_exprs: ~[@ast::Expr] = let field_exprs: Vec<@ast::Expr> =
fields.iter().map(|f| f.expr).collect(); fields.iter().map(|f| f.expr).collect();
self.straightline(expr, base_exit, field_exprs) self.straightline(expr, base_exit, field_exprs.as_slice())
} }
ast::ExprRepeat(elem, count, _) => { ast::ExprRepeat(elem, count, _) => {

View File

@ -15,6 +15,7 @@ use middle::ty;
use middle::typeck; use middle::typeck;
use util::ppaux; use util::ppaux;
use std::vec_ng::Vec;
use syntax::ast::*; use syntax::ast::*;
use syntax::{ast_util, ast_map}; use syntax::{ast_util, ast_map};
use syntax::visit::Visitor; use syntax::visit::Visitor;
@ -207,8 +208,7 @@ struct CheckItemRecursionVisitor<'a> {
sess: Session, sess: Session,
ast_map: &'a ast_map::Map, ast_map: &'a ast_map::Map,
def_map: resolve::DefMap, def_map: resolve::DefMap,
idstack: ~[NodeId] idstack: Vec<NodeId> }
}
// Make sure a const item doesn't recursively refer to itself // Make sure a const item doesn't recursively refer to itself
// FIXME: Should use the dependency graph when it's available (#1356) // FIXME: Should use the dependency graph when it's available (#1356)
@ -222,7 +222,7 @@ pub fn check_item_recursion<'a>(sess: Session,
sess: sess, sess: sess,
ast_map: ast_map, ast_map: ast_map,
def_map: def_map, def_map: def_map,
idstack: ~[] idstack: Vec::new()
}; };
visitor.visit_item(it, ()); visitor.visit_item(it, ());
} }
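`CheckItemRecursionVisitor` above only changes how `idstack` is built; the underlying check is a stack of the NodeIds currently being visited, where hitting an id already on the stack means a const refers to itself. A small sketch of that idiom in current Rust (`contains_cycle` and the ids are made up for illustration):

// Sketch only: the `idstack` membership test in current Rust.
fn contains_cycle(idstack: &[u32], id: u32) -> bool {
    idstack.iter().any(|&seen| seen == id)
}

fn main() {
    let mut idstack: Vec<u32> = Vec::new(); // was `~[]`, now `Vec::new()`
    for &id in [3u32, 7, 11].iter() {
        assert!(!contains_cycle(&idstack, id));
        idstack.push(id);
    }
    // Revisiting an id that is already on the stack is how a recursive
    // constant definition would be reported.
    assert!(contains_cycle(&idstack, 7));
    println!("visited {:?}", idstack);
}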

View File

@ -21,7 +21,8 @@ use util::ppaux::ty_to_str;
use std::cmp; use std::cmp;
use std::iter; use std::iter;
use std::vec; use std::vec_ng::Vec;
use std::vec_ng;
use syntax::ast::*; use syntax::ast::*;
use syntax::ast_util::{unguarded_pat, walk_pat}; use syntax::ast_util::{unguarded_pat, walk_pat};
use syntax::codemap::{DUMMY_SP, Span}; use syntax::codemap::{DUMMY_SP, Span};
@ -105,7 +106,7 @@ fn check_expr(v: &mut CheckMatchVisitor,
_ => { /* We assume only enum types can be uninhabited */ } _ => { /* We assume only enum types can be uninhabited */ }
} }
let pats: ~[@Pat] = arms.iter() let pats: Vec<@Pat> = arms.iter()
.filter_map(unguarded_pat) .filter_map(unguarded_pat)
.flat_map(|pats| pats.move_iter()) .flat_map(|pats| pats.move_iter())
.collect(); .collect();
@ -121,7 +122,7 @@ fn check_expr(v: &mut CheckMatchVisitor,
// Check for unreachable patterns // Check for unreachable patterns
fn check_arms(cx: &MatchCheckCtxt, arms: &[Arm]) { fn check_arms(cx: &MatchCheckCtxt, arms: &[Arm]) {
let mut seen = ~[]; let mut seen = Vec::new();
for arm in arms.iter() { for arm in arms.iter() {
for pat in arm.pats.iter() { for pat in arm.pats.iter() {
@ -151,8 +152,8 @@ fn check_arms(cx: &MatchCheckCtxt, arms: &[Arm]) {
true true
}); });
let v = ~[*pat]; let v = vec!(*pat);
match is_useful(cx, &seen, v) { match is_useful(cx, &seen, v.as_slice()) {
not_useful => { not_useful => {
cx.tcx.sess.span_err(pat.span, "unreachable pattern"); cx.tcx.sess.span_err(pat.span, "unreachable pattern");
} }
@ -170,9 +171,9 @@ fn raw_pat(p: @Pat) -> @Pat {
} }
} }
fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) { fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
assert!((!pats.is_empty())); assert!((!pats.is_empty()));
let ext = match is_useful(cx, &pats.map(|p| ~[*p]), [wild()]) { let ext = match is_useful(cx, &pats.map(|p| vec!(*p)), [wild()]) {
not_useful => { not_useful => {
// This is good, wildcard pattern isn't reachable // This is good, wildcard pattern isn't reachable
return; return;
@ -218,7 +219,7 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) {
cx.tcx.sess.span_err(sp, msg); cx.tcx.sess.span_err(sp, msg);
} }
type matrix = ~[~[@Pat]]; type matrix = Vec<Vec<@Pat> > ;
#[deriving(Clone)] #[deriving(Clone)]
enum useful { enum useful {
@ -250,10 +251,14 @@ enum ctor {
// Note: is_useful doesn't work on empty types, as the paper notes. // Note: is_useful doesn't work on empty types, as the paper notes.
// So it assumes that v is non-empty. // So it assumes that v is non-empty.
fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful { fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful {
if m.len() == 0u { return useful_; } if m.len() == 0u {
if m[0].len() == 0u { return not_useful; } return useful_;
let real_pat = match m.iter().find(|r| r[0].id != 0) { }
Some(r) => r[0], None => v[0] if m.get(0).len() == 0u {
return not_useful
}
let real_pat = match m.iter().find(|r| r.get(0).id != 0) {
Some(r) => *r.get(0), None => v[0]
}; };
let left_ty = if real_pat.id == 0 { ty::mk_nil() } let left_ty = if real_pat.id == 0 { ty::mk_nil() }
else { ty::node_id_to_type(cx.tcx, real_pat.id) }; else { ty::node_id_to_type(cx.tcx, real_pat.id) };
@ -290,7 +295,7 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful {
} }
ty::ty_unboxed_vec(..) | ty::ty_vec(..) => { ty::ty_unboxed_vec(..) | ty::ty_vec(..) => {
let max_len = m.rev_iter().fold(0, |max_len, r| { let max_len = m.rev_iter().fold(0, |max_len, r| {
match r[0].node { match r.get(0).node {
PatVec(ref before, _, ref after) => { PatVec(ref before, _, ref after) => {
cmp::max(before.len() + after.len(), max_len) cmp::max(before.len() + after.len(), max_len)
} }
@ -313,7 +318,9 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful {
} }
Some(ref ctor) => { Some(ref ctor) => {
match is_useful(cx, match is_useful(cx,
&m.iter().filter_map(|r| default(cx, *r)).collect::<matrix>(), &m.iter().filter_map(|r| {
default(cx, r.as_slice())
}).collect::<matrix>(),
v.tail()) { v.tail()) {
useful_ => useful(left_ty, (*ctor).clone()), useful_ => useful(left_ty, (*ctor).clone()),
ref u => (*u).clone(), ref u => (*u).clone(),
@ -334,10 +341,12 @@ fn is_useful_specialized(cx: &MatchCheckCtxt,
ctor: ctor, ctor: ctor,
arity: uint, arity: uint,
lty: ty::t) lty: ty::t)
-> useful { -> useful {
let ms = m.iter().filter_map(|r| specialize(cx, *r, &ctor, arity, lty)).collect::<matrix>(); let ms = m.iter().filter_map(|r| {
specialize(cx, r.as_slice(), &ctor, arity, lty)
}).collect::<matrix>();
let could_be_useful = is_useful( let could_be_useful = is_useful(
cx, &ms, specialize(cx, v, &ctor, arity, lty).unwrap()); cx, &ms, specialize(cx, v, &ctor, arity, lty).unwrap().as_slice());
match could_be_useful { match could_be_useful {
useful_ => useful(lty, ctor), useful_ => useful(lty, ctor),
ref u => (*u).clone(), ref u => (*u).clone(),
@ -408,14 +417,14 @@ fn missing_ctor(cx: &MatchCheckCtxt,
ty::ty_box(_) | ty::ty_uniq(_) | ty::ty_rptr(..) | ty::ty_tup(_) | ty::ty_box(_) | ty::ty_uniq(_) | ty::ty_rptr(..) | ty::ty_tup(_) |
ty::ty_struct(..) => { ty::ty_struct(..) => {
for r in m.iter() { for r in m.iter() {
if !is_wild(cx, r[0]) { return None; } if !is_wild(cx, *r.get(0)) { return None; }
} }
return Some(single); return Some(single);
} }
ty::ty_enum(eid, _) => { ty::ty_enum(eid, _) => {
let mut found = ~[]; let mut found = Vec::new();
for r in m.iter() { for r in m.iter() {
let r = pat_ctor_id(cx, r[0]); let r = pat_ctor_id(cx, *r.get(0));
for id in r.iter() { for id in r.iter() {
if !found.contains(id) { if !found.contains(id) {
found.push((*id).clone()); found.push((*id).clone());
@ -437,7 +446,7 @@ fn missing_ctor(cx: &MatchCheckCtxt,
let mut true_found = false; let mut true_found = false;
let mut false_found = false; let mut false_found = false;
for r in m.iter() { for r in m.iter() {
match pat_ctor_id(cx, r[0]) { match pat_ctor_id(cx, *r.get(0)) {
None => (), None => (),
Some(val(const_bool(true))) => true_found = true, Some(val(const_bool(true))) => true_found = true,
Some(val(const_bool(false))) => false_found = true, Some(val(const_bool(false))) => false_found = true,
@ -452,7 +461,7 @@ fn missing_ctor(cx: &MatchCheckCtxt,
let mut missing = true; let mut missing = true;
let mut wrong = false; let mut wrong = false;
for r in m.iter() { for r in m.iter() {
match r[0].node { match r.get(0).node {
PatVec(ref before, ref slice, ref after) => { PatVec(ref before, ref slice, ref after) => {
let count = before.len() + after.len(); let count = before.len() + after.len();
if (count < n && slice.is_none()) || count > n { if (count < n && slice.is_none()) || count > n {
@ -475,13 +484,13 @@ fn missing_ctor(cx: &MatchCheckCtxt,
// Find the lengths and slices of all vector patterns. // Find the lengths and slices of all vector patterns.
let mut vec_pat_lens = m.iter().filter_map(|r| { let mut vec_pat_lens = m.iter().filter_map(|r| {
match r[0].node { match r.get(0).node {
PatVec(ref before, ref slice, ref after) => { PatVec(ref before, ref slice, ref after) => {
Some((before.len() + after.len(), slice.is_some())) Some((before.len() + after.len(), slice.is_some()))
} }
_ => None _ => None
} }
}).collect::<~[(uint, bool)]>(); }).collect::<Vec<(uint, bool)> >();
// Sort them by length such that for patterns of the same length, // Sort them by length such that for patterns of the same length,
// those with a destructured slice come first. // those with a destructured slice come first.
@ -559,17 +568,18 @@ fn specialize(cx: &MatchCheckCtxt,
ctor_id: &ctor, ctor_id: &ctor,
arity: uint, arity: uint,
left_ty: ty::t) left_ty: ty::t)
-> Option<~[@Pat]> { -> Option<Vec<@Pat> > {
// Sad, but I can't get rid of this easily // Sad, but I can't get rid of this easily
let r0 = (*raw_pat(r[0])).clone(); let r0 = (*raw_pat(r[0])).clone();
match r0 { match r0 {
Pat{id: pat_id, node: n, span: pat_span} => Pat{id: pat_id, node: n, span: pat_span} =>
match n { match n {
PatWild => { PatWild => {
Some(vec::append(vec::from_elem(arity, wild()), r.tail())) Some(vec_ng::append(Vec::from_elem(arity, wild()), r.tail()))
} }
PatWildMulti => { PatWildMulti => {
Some(vec::append(vec::from_elem(arity, wild_multi()), r.tail())) Some(vec_ng::append(Vec::from_elem(arity, wild_multi()),
r.tail()))
} }
PatIdent(_, _, _) => { PatIdent(_, _, _) => {
let opt_def = { let opt_def = {
@ -579,7 +589,7 @@ fn specialize(cx: &MatchCheckCtxt,
match opt_def { match opt_def {
Some(DefVariant(_, id, _)) => { Some(DefVariant(_, id, _)) => {
if variant(id) == *ctor_id { if variant(id) == *ctor_id {
Some(r.tail().to_owned()) Some(Vec::from_slice(r.tail()))
} else { } else {
None None
} }
@ -617,15 +627,15 @@ fn specialize(cx: &MatchCheckCtxt,
_ => fail!("type error") _ => fail!("type error")
}; };
if match_ { if match_ {
Some(r.tail().to_owned()) Some(Vec::from_slice(r.tail()))
} else { } else {
None None
} }
} }
_ => { _ => {
Some( Some(
vec::append( vec_ng::append(
vec::from_elem(arity, wild()), Vec::from_elem(arity, wild()),
r.tail() r.tail()
) )
) )
@ -668,7 +678,7 @@ fn specialize(cx: &MatchCheckCtxt,
_ => fail!("type error") _ => fail!("type error")
}; };
if match_ { if match_ {
Some(r.tail().to_owned()) Some(Vec::from_slice(r.tail()))
} else { } else {
None None
} }
@ -676,9 +686,9 @@ fn specialize(cx: &MatchCheckCtxt,
DefVariant(_, id, _) if variant(id) == *ctor_id => { DefVariant(_, id, _) if variant(id) == *ctor_id => {
let args = match args { let args = match args {
Some(args) => args.iter().map(|x| *x).collect(), Some(args) => args.iter().map(|x| *x).collect(),
None => vec::from_elem(arity, wild()) None => Vec::from_elem(arity, wild())
}; };
Some(vec::append(args, r.tail())) Some(vec_ng::append(args, r.tail()))
} }
DefVariant(_, _, _) => None, DefVariant(_, _, _) => None,
@ -689,9 +699,9 @@ fn specialize(cx: &MatchCheckCtxt,
Some(args) => { Some(args) => {
new_args = args.iter().map(|x| *x).collect() new_args = args.iter().map(|x| *x).collect()
} }
None => new_args = vec::from_elem(arity, wild()) None => new_args = Vec::from_elem(arity, wild())
} }
Some(vec::append(new_args, r.tail())) Some(vec_ng::append(new_args, r.tail()))
} }
_ => None _ => None
} }
@ -712,7 +722,7 @@ fn specialize(cx: &MatchCheckCtxt,
_ => wild() _ => wild()
} }
}); });
Some(vec::append(args, r.tail())) Some(vec_ng::append(args, r.tail()))
} else { } else {
None None
} }
@ -743,15 +753,15 @@ fn specialize(cx: &MatchCheckCtxt,
_ => wild() _ => wild()
} }
}).collect(); }).collect();
Some(vec::append(args, r.tail())) Some(vec_ng::append(args, r.tail()))
} }
} }
} }
PatTup(args) => { PatTup(args) => {
Some(vec::append(args.iter().map(|x| *x).collect(), r.tail())) Some(vec_ng::append(args.iter().map(|x| *x).collect(), r.tail()))
} }
PatUniq(a) | PatRegion(a) => { PatUniq(a) | PatRegion(a) => {
Some(vec::append(~[a], r.tail())) Some(vec_ng::append(vec!(a), r.tail()))
} }
PatLit(expr) => { PatLit(expr) => {
let e_v = eval_const_expr(cx.tcx, expr); let e_v = eval_const_expr(cx.tcx, expr);
@ -781,13 +791,17 @@ fn specialize(cx: &MatchCheckCtxt,
single => true, single => true,
_ => fail!("type error") _ => fail!("type error")
}; };
if match_ { Some(r.tail().to_owned()) } else { None } if match_ {
Some(Vec::from_slice(r.tail()))
} else {
None
}
} }
PatRange(lo, hi) => { PatRange(lo, hi) => {
let (c_lo, c_hi) = match *ctor_id { let (c_lo, c_hi) = match *ctor_id {
val(ref v) => ((*v).clone(), (*v).clone()), val(ref v) => ((*v).clone(), (*v).clone()),
range(ref lo, ref hi) => ((*lo).clone(), (*hi).clone()), range(ref lo, ref hi) => ((*lo).clone(), (*hi).clone()),
single => return Some(r.tail().to_owned()), single => return Some(Vec::from_slice(r.tail())),
_ => fail!("type error") _ => fail!("type error")
}; };
let v_lo = eval_const_expr(cx.tcx, lo); let v_lo = eval_const_expr(cx.tcx, lo);
@ -797,7 +811,7 @@ fn specialize(cx: &MatchCheckCtxt,
let m2 = compare_const_vals(&c_hi, &v_hi); let m2 = compare_const_vals(&c_hi, &v_hi);
match (m1, m2) { match (m1, m2) {
(Some(val1), Some(val2)) if val1 >= 0 && val2 <= 0 => { (Some(val1), Some(val2)) if val1 >= 0 && val2 <= 0 => {
Some(r.tail().to_owned()) Some(Vec::from_slice(r.tail()))
}, },
(Some(_), Some(_)) => None, (Some(_), Some(_)) => None,
_ => { _ => {
@ -812,7 +826,7 @@ fn specialize(cx: &MatchCheckCtxt,
vec(_) => { vec(_) => {
let num_elements = before.len() + after.len(); let num_elements = before.len() + after.len();
if num_elements < arity && slice.is_some() { if num_elements < arity && slice.is_some() {
let mut result = ~[]; let mut result = Vec::new();
for pat in before.iter() { for pat in before.iter() {
result.push((*pat).clone()); result.push((*pat).clone());
} }
@ -827,7 +841,7 @@ fn specialize(cx: &MatchCheckCtxt,
} }
Some(result) Some(result)
} else if num_elements == arity { } else if num_elements == arity {
let mut result = ~[]; let mut result = Vec::new();
for pat in before.iter() { for pat in before.iter() {
result.push((*pat).clone()); result.push((*pat).clone());
} }
@ -849,9 +863,12 @@ fn specialize(cx: &MatchCheckCtxt,
} }
} }
fn default(cx: &MatchCheckCtxt, r: &[@Pat]) -> Option<~[@Pat]> { fn default(cx: &MatchCheckCtxt, r: &[@Pat]) -> Option<Vec<@Pat> > {
if is_wild(cx, r[0]) { Some(r.tail().to_owned()) } if is_wild(cx, r[0]) {
else { None } Some(Vec::from_slice(r.tail()))
} else {
None
}
} }
fn check_local(v: &mut CheckMatchVisitor, fn check_local(v: &mut CheckMatchVisitor,
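Most of the churn in this file is inside `is_useful`/`specialize`, so it may help to restate the algorithm they implement: the `matrix` is the list of pattern rows already seen, and a new row is useful only if some value matches it but none of the existing rows. A heavily simplified sketch in current Rust, restricted to wildcards and integer literals (for integers a missing constructor always exists, so the default-matrix rule applies); every name here is illustrative, not the compiler's:

// Sketch only: usefulness of a pattern row against a matrix of rows.
#[derive(Clone, Copy, PartialEq)]
enum Pat { Wild, Lit(i64) }

type Matrix = Vec<Vec<Pat>>;

fn is_useful(m: &Matrix, v: &[Pat]) -> bool {
    if m.is_empty() { return true; }      // nothing covers v yet
    if m[0].is_empty() { return false; }  // all columns consumed: v is covered
    match v[0] {
        Pat::Lit(n) => {
            // Specialize: keep rows whose first column can match `n`,
            // dropping that column.
            let spec: Matrix = m.iter()
                .filter(|r| r[0] == Pat::Wild || r[0] == Pat::Lit(n))
                .map(|r| r[1..].to_vec())
                .collect();
            is_useful(&spec, &v[1..])
        }
        Pat::Wild => {
            // Default matrix: rows whose first column is a wildcard.
            let dflt: Matrix = m.iter()
                .filter(|r| r[0] == Pat::Wild)
                .map(|r| r[1..].to_vec())
                .collect();
            is_useful(&dflt, &v[1..])
        }
    }
}

fn main() {
    let seen: Matrix = vec![vec![Pat::Lit(0)], vec![Pat::Wild]];
    // A second wildcard arm is unreachable once `_` has been seen.
    assert!(!is_useful(&seen, &[Pat::Wild]));
    println!("ok");
}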

View File

@ -27,6 +27,7 @@ use syntax::{ast, ast_map, ast_util};
use std::cell::RefCell; use std::cell::RefCell;
use collections::HashMap; use collections::HashMap;
use std::rc::Rc; use std::rc::Rc;
use std::vec_ng::Vec;
// //
// This pass classifies expressions by their constant-ness. // This pass classifies expressions by their constant-ness.
@ -321,7 +322,7 @@ pub enum const_val {
const_int(i64), const_int(i64),
const_uint(u64), const_uint(u64),
const_str(InternedString), const_str(InternedString),
const_binary(Rc<~[u8]>), const_binary(Rc<Vec<u8> >),
const_bool(bool) const_bool(bool)
} }

View File

@ -20,6 +20,7 @@
use std::io; use std::io;
use std::uint; use std::uint;
use std::vec; use std::vec;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
use syntax::ast_util; use syntax::ast_util;
use syntax::ast_util::IdRange; use syntax::ast_util::IdRange;
@ -54,15 +55,14 @@ pub struct DataFlowContext<O> {
// the full vector (see the method `compute_id_range()`). // the full vector (see the method `compute_id_range()`).
/// bits generated as we exit the scope `id`. Updated by `add_gen()`. /// bits generated as we exit the scope `id`. Updated by `add_gen()`.
priv gens: ~[uint], priv gens: Vec<uint> ,
/// bits killed as we exit the scope `id`. Updated by `add_kill()`. /// bits killed as we exit the scope `id`. Updated by `add_kill()`.
priv kills: ~[uint], priv kills: Vec<uint> ,
/// bits that are valid on entry to the scope `id`. Updated by /// bits that are valid on entry to the scope `id`. Updated by
/// `propagate()`. /// `propagate()`.
priv on_entry: ~[uint] priv on_entry: Vec<uint> }
}
/// Parameterization for the precise form of data flow that is used. /// Parameterization for the precise form of data flow that is used.
pub trait DataFlowOperator { pub trait DataFlowOperator {
@ -80,7 +80,7 @@ struct PropagationContext<'a, O> {
struct LoopScope<'a> { struct LoopScope<'a> {
loop_id: ast::NodeId, loop_id: ast::NodeId,
break_bits: ~[uint] break_bits: Vec<uint>
} }
impl<O:DataFlowOperator> pprust::PpAnn for DataFlowContext<O> { impl<O:DataFlowOperator> pprust::PpAnn for DataFlowContext<O> {
@ -131,9 +131,9 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
debug!("DataFlowContext::new(id_range={:?}, bits_per_id={:?}, words_per_id={:?})", debug!("DataFlowContext::new(id_range={:?}, bits_per_id={:?}, words_per_id={:?})",
id_range, bits_per_id, words_per_id); id_range, bits_per_id, words_per_id);
let gens = ~[]; let gens = Vec::new();
let kills = ~[]; let kills = Vec::new();
let on_entry = ~[]; let on_entry = Vec::new();
DataFlowContext { DataFlowContext {
tcx: tcx, tcx: tcx,
@ -332,7 +332,7 @@ impl<O:DataFlowOperator+Clone+'static> DataFlowContext<O> {
}; };
let mut temp = vec::from_elem(self.words_per_id, 0u); let mut temp = vec::from_elem(self.words_per_id, 0u);
let mut loop_scopes = ~[]; let mut loop_scopes = Vec::new();
while propcx.changed { while propcx.changed {
propcx.changed = false; propcx.changed = false;
@ -367,7 +367,7 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
fn walk_block(&mut self, fn walk_block(&mut self,
blk: &ast::Block, blk: &ast::Block,
in_out: &mut [uint], in_out: &mut [uint],
loop_scopes: &mut ~[LoopScope]) { loop_scopes: &mut Vec<LoopScope> ) {
debug!("DataFlowContext::walk_block(blk.id={}, in_out={})", debug!("DataFlowContext::walk_block(blk.id={}, in_out={})",
blk.id, bits_to_str(in_out)); blk.id, bits_to_str(in_out));
@ -385,7 +385,7 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
fn walk_stmt(&mut self, fn walk_stmt(&mut self,
stmt: @ast::Stmt, stmt: @ast::Stmt,
in_out: &mut [uint], in_out: &mut [uint],
loop_scopes: &mut ~[LoopScope]) { loop_scopes: &mut Vec<LoopScope> ) {
match stmt.node { match stmt.node {
ast::StmtDecl(decl, _) => { ast::StmtDecl(decl, _) => {
self.walk_decl(decl, in_out, loop_scopes); self.walk_decl(decl, in_out, loop_scopes);
@ -404,7 +404,7 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
fn walk_decl(&mut self, fn walk_decl(&mut self,
decl: @ast::Decl, decl: @ast::Decl,
in_out: &mut [uint], in_out: &mut [uint],
loop_scopes: &mut ~[LoopScope]) { loop_scopes: &mut Vec<LoopScope> ) {
match decl.node { match decl.node {
ast::DeclLocal(local) => { ast::DeclLocal(local) => {
self.walk_opt_expr(local.init, in_out, loop_scopes); self.walk_opt_expr(local.init, in_out, loop_scopes);
@ -418,7 +418,7 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
fn walk_expr(&mut self, fn walk_expr(&mut self,
expr: &ast::Expr, expr: &ast::Expr,
in_out: &mut [uint], in_out: &mut [uint],
loop_scopes: &mut ~[LoopScope]) { loop_scopes: &mut Vec<LoopScope> ) {
debug!("DataFlowContext::walk_expr(expr={}, in_out={})", debug!("DataFlowContext::walk_expr(expr={}, in_out={})",
expr.repr(self.dfcx.tcx), bits_to_str(in_out)); expr.repr(self.dfcx.tcx), bits_to_str(in_out));
@ -469,12 +469,12 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
let mut body_bits = in_out.to_owned(); let mut body_bits = in_out.to_owned();
loop_scopes.push(LoopScope { loop_scopes.push(LoopScope {
loop_id: expr.id, loop_id: expr.id,
break_bits: in_out.to_owned() break_bits: Vec::from_slice(in_out)
}); });
self.walk_block(blk, body_bits, loop_scopes); self.walk_block(blk, body_bits, loop_scopes);
self.add_to_entry_set(expr.id, body_bits); self.add_to_entry_set(expr.id, body_bits);
let new_loop_scope = loop_scopes.pop().unwrap(); let new_loop_scope = loop_scopes.pop().unwrap();
copy_bits(new_loop_scope.break_bits, in_out); copy_bits(new_loop_scope.break_bits.as_slice(), in_out);
} }
ast::ExprForLoop(..) => fail!("non-desugared expr_for_loop"), ast::ExprForLoop(..) => fail!("non-desugared expr_for_loop"),
@ -493,14 +493,14 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
self.reset(in_out); self.reset(in_out);
loop_scopes.push(LoopScope { loop_scopes.push(LoopScope {
loop_id: expr.id, loop_id: expr.id,
break_bits: in_out.to_owned() break_bits: Vec::from_slice(in_out)
}); });
self.walk_block(blk, body_bits, loop_scopes); self.walk_block(blk, body_bits, loop_scopes);
self.add_to_entry_set(expr.id, body_bits); self.add_to_entry_set(expr.id, body_bits);
let new_loop_scope = loop_scopes.pop().unwrap(); let new_loop_scope = loop_scopes.pop().unwrap();
assert_eq!(new_loop_scope.loop_id, expr.id); assert_eq!(new_loop_scope.loop_id, expr.id);
copy_bits(new_loop_scope.break_bits, in_out); copy_bits(new_loop_scope.break_bits.as_slice(), in_out);
} }
ast::ExprMatch(discr, ref arms) => { ast::ExprMatch(discr, ref arms) => {
@ -691,7 +691,9 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
in_out: &mut [uint]) { in_out: &mut [uint]) {
self.pop_scopes(from_expr, to_scope, in_out); self.pop_scopes(from_expr, to_scope, in_out);
self.dfcx.apply_kill(from_expr.id, in_out); self.dfcx.apply_kill(from_expr.id, in_out);
join_bits(&self.dfcx.oper, in_out, to_scope.break_bits); join_bits(&self.dfcx.oper,
in_out,
to_scope.break_bits.as_mut_slice());
debug!("break_from_to(from_expr={}, to_scope={}) final break_bits={}", debug!("break_from_to(from_expr={}, to_scope={}) final break_bits={}",
from_expr.repr(self.tcx()), from_expr.repr(self.tcx()),
to_scope.loop_id, to_scope.loop_id,
@ -701,7 +703,7 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
fn walk_exprs(&mut self, fn walk_exprs(&mut self,
exprs: &[@ast::Expr], exprs: &[@ast::Expr],
in_out: &mut [uint], in_out: &mut [uint],
loop_scopes: &mut ~[LoopScope]) { loop_scopes: &mut Vec<LoopScope> ) {
for &expr in exprs.iter() { for &expr in exprs.iter() {
self.walk_expr(expr, in_out, loop_scopes); self.walk_expr(expr, in_out, loop_scopes);
} }
@ -710,7 +712,7 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
fn walk_opt_expr(&mut self, fn walk_opt_expr(&mut self,
opt_expr: Option<@ast::Expr>, opt_expr: Option<@ast::Expr>,
in_out: &mut [uint], in_out: &mut [uint],
loop_scopes: &mut ~[LoopScope]) { loop_scopes: &mut Vec<LoopScope> ) {
for &expr in opt_expr.iter() { for &expr in opt_expr.iter() {
self.walk_expr(expr, in_out, loop_scopes); self.walk_expr(expr, in_out, loop_scopes);
} }
@ -720,7 +722,7 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
call_id: ast::NodeId, call_id: ast::NodeId,
args: &[@ast::Expr], args: &[@ast::Expr],
in_out: &mut [uint], in_out: &mut [uint],
loop_scopes: &mut ~[LoopScope]) { loop_scopes: &mut Vec<LoopScope> ) {
self.walk_exprs(args, in_out, loop_scopes); self.walk_exprs(args, in_out, loop_scopes);
// FIXME(#6268) nested method calls // FIXME(#6268) nested method calls
@ -737,7 +739,7 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
fn walk_pat(&mut self, fn walk_pat(&mut self,
pat: @ast::Pat, pat: @ast::Pat,
in_out: &mut [uint], in_out: &mut [uint],
_loop_scopes: &mut ~[LoopScope]) { _loop_scopes: &mut Vec<LoopScope> ) {
debug!("DataFlowContext::walk_pat(pat={}, in_out={})", debug!("DataFlowContext::walk_pat(pat={}, in_out={})",
pat.repr(self.dfcx.tcx), bits_to_str(in_out)); pat.repr(self.dfcx.tcx), bits_to_str(in_out));
@ -752,7 +754,7 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
fn walk_pat_alternatives(&mut self, fn walk_pat_alternatives(&mut self,
pats: &[@ast::Pat], pats: &[@ast::Pat],
in_out: &mut [uint], in_out: &mut [uint],
loop_scopes: &mut ~[LoopScope]) { loop_scopes: &mut Vec<LoopScope> ) {
if pats.len() == 1 { if pats.len() == 1 {
// Common special case: // Common special case:
return self.walk_pat(pats[0], in_out, loop_scopes); return self.walk_pat(pats[0], in_out, loop_scopes);
@ -769,10 +771,12 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
} }
} }
fn find_scope<'a>(&self, fn find_scope<'a,'b>(
expr: &ast::Expr, &self,
label: Option<ast::Ident>, expr: &ast::Expr,
loop_scopes: &'a mut ~[LoopScope]) -> &'a mut LoopScope { label: Option<ast::Ident>,
loop_scopes: &'a mut Vec<LoopScope<'b>>)
-> &'a mut LoopScope<'b> {
let index = match label { let index = match label {
None => { None => {
let len = loop_scopes.len(); let len = loop_scopes.len();
@ -802,7 +806,7 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
} }
}; };
&mut loop_scopes[index] loop_scopes.get_mut(index)
} }
fn is_method_call(&self, expr: &ast::Expr) -> bool { fn is_method_call(&self, expr: &ast::Expr) -> bool {
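The dataflow hunks keep the same flat layout they had with `~[uint]`: each node id owns `words_per_id` machine words inside one long vector, and propagation ORs in the gen bits and masks out the kill bits. A rough sketch of that bookkeeping in current Rust (toy `DataFlow` type, `usize` words standing in for `uint`):

// Sketch only: gen/kill bit-vector bookkeeping over flat word vectors.
struct DataFlow {
    words_per_id: usize,
    gens: Vec<usize>,
    kills: Vec<usize>,
    // bits valid on entry to each node (unused in this toy `apply`)
    on_entry: Vec<usize>,
}

impl DataFlow {
    fn new(num_nodes: usize, words_per_id: usize) -> DataFlow {
        DataFlow {
            words_per_id,
            gens: vec![0; num_nodes * words_per_id],
            kills: vec![0; num_nodes * words_per_id],
            on_entry: vec![0; num_nodes * words_per_id],
        }
    }

    // The words belonging to node `id`.
    fn range(&self, id: usize) -> std::ops::Range<usize> {
        id * self.words_per_id..(id + 1) * self.words_per_id
    }

    fn add_gen(&mut self, id: usize, bit: usize) {
        let word = id * self.words_per_id + bit / usize::BITS as usize;
        self.gens[word] |= 1usize << (bit % usize::BITS as usize);
    }

    // bits on exit = (bits on entry | gen) & !kill, applied in place.
    fn apply(&self, id: usize, in_out: &mut [usize]) {
        let r = self.range(id);
        for (w, (&g, &k)) in in_out
            .iter_mut()
            .zip(self.gens[r.clone()].iter().zip(self.kills[r].iter()))
        {
            *w = (*w | g) & !k;
        }
    }
}

fn main() {
    let mut dfcx = DataFlow::new(2, 1);
    dfcx.add_gen(0, 3);
    let mut bits = vec![0usize; 1];
    dfcx.apply(0, &mut bits);
    assert_eq!(bits[0], 0b1000);
    println!("bits = {:#b}", bits[0]);
}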

View File

@ -19,6 +19,7 @@ use middle::typeck;
use util::nodemap::NodeSet; use util::nodemap::NodeSet;
use collections::HashSet; use collections::HashSet;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
use syntax::ast_map; use syntax::ast_map;
use syntax::ast_util::{local_def, def_id_of_def, is_local}; use syntax::ast_util::{local_def, def_id_of_def, is_local};
@ -49,7 +50,7 @@ fn should_explore(tcx: ty::ctxt, def_id: ast::DefId) -> bool {
} }
struct MarkSymbolVisitor { struct MarkSymbolVisitor {
worklist: ~[ast::NodeId], worklist: Vec<ast::NodeId> ,
method_map: typeck::MethodMap, method_map: typeck::MethodMap,
tcx: ty::ctxt, tcx: ty::ctxt,
live_symbols: ~HashSet<ast::NodeId>, live_symbols: ~HashSet<ast::NodeId>,
@ -58,7 +59,7 @@ struct MarkSymbolVisitor {
impl MarkSymbolVisitor { impl MarkSymbolVisitor {
fn new(tcx: ty::ctxt, fn new(tcx: ty::ctxt,
method_map: typeck::MethodMap, method_map: typeck::MethodMap,
worklist: ~[ast::NodeId]) -> MarkSymbolVisitor { worklist: Vec<ast::NodeId> ) -> MarkSymbolVisitor {
MarkSymbolVisitor { MarkSymbolVisitor {
worklist: worklist, worklist: worklist,
method_map: method_map, method_map: method_map,
@ -216,7 +217,7 @@ fn has_allow_dead_code_or_lang_attr(attrs: &[ast::Attribute]) -> bool {
// 2) We are not sure to be live or not // 2) We are not sure to be live or not
// * Implementation of a trait method // * Implementation of a trait method
struct LifeSeeder { struct LifeSeeder {
worklist: ~[ast::NodeId], worklist: Vec<ast::NodeId> ,
} }
impl Visitor<()> for LifeSeeder { impl Visitor<()> for LifeSeeder {
@ -254,8 +255,8 @@ impl Visitor<()> for LifeSeeder {
fn create_and_seed_worklist(tcx: ty::ctxt, fn create_and_seed_worklist(tcx: ty::ctxt,
exported_items: &privacy::ExportedItems, exported_items: &privacy::ExportedItems,
reachable_symbols: &NodeSet, reachable_symbols: &NodeSet,
krate: &ast::Crate) -> ~[ast::NodeId] { krate: &ast::Crate) -> Vec<ast::NodeId> {
let mut worklist = ~[]; let mut worklist = Vec::new();
// Preferably, we would only need to seed the worklist with reachable // Preferably, we would only need to seed the worklist with reachable
// symbols. However, since the set of reachable symbols differs // symbols. However, since the set of reachable symbols differs
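The worklist whose type changes here drives a standard reachability marking: seed it with ids known to be live, then pop, mark, and push everything the marked item references. A compact sketch in current Rust, with a toy reference map standing in for the AST walk:

// Sketch only: worklist-based liveness marking over a made-up id graph.
use std::collections::{HashMap, HashSet};

fn mark_live(seeds: Vec<u32>, refs: &HashMap<u32, Vec<u32>>) -> HashSet<u32> {
    let mut worklist = seeds;
    let mut live = HashSet::new();
    while let Some(id) = worklist.pop() {
        if !live.insert(id) {
            continue; // already scanned
        }
        if let Some(targets) = refs.get(&id) {
            worklist.extend(targets.iter().copied());
        }
    }
    live
}

fn main() {
    let mut refs = HashMap::new();
    refs.insert(1u32, vec![2, 3]); // item 1 mentions items 2 and 3
    refs.insert(3u32, vec![4]);
    let live = mark_live(vec![1], &refs);
    assert!(live.contains(&4) && !live.contains(&5));
    println!("{} live items", live.len());
}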

View File

@ -11,6 +11,7 @@
use driver::session; use driver::session;
use driver::session::Session; use driver::session::Session;
use std::vec_ng::Vec;
use syntax::ast::{Crate, Name, NodeId, Item, ItemFn}; use syntax::ast::{Crate, Name, NodeId, Item, ItemFn};
use syntax::ast_map; use syntax::ast_map;
use syntax::attr; use syntax::attr;
@ -38,7 +39,7 @@ struct EntryContext<'a> {
// The functions that one might think are 'main' but aren't, e.g. // The functions that one might think are 'main' but aren't, e.g.
// main functions not defined at the top level. For diagnostics. // main functions not defined at the top level. For diagnostics.
non_main_fns: ~[(NodeId, Span)], non_main_fns: Vec<(NodeId, Span)> ,
} }
impl<'a> Visitor<()> for EntryContext<'a> { impl<'a> Visitor<()> for EntryContext<'a> {
@ -66,7 +67,7 @@ pub fn find_entry_point(session: Session, krate: &Crate, ast_map: &ast_map::Map)
main_fn: None, main_fn: None,
attr_main_fn: None, attr_main_fn: None,
start_fn: None, start_fn: None,
non_main_fns: ~[], non_main_fns: Vec::new(),
}; };
visit::walk_crate(&mut ctxt, krate, ()); visit::walk_crate(&mut ctxt, krate, ());

View File

@ -17,6 +17,7 @@ use middle::resolve;
use middle::ty; use middle::ty;
use util::nodemap::{NodeMap, NodeSet}; use util::nodemap::{NodeMap, NodeSet};
use std::vec_ng::Vec;
use syntax::codemap::Span; use syntax::codemap::Span;
use syntax::{ast, ast_util}; use syntax::{ast, ast_util};
use syntax::visit; use syntax::visit;
@ -29,12 +30,12 @@ pub struct freevar_entry {
def: ast::Def, //< The variable being accessed free. def: ast::Def, //< The variable being accessed free.
span: Span //< First span where it is accessed (there can be multiple) span: Span //< First span where it is accessed (there can be multiple)
} }
pub type freevar_info = @~[@freevar_entry]; pub type freevar_info = @Vec<@freevar_entry> ;
pub type freevar_map = NodeMap<freevar_info>; pub type freevar_map = NodeMap<freevar_info>;
struct CollectFreevarsVisitor { struct CollectFreevarsVisitor {
seen: NodeSet, seen: NodeSet,
refs: ~[@freevar_entry], refs: Vec<@freevar_entry> ,
def_map: resolve::DefMap, def_map: resolve::DefMap,
} }
@ -90,7 +91,7 @@ impl Visitor<int> for CollectFreevarsVisitor {
// in order to start the search. // in order to start the search.
fn collect_freevars(def_map: resolve::DefMap, blk: &ast::Block) -> freevar_info { fn collect_freevars(def_map: resolve::DefMap, blk: &ast::Block) -> freevar_info {
let seen = NodeSet::new(); let seen = NodeSet::new();
let refs = ~[]; let refs = Vec::new();
let mut v = CollectFreevarsVisitor { let mut v = CollectFreevarsVisitor {
seen: seen, seen: seen,

View File

@ -35,11 +35,11 @@ be indexed by the direction (see the type `Direction`).
*/ */
use std::uint; use std::uint;
use std::vec; use std::vec_ng::Vec;
pub struct Graph<N,E> { pub struct Graph<N,E> {
priv nodes: ~[Node<N>], priv nodes: Vec<Node<N>> ,
priv edges: ~[Edge<E>], priv edges: Vec<Edge<E>> ,
} }
pub struct Node<N> { pub struct Node<N> {
@ -77,13 +77,18 @@ impl EdgeIndex {
impl<N,E> Graph<N,E> { impl<N,E> Graph<N,E> {
pub fn new() -> Graph<N,E> { pub fn new() -> Graph<N,E> {
Graph {nodes: ~[], edges: ~[]} Graph {
nodes: Vec::new(),
edges: Vec::new(),
}
} }
pub fn with_capacity(num_nodes: uint, pub fn with_capacity(num_nodes: uint,
num_edges: uint) -> Graph<N,E> { num_edges: uint) -> Graph<N,E> {
Graph {nodes: vec::with_capacity(num_nodes), Graph {
edges: vec::with_capacity(num_edges)} nodes: Vec::with_capacity(num_nodes),
edges: Vec::with_capacity(num_edges),
}
} }
/////////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////////
@ -91,13 +96,13 @@ impl<N,E> Graph<N,E> {
#[inline] #[inline]
pub fn all_nodes<'a>(&'a self) -> &'a [Node<N>] { pub fn all_nodes<'a>(&'a self) -> &'a [Node<N>] {
let nodes: &'a [Node<N>] = self.nodes; let nodes: &'a [Node<N>] = self.nodes.as_slice();
nodes nodes
} }
#[inline] #[inline]
pub fn all_edges<'a>(&'a self) -> &'a [Edge<E>] { pub fn all_edges<'a>(&'a self) -> &'a [Edge<E>] {
let edges: &'a [Edge<E>] = self.edges; let edges: &'a [Edge<E>] = self.edges.as_slice();
edges edges
} }
@ -118,15 +123,15 @@ impl<N,E> Graph<N,E> {
} }
pub fn mut_node_data<'a>(&'a mut self, idx: NodeIndex) -> &'a mut N { pub fn mut_node_data<'a>(&'a mut self, idx: NodeIndex) -> &'a mut N {
&mut self.nodes[idx.get()].data &mut self.nodes.get_mut(idx.get()).data
} }
pub fn node_data<'a>(&'a self, idx: NodeIndex) -> &'a N { pub fn node_data<'a>(&'a self, idx: NodeIndex) -> &'a N {
&self.nodes[idx.get()].data &self.nodes.get(idx.get()).data
} }
pub fn node<'a>(&'a self, idx: NodeIndex) -> &'a Node<N> { pub fn node<'a>(&'a self, idx: NodeIndex) -> &'a Node<N> {
&self.nodes[idx.get()] self.nodes.get(idx.get())
} }
/////////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////////
@ -143,8 +148,10 @@ impl<N,E> Graph<N,E> {
let idx = self.next_edge_index(); let idx = self.next_edge_index();
// read current first of the list of edges from each node // read current first of the list of edges from each node
let source_first = self.nodes[source.get()].first_edge[Outgoing.repr]; let source_first = self.nodes.get(source.get())
let target_first = self.nodes[target.get()].first_edge[Incoming.repr]; .first_edge[Outgoing.repr];
let target_first = self.nodes.get(target.get())
.first_edge[Incoming.repr];
// create the new edge, with the previous firsts from each node // create the new edge, with the previous firsts from each node
// as the next pointers // as the next pointers
@ -156,22 +163,22 @@ impl<N,E> Graph<N,E> {
}); });
// adjust the firsts for each node target be the next object. // adjust the firsts for each node target be the next object.
self.nodes[source.get()].first_edge[Outgoing.repr] = idx; self.nodes.get_mut(source.get()).first_edge[Outgoing.repr] = idx;
self.nodes[target.get()].first_edge[Incoming.repr] = idx; self.nodes.get_mut(target.get()).first_edge[Incoming.repr] = idx;
return idx; return idx;
} }
pub fn mut_edge_data<'a>(&'a mut self, idx: EdgeIndex) -> &'a mut E { pub fn mut_edge_data<'a>(&'a mut self, idx: EdgeIndex) -> &'a mut E {
&mut self.edges[idx.get()].data &mut self.edges.get_mut(idx.get()).data
} }
pub fn edge_data<'a>(&'a self, idx: EdgeIndex) -> &'a E { pub fn edge_data<'a>(&'a self, idx: EdgeIndex) -> &'a E {
&self.edges[idx.get()].data &self.edges.get(idx.get()).data
} }
pub fn edge<'a>(&'a self, idx: EdgeIndex) -> &'a Edge<E> { pub fn edge<'a>(&'a self, idx: EdgeIndex) -> &'a Edge<E> {
&self.edges[idx.get()] self.edges.get(idx.get())
} }
pub fn first_adjacent(&self, node: NodeIndex, dir: Direction) -> EdgeIndex { pub fn first_adjacent(&self, node: NodeIndex, dir: Direction) -> EdgeIndex {
@ -179,7 +186,7 @@ impl<N,E> Graph<N,E> {
//! This is useful if you wish to modify the graph while walking //! This is useful if you wish to modify the graph while walking
//! the linked list of edges. //! the linked list of edges.
self.nodes[node.get()].first_edge[dir.repr] self.nodes.get(node.get()).first_edge[dir.repr]
} }
pub fn next_adjacent(&self, edge: EdgeIndex, dir: Direction) -> EdgeIndex { pub fn next_adjacent(&self, edge: EdgeIndex, dir: Direction) -> EdgeIndex {
@ -187,7 +194,7 @@ impl<N,E> Graph<N,E> {
//! This is useful if you wish to modify the graph while walking //! This is useful if you wish to modify the graph while walking
//! the linked list of edges. //! the linked list of edges.
self.edges[edge.get()].next_edge[dir.repr] self.edges.get(edge.get()).next_edge[dir.repr]
} }
/////////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////////
@ -231,7 +238,7 @@ impl<N,E> Graph<N,E> {
let mut edge_idx = self.first_adjacent(node, dir); let mut edge_idx = self.first_adjacent(node, dir);
while edge_idx != InvalidEdgeIndex { while edge_idx != InvalidEdgeIndex {
let edge = &self.edges[edge_idx.get()]; let edge = self.edges.get(edge_idx.get());
if !f(edge_idx, edge) { if !f(edge_idx, edge) {
return false; return false;
} }
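`Graph` keeps its adjacency information as two intrusive singly linked lists threaded through the edge vector: every node stores the index of its first outgoing and incoming edge, and every edge stores the next edge in each direction, which is why the accessors above become `get`/`get_mut` calls on the two Vecs. A self-contained sketch of that representation in current Rust (plain `usize` indices instead of `NodeIndex`/`EdgeIndex`):

// Sketch only: nodes and edges in two Vecs, adjacency via intrusive lists.
const INVALID: usize = usize::MAX;
const OUTGOING: usize = 0;
const INCOMING: usize = 1;

struct Node<N> { data: N, first_edge: [usize; 2] }
struct Edge<E> { data: E, source: usize, target: usize, next_edge: [usize; 2] }

struct Graph<N, E> { nodes: Vec<Node<N>>, edges: Vec<Edge<E>> }

impl<N, E> Graph<N, E> {
    fn new() -> Graph<N, E> {
        Graph { nodes: Vec::new(), edges: Vec::new() }
    }

    fn add_node(&mut self, data: N) -> usize {
        self.nodes.push(Node { data, first_edge: [INVALID; 2] });
        self.nodes.len() - 1
    }

    fn add_edge(&mut self, source: usize, target: usize, data: E) -> usize {
        let idx = self.edges.len();
        // The new edge points at the old list heads...
        self.edges.push(Edge {
            data, source, target,
            next_edge: [self.nodes[source].first_edge[OUTGOING],
                        self.nodes[target].first_edge[INCOMING]],
        });
        // ...and becomes the new head on both endpoints.
        self.nodes[source].first_edge[OUTGOING] = idx;
        self.nodes[target].first_edge[INCOMING] = idx;
        idx
    }

    fn outgoing(&self, node: usize) -> Vec<usize> {
        let mut out = Vec::new();
        let mut e = self.nodes[node].first_edge[OUTGOING];
        while e != INVALID {
            out.push(self.edges[e].target);
            e = self.edges[e].next_edge[OUTGOING];
        }
        out
    }
}

fn main() {
    let mut g: Graph<&str, ()> = Graph::new();
    let (a, b, c) = (g.add_node("a"), g.add_node("b"), g.add_node("c"));
    g.add_edge(a, b, ());
    g.add_edge(a, c, ());
    assert_eq!(g.outgoing(a), vec![c, b]); // most recently added edge first
    println!("{:?}", g.outgoing(a));
}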

View File

@ -16,6 +16,7 @@ use middle::typeck;
use util::ppaux::{Repr, ty_to_str}; use util::ppaux::{Repr, ty_to_str};
use util::ppaux::UserString; use util::ppaux::UserString;
use std::vec_ng::Vec;
use syntax::ast::*; use syntax::ast::*;
use syntax::attr; use syntax::attr;
use syntax::codemap::Span; use syntax::codemap::Span;
@ -94,7 +95,7 @@ fn check_struct_safe_for_destructor(cx: &mut Context,
let struct_ty = ty::mk_struct(cx.tcx, struct_did, ty::substs { let struct_ty = ty::mk_struct(cx.tcx, struct_did, ty::substs {
regions: ty::NonerasedRegions(opt_vec::Empty), regions: ty::NonerasedRegions(opt_vec::Empty),
self_ty: None, self_ty: None,
tps: ~[] tps: Vec::new()
}); });
if !ty::type_is_sendable(cx.tcx, struct_ty) { if !ty::type_is_sendable(cx.tcx, struct_ty) {
cx.tcx.sess.span_err(span, cx.tcx.sess.span_err(span,
@ -533,7 +534,7 @@ pub fn check_cast_for_escaping_regions(
// Collect up the regions that appear in the target type. We want to // Collect up the regions that appear in the target type. We want to
// ensure that these lifetimes are shorter than all lifetimes that are in // ensure that these lifetimes are shorter than all lifetimes that are in
// the source type. See test `src/test/compile-fail/regions-trait-2.rs` // the source type. See test `src/test/compile-fail/regions-trait-2.rs`
let mut target_regions = ~[]; let mut target_regions = Vec::new();
ty::walk_regions_and_ty( ty::walk_regions_and_ty(
cx.tcx, cx.tcx,
target_ty, target_ty,

View File

@ -33,6 +33,7 @@ use syntax::visit;
use collections::HashMap; use collections::HashMap;
use std::iter::Enumerate; use std::iter::Enumerate;
use std::vec; use std::vec;
use std::vec_ng::Vec;
// The actual lang items defined come at the end of this file in one handy table. // The actual lang items defined come at the end of this file in one handy table.
// So you probably just want to nip down to the end. // So you probably just want to nip down to the end.
@ -47,7 +48,7 @@ pub enum LangItem {
} }
pub struct LanguageItems { pub struct LanguageItems {
items: ~[Option<ast::DefId>], items: Vec<Option<ast::DefId>> ,
} }
impl LanguageItems { impl LanguageItems {
@ -55,7 +56,7 @@ impl LanguageItems {
fn foo(_: LangItem) -> Option<ast::DefId> { None } fn foo(_: LangItem) -> Option<ast::DefId> { None }
LanguageItems { LanguageItems {
items: ~[$(foo($variant)),*] items: vec!($(foo($variant)),*)
} }
} }
@ -72,10 +73,12 @@ impl LanguageItems {
} }
pub fn require(&self, it: LangItem) -> Result<ast::DefId, ~str> { pub fn require(&self, it: LangItem) -> Result<ast::DefId, ~str> {
match self.items[it as uint] { match self.items.get(it as uint) {
Some(id) => Ok(id), &Some(id) => Ok(id),
None => Err(format!("requires `{}` lang_item", &None => {
LanguageItems::item_name(it as uint))) Err(format!("requires `{}` lang_item",
LanguageItems::item_name(it as uint)))
}
} }
} }
@ -95,7 +98,7 @@ impl LanguageItems {
$( $(
pub fn $method(&self) -> Option<ast::DefId> { pub fn $method(&self) -> Option<ast::DefId> {
self.items[$variant as uint] *self.items.get($variant as uint)
} }
)* )*
} }
@ -147,18 +150,18 @@ impl LanguageItemCollector {
pub fn collect_item(&mut self, item_index: uint, item_def_id: ast::DefId) { pub fn collect_item(&mut self, item_index: uint, item_def_id: ast::DefId) {
// Check for duplicates. // Check for duplicates.
match self.items.items[item_index] { match self.items.items.get(item_index) {
Some(original_def_id) if original_def_id != item_def_id => { &Some(original_def_id) if original_def_id != item_def_id => {
self.session.err(format!("duplicate entry for `{}`", self.session.err(format!("duplicate entry for `{}`",
LanguageItems::item_name(item_index))); LanguageItems::item_name(item_index)));
} }
Some(_) | None => { &Some(_) | &None => {
// OK. // OK.
} }
} }
// Matched. // Matched.
self.items.items[item_index] = Some(item_def_id); *self.items.items.get_mut(item_index) = Some(item_def_id);
} }
pub fn collect_local_language_items(&mut self, krate: &ast::Crate) { pub fn collect_local_language_items(&mut self, krate: &ast::Crate) {
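`LanguageItems` is essentially a fixed-size table with one `Option<DefId>` slot per known lang item; `require` reports a missing item by name and `collect_item` rejects two different definitions for the same slot. A stripped-down sketch in current Rust (stub `DefId`, two made-up items):

// Sketch only: a lang-item table with Option slots and duplicate checks.
type DefId = u32;

#[derive(Clone, Copy)]
enum LangItem { Drop = 0, Add = 1 }

struct LanguageItems { items: Vec<Option<DefId>> }

impl LanguageItems {
    fn new() -> LanguageItems {
        LanguageItems { items: vec![None; 2] }
    }

    fn require(&self, it: LangItem, name: &str) -> Result<DefId, String> {
        match self.items[it as usize] {
            Some(id) => Ok(id),
            None => Err(format!("requires `{}` lang_item", name)),
        }
    }

    fn collect_item(&mut self, it: LangItem, def_id: DefId) -> Result<(), String> {
        match self.items[it as usize] {
            Some(original) if original != def_id => {
                Err("duplicate entry".to_string())
            }
            _ => {
                self.items[it as usize] = Some(def_id);
                Ok(())
            }
        }
    }
}

fn main() {
    let mut items = LanguageItems::new();
    items.collect_item(LangItem::Drop, 42).unwrap();
    assert!(items.require(LangItem::Drop, "drop").is_ok());
    assert!(items.require(LangItem::Add, "add").is_err());
    println!("ok");
}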

View File

@ -45,7 +45,6 @@ use middle::ty;
use middle::typeck::astconv::{ast_ty_to_ty, AstConv}; use middle::typeck::astconv::{ast_ty_to_ty, AstConv};
use middle::typeck::infer; use middle::typeck::infer;
use middle::typeck; use middle::typeck;
use std::to_str::ToStr;
use util::ppaux::{ty_to_str}; use util::ppaux::{ty_to_str};
use std::cmp; use std::cmp;
@ -54,10 +53,12 @@ use std::i16;
use std::i32; use std::i32;
use std::i64; use std::i64;
use std::i8; use std::i8;
use std::to_str::ToStr;
use std::u16; use std::u16;
use std::u32; use std::u32;
use std::u64; use std::u64;
use std::u8; use std::u8;
use std::vec_ng::Vec;
use collections::SmallIntMap; use collections::SmallIntMap;
use syntax::ast_map; use syntax::ast_map;
use syntax::ast_util::IdVisitingOperation; use syntax::ast_util::IdVisitingOperation;
@ -432,7 +433,7 @@ struct Context<'a> {
// When recursing into an attributed node of the ast which modifies lint // When recursing into an attributed node of the ast which modifies lint
// levels, this stack keeps track of the previous lint levels of whatever // levels, this stack keeps track of the previous lint levels of whatever
// was modified. // was modified.
lint_stack: ~[(Lint, level, LintSource)], lint_stack: Vec<(Lint, level, LintSource)> ,
// id of the last visited negated expression // id of the last visited negated expression
negated_expr_id: ast::NodeId negated_expr_id: ast::NodeId
@ -1091,7 +1092,7 @@ fn check_unused_result(cx: &Context, s: &ast::Stmt) {
} }
} else { } else {
csearch::get_item_attrs(cx.tcx.sess.cstore, did, |attrs| { csearch::get_item_attrs(cx.tcx.sess.cstore, did, |attrs| {
if attr::contains_name(attrs, "must_use") { if attr::contains_name(attrs.as_slice(), "must_use") {
cx.span_lint(UnusedMustUse, s.span, cx.span_lint(UnusedMustUse, s.span,
"unused result which must be used"); "unused result which must be used");
warned = true; warned = true;
@ -1738,7 +1739,7 @@ pub fn check_crate(tcx: ty::ctxt,
exported_items: exported_items, exported_items: exported_items,
cur_struct_def_id: -1, cur_struct_def_id: -1,
is_doc_hidden: false, is_doc_hidden: false,
lint_stack: ~[], lint_stack: Vec::new(),
negated_expr_id: -1 negated_expr_id: -1
}; };

View File

@ -116,7 +116,7 @@ use std::fmt;
use std::io; use std::io;
use std::str; use std::str;
use std::uint; use std::uint;
use std::vec; use std::vec_ng::Vec;
use syntax::ast::*; use syntax::ast::*;
use syntax::codemap::Span; use syntax::codemap::Span;
use syntax::parse::token::special_idents; use syntax::parse::token::special_idents;
@ -260,9 +260,9 @@ pub struct IrMaps {
num_vars: Cell<uint>, num_vars: Cell<uint>,
live_node_map: RefCell<NodeMap<LiveNode>>, live_node_map: RefCell<NodeMap<LiveNode>>,
variable_map: RefCell<NodeMap<Variable>>, variable_map: RefCell<NodeMap<Variable>>,
capture_info_map: RefCell<NodeMap<@~[CaptureInfo]>>, capture_info_map: RefCell<NodeMap<@Vec<CaptureInfo> >>,
var_kinds: RefCell<~[VarKind]>, var_kinds: RefCell<Vec<VarKind> >,
lnks: RefCell<~[LiveNodeKind]>, lnks: RefCell<Vec<LiveNodeKind> >,
} }
fn IrMaps(tcx: ty::ctxt, fn IrMaps(tcx: ty::ctxt,
@ -278,8 +278,8 @@ fn IrMaps(tcx: ty::ctxt,
live_node_map: RefCell::new(NodeMap::new()), live_node_map: RefCell::new(NodeMap::new()),
variable_map: RefCell::new(NodeMap::new()), variable_map: RefCell::new(NodeMap::new()),
capture_info_map: RefCell::new(NodeMap::new()), capture_info_map: RefCell::new(NodeMap::new()),
var_kinds: RefCell::new(~[]), var_kinds: RefCell::new(Vec::new()),
lnks: RefCell::new(~[]), lnks: RefCell::new(Vec::new()),
} }
} }
@ -339,20 +339,20 @@ impl IrMaps {
pub fn variable_name(&self, var: Variable) -> ~str { pub fn variable_name(&self, var: Variable) -> ~str {
let var_kinds = self.var_kinds.borrow(); let var_kinds = self.var_kinds.borrow();
match var_kinds.get()[var.get()] { match var_kinds.get().get(var.get()) {
Local(LocalInfo { ident: nm, .. }) | Arg(_, nm) => { &Local(LocalInfo { ident: nm, .. }) | &Arg(_, nm) => {
token::get_ident(nm).get().to_str() token::get_ident(nm).get().to_str()
}, },
ImplicitRet => ~"<implicit-ret>" &ImplicitRet => ~"<implicit-ret>"
} }
} }
pub fn set_captures(&self, node_id: NodeId, cs: ~[CaptureInfo]) { pub fn set_captures(&self, node_id: NodeId, cs: Vec<CaptureInfo> ) {
let mut capture_info_map = self.capture_info_map.borrow_mut(); let mut capture_info_map = self.capture_info_map.borrow_mut();
capture_info_map.get().insert(node_id, @cs); capture_info_map.get().insert(node_id, @cs);
} }
pub fn captures(&self, expr: &Expr) -> @~[CaptureInfo] { pub fn captures(&self, expr: &Expr) -> @Vec<CaptureInfo> {
let capture_info_map = self.capture_info_map.borrow(); let capture_info_map = self.capture_info_map.borrow();
match capture_info_map.get().find(&expr.id) { match capture_info_map.get().find(&expr.id) {
Some(&caps) => caps, Some(&caps) => caps,
@ -364,7 +364,7 @@ impl IrMaps {
pub fn lnk(&self, ln: LiveNode) -> LiveNodeKind { pub fn lnk(&self, ln: LiveNode) -> LiveNodeKind {
let lnks = self.lnks.borrow(); let lnks = self.lnks.borrow();
lnks.get()[ln.get()] *lnks.get().get(ln.get())
} }
} }
@ -504,7 +504,7 @@ fn visit_expr(v: &mut LivenessVisitor, expr: &Expr, this: @IrMaps) {
// construction site. // construction site.
let capture_map = this.capture_map.borrow(); let capture_map = this.capture_map.borrow();
let cvs = capture_map.get().get(&expr.id); let cvs = capture_map.get().get(&expr.id);
let mut call_caps = ~[]; let mut call_caps = Vec::new();
for cv in cvs.borrow().iter() { for cv in cvs.borrow().iter() {
match moves::moved_variable_node_id_from_def(cv.def) { match moves::moved_variable_node_id_from_def(cv.def) {
Some(rv) => { Some(rv) => {
@ -590,11 +590,11 @@ pub struct Liveness {
tcx: ty::ctxt, tcx: ty::ctxt,
ir: @IrMaps, ir: @IrMaps,
s: Specials, s: Specials,
successors: @RefCell<~[LiveNode]>, successors: @RefCell<Vec<LiveNode> >,
users: @RefCell<~[Users]>, users: @RefCell<Vec<Users> >,
// The list of node IDs for the nested loop scopes // The list of node IDs for the nested loop scopes
// we're in. // we're in.
loop_scope: @RefCell<~[NodeId]>, loop_scope: @RefCell<Vec<NodeId> >,
// mappings from loop node ID to LiveNode // mappings from loop node ID to LiveNode
// ("break" label should map to loop node ID, // ("break" label should map to loop node ID,
// it probably doesn't now) // it probably doesn't now)
@ -607,12 +607,12 @@ fn Liveness(ir: @IrMaps, specials: Specials) -> Liveness {
ir: ir, ir: ir,
tcx: ir.tcx, tcx: ir.tcx,
s: specials, s: specials,
successors: @RefCell::new(vec::from_elem(ir.num_live_nodes.get(), successors: @RefCell::new(Vec::from_elem(ir.num_live_nodes.get(),
invalid_node())), invalid_node())),
users: @RefCell::new(vec::from_elem(ir.num_live_nodes.get() * users: @RefCell::new(Vec::from_elem(ir.num_live_nodes.get() *
ir.num_vars.get(), ir.num_vars.get(),
invalid_users())), invalid_users())),
loop_scope: @RefCell::new(~[]), loop_scope: @RefCell::new(Vec::new()),
break_ln: @RefCell::new(NodeMap::new()), break_ln: @RefCell::new(NodeMap::new()),
cont_ln: @RefCell::new(NodeMap::new()), cont_ln: @RefCell::new(NodeMap::new()),
} }
@ -686,7 +686,7 @@ impl Liveness {
-> Option<LiveNodeKind> { -> Option<LiveNodeKind> {
assert!(ln.is_valid()); assert!(ln.is_valid());
let users = self.users.borrow(); let users = self.users.borrow();
let reader = users.get()[self.idx(ln, var)].reader; let reader = users.get().get(self.idx(ln, var)).reader;
if reader.is_valid() {Some(self.ir.lnk(reader))} else {None} if reader.is_valid() {Some(self.ir.lnk(reader))} else {None}
} }
@ -697,7 +697,7 @@ impl Liveness {
-> Option<LiveNodeKind> { -> Option<LiveNodeKind> {
let successor = { let successor = {
let successors = self.successors.borrow(); let successors = self.successors.borrow();
successors.get()[ln.get()] *successors.get().get(ln.get())
}; };
self.live_on_entry(successor, var) self.live_on_entry(successor, var)
} }
@ -705,14 +705,14 @@ impl Liveness {
pub fn used_on_entry(&self, ln: LiveNode, var: Variable) -> bool { pub fn used_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
assert!(ln.is_valid()); assert!(ln.is_valid());
let users = self.users.borrow(); let users = self.users.borrow();
users.get()[self.idx(ln, var)].used users.get().get(self.idx(ln, var)).used
} }
pub fn assigned_on_entry(&self, ln: LiveNode, var: Variable) pub fn assigned_on_entry(&self, ln: LiveNode, var: Variable)
-> Option<LiveNodeKind> { -> Option<LiveNodeKind> {
assert!(ln.is_valid()); assert!(ln.is_valid());
let users = self.users.borrow(); let users = self.users.borrow();
let writer = users.get()[self.idx(ln, var)].writer; let writer = users.get().get(self.idx(ln, var)).writer;
if writer.is_valid() {Some(self.ir.lnk(writer))} else {None} if writer.is_valid() {Some(self.ir.lnk(writer))} else {None}
} }
@ -720,7 +720,7 @@ impl Liveness {
-> Option<LiveNodeKind> { -> Option<LiveNodeKind> {
let successor = { let successor = {
let successors = self.successors.borrow(); let successors = self.successors.borrow();
successors.get()[ln.get()] *successors.get().get(ln.get())
}; };
self.assigned_on_entry(successor, var) self.assigned_on_entry(successor, var)
} }
@ -795,14 +795,14 @@ impl Liveness {
write!(wr, write!(wr,
"[ln({}) of kind {:?} reads", "[ln({}) of kind {:?} reads",
ln.get(), ln.get(),
lnks.and_then(|lnks| Some(lnks.get()[ln.get()]))); lnks.and_then(|lnks| Some(*lnks.get().get(ln.get()))));
} }
let users = self.users.try_borrow(); let users = self.users.try_borrow();
match users { match users {
Some(users) => { Some(users) => {
self.write_vars(wr, ln, |idx| users.get()[idx].reader); self.write_vars(wr, ln, |idx| users.get().get(idx).reader);
write!(wr, " writes"); write!(wr, " writes");
self.write_vars(wr, ln, |idx| users.get()[idx].writer); self.write_vars(wr, ln, |idx| users.get().get(idx).writer);
} }
None => { None => {
write!(wr, " (users borrowed)"); write!(wr, " (users borrowed)");
@ -811,7 +811,9 @@ impl Liveness {
let successors = self.successors.try_borrow(); let successors = self.successors.try_borrow();
match successors { match successors {
Some(successors) => { Some(successors) => {
write!(wr, " precedes {}]", successors.get()[ln.get()].to_str()); write!(wr,
" precedes {}]",
successors.get().get(ln.get()).to_str());
} }
None => { None => {
write!(wr, " precedes (successors borrowed)]"); write!(wr, " precedes (successors borrowed)]");
@ -824,7 +826,7 @@ impl Liveness {
pub fn init_empty(&self, ln: LiveNode, succ_ln: LiveNode) { pub fn init_empty(&self, ln: LiveNode, succ_ln: LiveNode) {
{ {
let mut successors = self.successors.borrow_mut(); let mut successors = self.successors.borrow_mut();
successors.get()[ln.get()] = succ_ln; *successors.get().get_mut(ln.get()) = succ_ln;
} }
// It is not necessary to initialize the // It is not necessary to initialize the
@ -841,12 +843,12 @@ impl Liveness {
// more efficient version of init_empty() / merge_from_succ() // more efficient version of init_empty() / merge_from_succ()
{ {
let mut successors = self.successors.borrow_mut(); let mut successors = self.successors.borrow_mut();
successors.get()[ln.get()] = succ_ln; *successors.get().get_mut(ln.get()) = succ_ln;
} }
self.indices2(ln, succ_ln, |idx, succ_idx| { self.indices2(ln, succ_ln, |idx, succ_idx| {
let mut users = self.users.borrow_mut(); let mut users = self.users.borrow_mut();
users.get()[idx] = users.get()[succ_idx] *users.get().get_mut(idx) = *users.get().get(succ_idx)
}); });
debug!("init_from_succ(ln={}, succ={})", debug!("init_from_succ(ln={}, succ={})",
self.ln_str(ln), self.ln_str(succ_ln)); self.ln_str(ln), self.ln_str(succ_ln));
@ -862,12 +864,12 @@ impl Liveness {
let mut changed = false; let mut changed = false;
self.indices2(ln, succ_ln, |idx, succ_idx| { self.indices2(ln, succ_ln, |idx, succ_idx| {
let mut users = self.users.borrow_mut(); let mut users = self.users.borrow_mut();
changed |= copy_if_invalid(users.get()[succ_idx].reader, changed |= copy_if_invalid(users.get().get(succ_idx).reader,
&mut users.get()[idx].reader); &mut users.get().get_mut(idx).reader);
changed |= copy_if_invalid(users.get()[succ_idx].writer, changed |= copy_if_invalid(users.get().get(succ_idx).writer,
&mut users.get()[idx].writer); &mut users.get().get_mut(idx).writer);
if users.get()[succ_idx].used && !users.get()[idx].used { if users.get().get(succ_idx).used && !users.get().get(idx).used {
users.get()[idx].used = true; users.get().get_mut(idx).used = true;
changed = true; changed = true;
} }
}); });
@ -893,8 +895,8 @@ impl Liveness {
pub fn define(&self, writer: LiveNode, var: Variable) { pub fn define(&self, writer: LiveNode, var: Variable) {
let idx = self.idx(writer, var); let idx = self.idx(writer, var);
let mut users = self.users.borrow_mut(); let mut users = self.users.borrow_mut();
users.get()[idx].reader = invalid_node(); users.get().get_mut(idx).reader = invalid_node();
users.get()[idx].writer = invalid_node(); users.get().get_mut(idx).writer = invalid_node();
debug!("{} defines {} (idx={}): {}", writer.to_str(), var.to_str(), debug!("{} defines {} (idx={}): {}", writer.to_str(), var.to_str(),
idx, self.ln_str(writer)); idx, self.ln_str(writer));
@ -904,7 +906,7 @@ impl Liveness {
pub fn acc(&self, ln: LiveNode, var: Variable, acc: uint) { pub fn acc(&self, ln: LiveNode, var: Variable, acc: uint) {
let idx = self.idx(ln, var); let idx = self.idx(ln, var);
let mut users = self.users.borrow_mut(); let mut users = self.users.borrow_mut();
let user = &mut users.get()[idx]; let user = users.get().get_mut(idx);
if (acc & ACC_WRITE) != 0 { if (acc & ACC_WRITE) != 0 {
user.reader = invalid_node(); user.reader = invalid_node();
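The `users` and `successors` tables rewritten above are flat vectors addressed by `idx(ln, var) = ln * num_vars + var`, so the init/merge helpers are loops over variable indices copying per-slot reader/writer data. A small sketch of that layout in current Rust (plain `usize` values for LiveNode and Variable, toy `Users` record):

// Sketch only: a flat (live node x variable) table and init_from_succ.
#[derive(Clone, Copy, Default)]
struct Users {
    reader: usize,
    writer: usize,
    used: bool,
}

struct Liveness {
    num_vars: usize,
    users: Vec<Users>,
}

impl Liveness {
    fn new(num_live_nodes: usize, num_vars: usize) -> Liveness {
        Liveness {
            num_vars,
            users: vec![Users::default(); num_live_nodes * num_vars],
        }
    }

    fn idx(&self, ln: usize, var: usize) -> usize {
        ln * self.num_vars + var
    }

    // Copy every variable's slot from the successor node, as in
    // `init_from_succ` above.
    fn init_from_succ(&mut self, ln: usize, succ_ln: usize) {
        for var in 0..self.num_vars {
            let from = self.idx(succ_ln, var);
            let to = self.idx(ln, var);
            self.users[to] = self.users[from];
        }
    }
}

fn main() {
    let mut lv = Liveness::new(3, 2);
    let i = lv.idx(2, 1);
    lv.users[i].used = true;
    lv.init_from_succ(0, 2);
    let j = lv.idx(0, 1);
    assert!(lv.users[j].used);
    println!("ok");
}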

View File

@ -65,6 +65,7 @@
use middle::ty; use middle::ty;
use util::ppaux::{ty_to_str, region_ptr_to_str, Repr}; use util::ppaux::{ty_to_str, region_ptr_to_str, Repr};
use std::vec_ng::Vec;
use syntax::ast::{MutImmutable, MutMutable}; use syntax::ast::{MutImmutable, MutMutable};
use syntax::ast; use syntax::ast;
use syntax::codemap::Span; use syntax::codemap::Span;
@ -723,7 +724,7 @@ impl<TYPER:Typer> MemCategorizationContext<TYPER> {
// know what type lies at the other end, so we just call it // know what type lies at the other end, so we just call it
// `()` (the empty tuple). // `()` (the empty tuple).
let opaque_ty = ty::mk_tup(self.tcx(), ~[]); let opaque_ty = ty::mk_tup(self.tcx(), Vec::new());
return self.cat_deref_common(node, base_cmt, deref_cnt, opaque_ty); return self.cat_deref_common(node, base_cmt, deref_cnt, opaque_ty);
} }

View File

@ -139,6 +139,7 @@ use util::nodemap::{NodeMap, NodeSet};
use std::cell::RefCell; use std::cell::RefCell;
use std::rc::Rc; use std::rc::Rc;
use std::vec_ng::Vec;
use syntax::ast::*; use syntax::ast::*;
use syntax::ast_util; use syntax::ast_util;
use syntax::visit; use syntax::visit;
@ -159,7 +160,7 @@ pub struct CaptureVar {
mode: CaptureMode // How variable is being accessed mode: CaptureMode // How variable is being accessed
} }
pub type CaptureMap = @RefCell<NodeMap<Rc<~[CaptureVar]>>>; pub type CaptureMap = @RefCell<NodeMap<Rc<Vec<CaptureVar> >>>;
pub type MovesMap = @RefCell<NodeSet>; pub type MovesMap = @RefCell<NodeSet>;
@ -680,7 +681,7 @@ impl VisitContext {
self.consume_expr(arg_expr) self.consume_expr(arg_expr)
} }
pub fn compute_captures(&mut self, fn_expr_id: NodeId) -> Rc<~[CaptureVar]> { pub fn compute_captures(&mut self, fn_expr_id: NodeId) -> Rc<Vec<CaptureVar> > {
debug!("compute_capture_vars(fn_expr_id={:?})", fn_expr_id); debug!("compute_capture_vars(fn_expr_id={:?})", fn_expr_id);
let _indenter = indenter(); let _indenter = indenter();

View File

@ -12,6 +12,7 @@
use middle::resolve; use middle::resolve;
use collections::HashMap; use collections::HashMap;
use std::vec_ng::Vec;
use syntax::ast::*; use syntax::ast::*;
use syntax::ast_util::{path_to_ident, walk_pat}; use syntax::ast_util::{path_to_ident, walk_pat};
use syntax::codemap::Span; use syntax::codemap::Span;
@ -88,8 +89,8 @@ pub fn pat_bindings(dm: resolve::DefMap,
}); });
} }
pub fn pat_binding_ids(dm: resolve::DefMap, pat: &Pat) -> ~[NodeId] { pub fn pat_binding_ids(dm: resolve::DefMap, pat: &Pat) -> Vec<NodeId> {
let mut found = ~[]; let mut found = Vec::new();
pat_bindings(dm, pat, |_bm, b_id, _sp, _pt| found.push(b_id) ); pat_bindings(dm, pat, |_bm, b_id, _sp, _pt| found.push(b_id) );
return found; return found;
} }

View File

@ -21,6 +21,7 @@ use middle::privacy;
use util::nodemap::NodeSet; use util::nodemap::NodeSet;
use std::cell::RefCell; use std::cell::RefCell;
use std::vec_ng::Vec;
use collections::HashSet; use collections::HashSet;
use syntax::ast; use syntax::ast;
use syntax::ast_map; use syntax::ast_map;
@ -92,11 +93,11 @@ struct ReachableContext {
reachable_symbols: @RefCell<NodeSet>, reachable_symbols: @RefCell<NodeSet>,
// A worklist of item IDs. Each item ID in this worklist will be inlined // A worklist of item IDs. Each item ID in this worklist will be inlined
// and will be scanned for further references. // and will be scanned for further references.
worklist: @RefCell<~[ast::NodeId]>, worklist: @RefCell<Vec<ast::NodeId> >,
} }
struct MarkSymbolVisitor { struct MarkSymbolVisitor {
worklist: @RefCell<~[ast::NodeId]>, worklist: @RefCell<Vec<ast::NodeId> >,
method_map: typeck::MethodMap, method_map: typeck::MethodMap,
tcx: ty::ctxt, tcx: ty::ctxt,
reachable_symbols: @RefCell<NodeSet>, reachable_symbols: @RefCell<NodeSet>,
@ -190,7 +191,7 @@ impl ReachableContext {
tcx: tcx, tcx: tcx,
method_map: method_map, method_map: method_map,
reachable_symbols: @RefCell::new(NodeSet::new()), reachable_symbols: @RefCell::new(NodeSet::new()),
worklist: @RefCell::new(~[]), worklist: @RefCell::new(Vec::new()),
} }
} }

View File

@ -27,6 +27,7 @@ use middle::ty;
use util::nodemap::NodeMap; use util::nodemap::NodeMap;
use std::cell::RefCell; use std::cell::RefCell;
use std::vec_ng::Vec;
use collections::{HashMap, HashSet}; use collections::{HashMap, HashSet};
use syntax::codemap::Span; use syntax::codemap::Span;
use syntax::{ast, visit}; use syntax::{ast, visit};
@ -77,7 +78,7 @@ The region maps encode information about region relationships.
pub struct RegionMaps { pub struct RegionMaps {
priv scope_map: RefCell<NodeMap<ast::NodeId>>, priv scope_map: RefCell<NodeMap<ast::NodeId>>,
priv var_map: RefCell<NodeMap<ast::NodeId>>, priv var_map: RefCell<NodeMap<ast::NodeId>>,
priv free_region_map: RefCell<HashMap<FreeRegion, ~[FreeRegion]>>, priv free_region_map: RefCell<HashMap<FreeRegion, Vec<FreeRegion> >>,
priv rvalue_scopes: RefCell<NodeMap<ast::NodeId>>, priv rvalue_scopes: RefCell<NodeMap<ast::NodeId>>,
priv terminating_scopes: RefCell<HashSet<ast::NodeId>>, priv terminating_scopes: RefCell<HashSet<ast::NodeId>>,
} }
@ -113,7 +114,7 @@ impl RegionMaps {
debug!("relate_free_regions(sub={:?}, sup={:?})", sub, sup); debug!("relate_free_regions(sub={:?}, sup={:?})", sub, sup);
free_region_map.get().insert(sub, ~[sup]); free_region_map.get().insert(sub, vec!(sup));
} }
pub fn record_encl_scope(&self, sub: ast::NodeId, sup: ast::NodeId) { pub fn record_encl_scope(&self, sub: ast::NodeId, sup: ast::NodeId) {
@ -283,11 +284,11 @@ impl RegionMaps {
// doubles as a way to detect if we've seen a particular FR // doubles as a way to detect if we've seen a particular FR
// before. Note that we expect this graph to be an *extremely // before. Note that we expect this graph to be an *extremely
// shallow* tree. // shallow* tree.
let mut queue = ~[sub]; let mut queue = vec!(sub);
let mut i = 0; let mut i = 0;
while i < queue.len() { while i < queue.len() {
let free_region_map = self.free_region_map.borrow(); let free_region_map = self.free_region_map.borrow();
match free_region_map.get().find(&queue[i]) { match free_region_map.get().find(queue.get(i)) {
Some(parents) => { Some(parents) => {
for parent in parents.iter() { for parent in parents.iter() {
if *parent == sup { if *parent == sup {
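The loop above is a worklist search over the free-region graph, and with `Vec` the by-value indexing `queue[i]` becomes a borrow through `queue.get(i)`. A hedged sketch of the same shape, with the graph stored as plain adjacency lists rather than the real `free_region_map` (the node ids, the `parents` table, and the cycle guard are all invented for the example):

    use std::vec_ng::Vec;

    // Is `sup` reachable from `sub` in a parent graph stored as
    // adjacency lists indexed by node id?
    fn reaches(parents: &Vec<Vec<uint> >, sub: uint, sup: uint) -> bool {
        let mut queue = vec!(sub);               // was: let mut queue = ~[sub];
        let mut i = 0;
        while i < queue.len() {
            let node = *queue.get(i);            // was: queue[i]
            for p in parents.get(node).iter() {
                if *p == sup {
                    return true;
                }
                // the queue doubles as the "seen so far" set
                let seen = queue.iter().any(|q| *q == *p);
                if !seen {
                    queue.push(*p);
                }
            }
            i += 1;
        }
        false
    }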
@ -369,7 +370,7 @@ impl RegionMaps {
// where they diverge. If one vector is a suffix of the other, // where they diverge. If one vector is a suffix of the other,
// then the corresponding scope is a superscope of the other. // then the corresponding scope is a superscope of the other.
if a_ancestors[a_index] != b_ancestors[b_index] { if *a_ancestors.get(a_index) != *b_ancestors.get(b_index) {
return None; return None;
} }
@ -380,16 +381,15 @@ impl RegionMaps {
if b_index == 0u { return Some(scope_b); } if b_index == 0u { return Some(scope_b); }
a_index -= 1u; a_index -= 1u;
b_index -= 1u; b_index -= 1u;
if a_ancestors[a_index] != b_ancestors[b_index] { if *a_ancestors.get(a_index) != *b_ancestors.get(b_index) {
return Some(a_ancestors[a_index + 1u]); return Some(*a_ancestors.get(a_index + 1u));
} }
} }
fn ancestors_of(this: &RegionMaps, scope: ast::NodeId) fn ancestors_of(this: &RegionMaps, scope: ast::NodeId)
-> ~[ast::NodeId] -> Vec<ast::NodeId> {
{
// debug!("ancestors_of(scope={})", scope); // debug!("ancestors_of(scope={})", scope);
let mut result = ~[scope]; let mut result = vec!(scope);
let mut scope = scope; let mut scope = scope;
loop { loop {
let scope_map = this.scope_map.borrow(); let scope_map = this.scope_map.borrow();
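`nearest_common_ancestor` builds both ancestor chains as `Vec<ast::NodeId>` and walks them from the back until they diverge, so every access now goes through `.get()` plus a deref instead of `a_ancestors[a_index]`. A small generic sketch of that back-to-front comparison (not the rustc routine itself, just the access pattern):

    use std::vec_ng::Vec;

    // Length of the common suffix of two root-first ancestor chains;
    // indexing goes through .get() now that the chains are Vec<_>.
    fn common_suffix_len<T: Eq>(a: &Vec<T>, b: &Vec<T>) -> uint {
        let mut n = 0;
        while n < a.len() && n < b.len() {
            if *a.get(a.len() - 1 - n) != *b.get(b.len() - 1 - n) {
                return n;
            }
            n += 1;
        }
        n
    }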


@ -34,6 +34,7 @@ use syntax::visit::Visitor;
use std::cell::{Cell, RefCell}; use std::cell::{Cell, RefCell};
use std::uint; use std::uint;
use std::mem::replace; use std::mem::replace;
use std::vec_ng::Vec;
use collections::{HashMap, HashSet}; use collections::{HashMap, HashSet};
// Definition mapping // Definition mapping
@ -48,11 +49,11 @@ struct binding_info {
type BindingMap = HashMap<Name,binding_info>; type BindingMap = HashMap<Name,binding_info>;
// Trait method resolution // Trait method resolution
pub type TraitMap = NodeMap<~[DefId]>; pub type TraitMap = NodeMap<Vec<DefId> >;
// This is the replacement export map. It maps a module to all of the exports // This is the replacement export map. It maps a module to all of the exports
// within. // within.
pub type ExportMap2 = @RefCell<NodeMap<~[Export2]>>; pub type ExportMap2 = @RefCell<NodeMap<Vec<Export2> >>;
pub struct Export2 { pub struct Export2 {
name: ~str, // The name of the target. name: ~str, // The name of the target.
@ -319,7 +320,7 @@ impl Rib {
/// One import directive. /// One import directive.
struct ImportDirective { struct ImportDirective {
module_path: ~[Ident], module_path: Vec<Ident> ,
subclass: @ImportDirectiveSubclass, subclass: @ImportDirectiveSubclass,
span: Span, span: Span,
id: NodeId, id: NodeId,
@ -327,7 +328,7 @@ struct ImportDirective {
} }
impl ImportDirective { impl ImportDirective {
fn new(module_path: ~[Ident], fn new(module_path: Vec<Ident> ,
subclass: @ImportDirectiveSubclass, subclass: @ImportDirectiveSubclass,
span: Span, span: Span,
id: NodeId, id: NodeId,
@ -438,7 +439,7 @@ struct Module {
is_public: bool, is_public: bool,
children: RefCell<HashMap<Name, @NameBindings>>, children: RefCell<HashMap<Name, @NameBindings>>,
imports: RefCell<~[@ImportDirective]>, imports: RefCell<Vec<@ImportDirective> >,
// The external module children of this node that were declared with // The external module children of this node that were declared with
// `extern crate`. // `extern crate`.
@ -488,7 +489,7 @@ impl Module {
kind: Cell::new(kind), kind: Cell::new(kind),
is_public: is_public, is_public: is_public,
children: RefCell::new(HashMap::new()), children: RefCell::new(HashMap::new()),
imports: RefCell::new(~[]), imports: RefCell::new(Vec::new()),
external_module_children: RefCell::new(HashMap::new()), external_module_children: RefCell::new(HashMap::new()),
anonymous_children: RefCell::new(NodeMap::new()), anonymous_children: RefCell::new(NodeMap::new()),
import_resolutions: RefCell::new(HashMap::new()), import_resolutions: RefCell::new(HashMap::new()),
@ -815,9 +816,9 @@ fn Resolver(session: Session,
unresolved_imports: 0, unresolved_imports: 0,
current_module: current_module, current_module: current_module,
value_ribs: @RefCell::new(~[]), value_ribs: @RefCell::new(Vec::new()),
type_ribs: @RefCell::new(~[]), type_ribs: @RefCell::new(Vec::new()),
label_ribs: @RefCell::new(~[]), label_ribs: @RefCell::new(Vec::new()),
current_trait_refs: None, current_trait_refs: None,
@ -826,7 +827,7 @@ fn Resolver(session: Session,
primitive_type_table: @PrimitiveTypeTable(), primitive_type_table: @PrimitiveTypeTable(),
namespaces: ~[ TypeNS, ValueNS ], namespaces: vec!(TypeNS, ValueNS),
def_map: @RefCell::new(NodeMap::new()), def_map: @RefCell::new(NodeMap::new()),
export_map2: @RefCell::new(NodeMap::new()), export_map2: @RefCell::new(NodeMap::new()),
@ -859,16 +860,16 @@ struct Resolver {
// The current set of local scopes, for values. // The current set of local scopes, for values.
// FIXME #4948: Reuse ribs to avoid allocation. // FIXME #4948: Reuse ribs to avoid allocation.
value_ribs: @RefCell<~[@Rib]>, value_ribs: @RefCell<Vec<@Rib> >,
// The current set of local scopes, for types. // The current set of local scopes, for types.
type_ribs: @RefCell<~[@Rib]>, type_ribs: @RefCell<Vec<@Rib> >,
// The current set of local scopes, for labels. // The current set of local scopes, for labels.
label_ribs: @RefCell<~[@Rib]>, label_ribs: @RefCell<Vec<@Rib> >,
// The trait that the current context can refer to. // The trait that the current context can refer to.
current_trait_refs: Option<~[DefId]>, current_trait_refs: Option<Vec<DefId> >,
// The ident for the keyword "self". // The ident for the keyword "self".
self_ident: Ident, self_ident: Ident,
@ -879,7 +880,7 @@ struct Resolver {
primitive_type_table: @PrimitiveTypeTable, primitive_type_table: @PrimitiveTypeTable,
// The four namespaces. // The four namespaces.
namespaces: ~[Namespace], namespaces: Vec<Namespace> ,
def_map: DefMap, def_map: DefMap,
export_map2: ExportMap2, export_map2: ExportMap2,
@ -1452,7 +1453,7 @@ impl Resolver {
// globs and lists, the path is found directly in the AST; // globs and lists, the path is found directly in the AST;
// for simple paths we have to munge the path a little. // for simple paths we have to munge the path a little.
let mut module_path = ~[]; let mut module_path = Vec::new();
match view_path.node { match view_path.node {
ViewPathSimple(_, ref full_path, _) => { ViewPathSimple(_, ref full_path, _) => {
let path_len = full_path.segments.len(); let path_len = full_path.segments.len();
@ -1951,7 +1952,7 @@ impl Resolver {
/// Creates and adds an import directive to the given module. /// Creates and adds an import directive to the given module.
fn build_import_directive(&mut self, fn build_import_directive(&mut self,
module_: @Module, module_: @Module,
module_path: ~[Ident], module_path: Vec<Ident> ,
subclass: @ImportDirectiveSubclass, subclass: @ImportDirectiveSubclass,
span: Span, span: Span,
id: NodeId, id: NodeId,
@ -1972,7 +1973,7 @@ impl Resolver {
SingleImport(target, _) => { SingleImport(target, _) => {
debug!("(building import directive) building import \ debug!("(building import directive) building import \
directive: {}::{}", directive: {}::{}",
self.idents_to_str(directive.module_path), self.idents_to_str(directive.module_path.as_slice()),
token::get_ident(target)); token::get_ident(target));
let mut import_resolutions = module_.import_resolutions let mut import_resolutions = module_.import_resolutions
@ -2085,13 +2086,14 @@ impl Resolver {
let import_count = imports.get().len(); let import_count = imports.get().len();
while module.resolved_import_count.get() < import_count { while module.resolved_import_count.get() < import_count {
let import_index = module.resolved_import_count.get(); let import_index = module.resolved_import_count.get();
let import_directive = imports.get()[import_index]; let import_directive = *imports.get().get(import_index);
match self.resolve_import_for_module(module, import_directive) { match self.resolve_import_for_module(module, import_directive) {
Failed => { Failed => {
// We presumably emitted an error. Continue. // We presumably emitted an error. Continue.
let msg = format!("failed to resolve import `{}`", let msg = format!("failed to resolve import `{}`",
self.import_path_to_str( self.import_path_to_str(
import_directive.module_path, import_directive.module_path
.as_slice(),
*import_directive.subclass)); *import_directive.subclass));
self.resolve_error(import_directive.span, msg); self.resolve_error(import_directive.span, msg);
} }
@ -2124,11 +2126,11 @@ impl Resolver {
} }
fn path_idents_to_str(&mut self, path: &Path) -> ~str { fn path_idents_to_str(&mut self, path: &Path) -> ~str {
let identifiers: ~[ast::Ident] = path.segments let identifiers: Vec<ast::Ident> = path.segments
.iter() .iter()
.map(|seg| seg.identifier) .map(|seg| seg.identifier)
.collect(); .collect();
self.idents_to_str(identifiers) self.idents_to_str(identifiers.as_slice())
} }
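`path_idents_to_str` shows the other recurring idiom in this patch: collect an iterator into a `Vec<_>` (with a turbofish or type annotation where inference needs help) and hand slice-taking callers the result via `.as_slice()`. A throwaway sketch of the same shape, assuming the era's `connect` on string slices for the join (the helper itself is invented):

    use std::vec_ng::Vec;

    // Collect path segments into a Vec and hand a slice to the join,
    // where the old code collected into a ~[~str].
    fn join_segments(segments: &[&str]) -> ~str {
        let owned: Vec<~str> = segments.iter()
                                       .map(|&s| s.to_owned())
                                       .collect();
        owned.as_slice().connect("::")
    }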
fn import_directive_subclass_to_str(&mut self, fn import_directive_subclass_to_str(&mut self,
@ -2169,7 +2171,7 @@ impl Resolver {
debug!("(resolving import for module) resolving import `{}::...` in \ debug!("(resolving import for module) resolving import `{}::...` in \
`{}`", `{}`",
self.idents_to_str(*module_path), self.idents_to_str(module_path.as_slice()),
self.module_to_str(module_)); self.module_to_str(module_));
// First, resolve the module path for the directive, if necessary. // First, resolve the module path for the directive, if necessary.
@ -2178,7 +2180,7 @@ impl Resolver {
Some((self.graph_root.get_module(), LastMod(AllPublic))) Some((self.graph_root.get_module(), LastMod(AllPublic)))
} else { } else {
match self.resolve_module_path(module_, match self.resolve_module_path(module_,
*module_path, module_path.as_slice(),
DontUseLexicalScope, DontUseLexicalScope,
import_directive.span, import_directive.span,
ImportSearch) { ImportSearch) {
@ -3274,15 +3276,15 @@ impl Resolver {
if index != import_count { if index != import_count {
let sn = self.session let sn = self.session
.codemap .codemap
.span_to_snippet(imports.get()[index].span) .span_to_snippet(imports.get().get(index).span)
.unwrap(); .unwrap();
if sn.contains("::") { if sn.contains("::") {
self.resolve_error(imports.get()[index].span, self.resolve_error(imports.get().get(index).span,
"unresolved import"); "unresolved import");
} else { } else {
let err = format!("unresolved import (maybe you meant `{}::*`?)", let err = format!("unresolved import (maybe you meant `{}::*`?)",
sn.slice(0, sn.len())); sn.slice(0, sn.len()));
self.resolve_error(imports.get()[index].span, err); self.resolve_error(imports.get().get(index).span, err);
} }
} }
@ -3374,7 +3376,7 @@ impl Resolver {
} }
fn record_exports_for_module(&mut self, module_: @Module) { fn record_exports_for_module(&mut self, module_: @Module) {
let mut exports2 = ~[]; let mut exports2 = Vec::new();
self.add_exports_for_module(&mut exports2, module_); self.add_exports_for_module(&mut exports2, module_);
match module_.def_id.get() { match module_.def_id.get() {
@ -3389,7 +3391,7 @@ impl Resolver {
} }
fn add_exports_of_namebindings(&mut self, fn add_exports_of_namebindings(&mut self,
exports2: &mut ~[Export2], exports2: &mut Vec<Export2> ,
name: Name, name: Name,
namebindings: @NameBindings, namebindings: @NameBindings,
ns: Namespace) { ns: Namespace) {
@ -3410,7 +3412,7 @@ impl Resolver {
} }
fn add_exports_for_module(&mut self, fn add_exports_for_module(&mut self,
exports2: &mut ~[Export2], exports2: &mut Vec<Export2> ,
module_: @Module) { module_: @Module) {
let import_resolutions = module_.import_resolutions.borrow(); let import_resolutions = module_.import_resolutions.borrow();
for (name, importresolution) in import_resolutions.get().iter() { for (name, importresolution) in import_resolutions.get().iter() {
@ -3495,7 +3497,7 @@ impl Resolver {
/// Wraps the given definition in the appropriate number of `def_upvar` /// Wraps the given definition in the appropriate number of `def_upvar`
/// wrappers. /// wrappers.
fn upvarify(&mut self, fn upvarify(&mut self,
ribs: &mut ~[@Rib], ribs: &mut Vec<@Rib> ,
rib_index: uint, rib_index: uint,
def_like: DefLike, def_like: DefLike,
span: Span) span: Span)
@ -3520,7 +3522,7 @@ impl Resolver {
let mut rib_index = rib_index + 1; let mut rib_index = rib_index + 1;
while rib_index < ribs.len() { while rib_index < ribs.len() {
match ribs[rib_index].kind { match ribs.get(rib_index).kind {
NormalRibKind => { NormalRibKind => {
// Nothing to do. Continue. // Nothing to do. Continue.
} }
@ -3610,7 +3612,7 @@ impl Resolver {
} }
fn search_ribs(&mut self, fn search_ribs(&mut self,
ribs: &mut ~[@Rib], ribs: &mut Vec<@Rib> ,
name: Name, name: Name,
span: Span) span: Span)
-> Option<DefLike> { -> Option<DefLike> {
@ -3621,7 +3623,7 @@ impl Resolver {
while i != 0 { while i != 0 {
i -= 1; i -= 1;
let binding_opt = { let binding_opt = {
let bindings = ribs[i].bindings.borrow(); let bindings = ribs.get(i).bindings.borrow();
bindings.get().find_copy(&name) bindings.get().find_copy(&name)
}; };
match binding_opt { match binding_opt {
@ -4095,7 +4097,7 @@ impl Resolver {
TraitImplementation); TraitImplementation);
// Record the current set of trait references. // Record the current set of trait references.
let mut new_trait_refs = ~[]; let mut new_trait_refs = Vec::new();
{ {
let def_map = this.def_map.borrow(); let def_map = this.def_map.borrow();
let r = def_map.get().find(&trait_reference.ref_id); let r = def_map.get().find(&trait_reference.ref_id);
@ -4492,8 +4494,9 @@ impl Resolver {
{ {
let mut value_ribs = let mut value_ribs =
this.value_ribs.borrow_mut(); this.value_ribs.borrow_mut();
let last_rib = value_ribs.get()[ let length = value_ribs.get().len();
value_ribs.get().len() - 1]; let last_rib = value_ribs.get().get(
length - 1);
let mut bindings = let mut bindings =
last_rib.bindings.borrow_mut(); last_rib.bindings.borrow_mut();
bindings.get().insert(renamed, bindings.get().insert(renamed,
@ -4518,8 +4521,9 @@ impl Resolver {
{ {
let mut value_ribs = let mut value_ribs =
this.value_ribs.borrow_mut(); this.value_ribs.borrow_mut();
let last_rib = value_ribs.get()[ let length = value_ribs.get().len();
value_ribs.get().len() - 1]; let last_rib = value_ribs.get().get(
length - 1);
let mut bindings = let mut bindings =
last_rib.bindings.borrow_mut(); last_rib.bindings.borrow_mut();
bindings.get().insert(renamed, bindings.get().insert(renamed,
@ -5054,8 +5058,8 @@ impl Resolver {
-> Option<~str> { -> Option<~str> {
let this = &mut *self; let this = &mut *self;
let mut maybes: ~[token::InternedString] = ~[]; let mut maybes: Vec<token::InternedString> = Vec::new();
let mut values: ~[uint] = ~[]; let mut values: Vec<uint> = Vec::new();
let mut j = { let mut j = {
let value_ribs = this.value_ribs.borrow(); let value_ribs = this.value_ribs.borrow();
@ -5064,7 +5068,7 @@ impl Resolver {
while j != 0 { while j != 0 {
j -= 1; j -= 1;
let value_ribs = this.value_ribs.borrow(); let value_ribs = this.value_ribs.borrow();
let bindings = value_ribs.get()[j].bindings.borrow(); let bindings = value_ribs.get().get(j).bindings.borrow();
for (&k, _) in bindings.get().iter() { for (&k, _) in bindings.get().iter() {
maybes.push(token::get_name(k)); maybes.push(token::get_name(k));
values.push(uint::MAX); values.push(uint::MAX);
@ -5073,20 +5077,20 @@ impl Resolver {
let mut smallest = 0; let mut smallest = 0;
for (i, other) in maybes.iter().enumerate() { for (i, other) in maybes.iter().enumerate() {
values[i] = name.lev_distance(other.get()); *values.get_mut(i) = name.lev_distance(other.get());
if values[i] <= values[smallest] { if *values.get(i) <= *values.get(smallest) {
smallest = i; smallest = i;
} }
} }
if values.len() > 0 && if values.len() > 0 &&
values[smallest] != uint::MAX && *values.get(smallest) != uint::MAX &&
values[smallest] < name.len() + 2 && *values.get(smallest) < name.len() + 2 &&
values[smallest] <= max_distance && *values.get(smallest) <= max_distance &&
name != maybes[smallest].get() { name != maybes.get(smallest).get() {
Some(maybes[smallest].get().to_str()) Some(maybes.get(smallest).get().to_str())
} else { } else {
None None
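The suggestion search above keeps a table of edit distances alongside the candidate names and now writes into it through `get_mut(i)` rather than `values[i] = ...`. A compressed sketch of that search, assuming the `lev_distance` method that `&str` carried at this time; the helper and its signature are otherwise illustrative:

    use std::uint;
    use std::vec_ng::Vec;

    // Pick the candidate closest to `name` by edit distance, if it is
    // close enough to be worth suggesting.
    fn best_match(name: &str, candidates: &[~str], max_distance: uint) -> Option<~str> {
        let mut values: Vec<uint> = Vec::from_elem(candidates.len(), uint::MAX);

        let mut smallest = 0;
        for (i, other) in candidates.iter().enumerate() {
            *values.get_mut(i) = name.lev_distance(other.as_slice()); // was: values[i] = ...
            if *values.get(i) <= *values.get(smallest) {
                smallest = i;
            }
        }

        if values.len() > 0 &&
                *values.get(smallest) != uint::MAX &&
                *values.get(smallest) <= max_distance &&
                name != candidates[smallest].as_slice() {
            Some(candidates[smallest].clone())
        } else {
            None
        }
    }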
@ -5212,8 +5216,8 @@ impl Resolver {
let def_like = DlDef(DefLabel(expr.id)); let def_like = DlDef(DefLabel(expr.id));
{ {
let mut label_ribs = this.label_ribs.borrow_mut(); let mut label_ribs = this.label_ribs.borrow_mut();
let rib = label_ribs.get()[label_ribs.get().len() - let length = label_ribs.get().len();
1]; let rib = label_ribs.get().get(length - 1);
let mut bindings = rib.bindings.borrow_mut(); let mut bindings = rib.bindings.borrow_mut();
let renamed = mtwt::resolve(label); let renamed = mtwt::resolve(label);
bindings.get().insert(renamed, def_like); bindings.get().insert(renamed, def_like);
@ -5274,11 +5278,11 @@ impl Resolver {
} }
} }
fn search_for_traits_containing_method(&mut self, name: Ident) -> ~[DefId] { fn search_for_traits_containing_method(&mut self, name: Ident) -> Vec<DefId> {
debug!("(searching for traits containing method) looking for '{}'", debug!("(searching for traits containing method) looking for '{}'",
token::get_ident(name)); token::get_ident(name));
let mut found_traits = ~[]; let mut found_traits = Vec::new();
let mut search_module = self.current_module; let mut search_module = self.current_module;
let method_map = self.method_map.borrow(); let method_map = self.method_map.borrow();
match method_map.get().find(&name.name) { match method_map.get().find(&name.name) {
@ -5350,7 +5354,7 @@ impl Resolver {
} }
fn add_trait_info(&self, fn add_trait_info(&self,
found_traits: &mut ~[DefId], found_traits: &mut Vec<DefId> ,
trait_def_id: DefId, trait_def_id: DefId,
name: Ident) { name: Ident) {
debug!("(adding trait info) found trait {}:{} for method '{}'", debug!("(adding trait info) found trait {}:{} for method '{}'",
@ -5495,7 +5499,7 @@ impl Resolver {
/// A somewhat inefficient routine to obtain the name of a module. /// A somewhat inefficient routine to obtain the name of a module.
fn module_to_str(&mut self, module_: @Module) -> ~str { fn module_to_str(&mut self, module_: @Module) -> ~str {
let mut idents = ~[]; let mut idents = Vec::new();
let mut current_module = module_; let mut current_module = module_;
loop { loop {
match current_module.parent_link { match current_module.parent_link {
@ -5516,7 +5520,9 @@ impl Resolver {
if idents.len() == 0 { if idents.len() == 0 {
return ~"???"; return ~"???";
} }
return self.idents_to_str(idents.move_rev_iter().collect::<~[ast::Ident]>()); return self.idents_to_str(idents.move_rev_iter()
.collect::<Vec<ast::Ident>>()
.as_slice());
} }
#[allow(dead_code)] // useful for debugging #[allow(dead_code)] // useful for debugging


@ -16,6 +16,7 @@ use middle::ty_fold::TypeFolder;
use util::ppaux::Repr; use util::ppaux::Repr;
use std::rc::Rc; use std::rc::Rc;
use std::vec_ng::Vec;
use syntax::codemap::Span; use syntax::codemap::Span;
use syntax::opt_vec::OptVec; use syntax::opt_vec::OptVec;
@ -88,7 +89,7 @@ impl<'a> TypeFolder for SubstFolder<'a> {
match ty::get(t).sty { match ty::get(t).sty {
ty::ty_param(p) => { ty::ty_param(p) => {
if p.idx < self.substs.tps.len() { if p.idx < self.substs.tps.len() {
self.substs.tps[p.idx] *self.substs.tps.get(p.idx)
} else { } else {
let root_msg = match self.root_ty { let root_msg = match self.root_ty {
Some(root) => format!(" in the substitution of `{}`", Some(root) => format!(" in the substitution of `{}`",
@ -130,10 +131,10 @@ impl<'a> TypeFolder for SubstFolder<'a> {
/////////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////////
// Other types // Other types
impl<T:Subst> Subst for ~[T] { impl<T:Subst> Subst for Vec<T> {
fn subst_spanned(&self, tcx: ty::ctxt, fn subst_spanned(&self, tcx: ty::ctxt,
substs: &ty::substs, substs: &ty::substs,
span: Option<Span>) -> ~[T] { span: Option<Span>) -> Vec<T> {
self.map(|t| t.subst_spanned(tcx, substs, span)) self.map(|t| t.subst_spanned(tcx, substs, span))
} }
} }


@ -223,9 +223,10 @@ use middle::ty;
use util::common::indenter; use util::common::indenter;
use util::ppaux::{Repr, vec_map_to_str}; use util::ppaux::{Repr, vec_map_to_str};
use std::cell::Cell;
use collections::HashMap; use collections::HashMap;
use std::vec; use std::cell::Cell;
use std::vec_ng::Vec;
use std::vec_ng;
use syntax::ast; use syntax::ast;
use syntax::ast::Ident; use syntax::ast::Ident;
use syntax::ast_util::path_to_ident; use syntax::ast_util::path_to_ident;
@ -421,10 +422,9 @@ impl<'a,'b> Clone for ArmData<'a, 'b> {
*/ */
#[deriving(Clone)] #[deriving(Clone)]
struct Match<'a,'b> { struct Match<'a,'b> {
pats: ~[@ast::Pat], pats: Vec<@ast::Pat> ,
data: ArmData<'a,'b>, data: ArmData<'a,'b>,
bound_ptrs: ~[(Ident, ValueRef)] bound_ptrs: Vec<(Ident, ValueRef)> }
}
impl<'a,'b> Repr for Match<'a,'b> { impl<'a,'b> Repr for Match<'a,'b> {
fn repr(&self, tcx: ty::ctxt) -> ~str { fn repr(&self, tcx: ty::ctxt) -> ~str {
@ -439,9 +439,9 @@ impl<'a,'b> Repr for Match<'a,'b> {
fn has_nested_bindings(m: &[Match], col: uint) -> bool { fn has_nested_bindings(m: &[Match], col: uint) -> bool {
for br in m.iter() { for br in m.iter() {
match br.pats[col].node { match br.pats.get(col).node {
ast::PatIdent(_, _, Some(_)) => return true, ast::PatIdent(_, _, Some(_)) => return true,
_ => () _ => ()
} }
} }
return false; return false;
@ -452,7 +452,7 @@ fn expand_nested_bindings<'r,'b>(
m: &[Match<'r,'b>], m: &[Match<'r,'b>],
col: uint, col: uint,
val: ValueRef) val: ValueRef)
-> ~[Match<'r,'b>] { -> Vec<Match<'r,'b>> {
debug!("expand_nested_bindings(bcx={}, m={}, col={}, val={})", debug!("expand_nested_bindings(bcx={}, m={}, col={}, val={})",
bcx.to_str(), bcx.to_str(),
m.repr(bcx.tcx()), m.repr(bcx.tcx()),
@ -460,14 +460,14 @@ fn expand_nested_bindings<'r,'b>(
bcx.val_to_str(val)); bcx.val_to_str(val));
let _indenter = indenter(); let _indenter = indenter();
m.map(|br| { m.iter().map(|br| {
match br.pats[col].node { match br.pats.get(col).node {
ast::PatIdent(_, ref path, Some(inner)) => { ast::PatIdent(_, ref path, Some(inner)) => {
let pats = vec::append( let pats = vec_ng::append(
br.pats.slice(0u, col).to_owned(), Vec::from_slice(br.pats.slice(0u, col)),
vec::append(~[inner], vec_ng::append(vec!(inner),
br.pats.slice(col + 1u, br.pats.slice(col + 1u,
br.pats.len()))); br.pats.len())).as_slice());
let mut res = Match { let mut res = Match {
pats: pats, pats: pats,
@ -479,7 +479,7 @@ fn expand_nested_bindings<'r,'b>(
} }
_ => (*br).clone(), _ => (*br).clone(),
} }
}) }).collect()
} }
fn assert_is_binding_or_wild(bcx: &Block, p: @ast::Pat) { fn assert_is_binding_or_wild(bcx: &Block, p: @ast::Pat) {
@ -491,7 +491,7 @@ fn assert_is_binding_or_wild(bcx: &Block, p: @ast::Pat) {
} }
} }
type enter_pat<'a> = 'a |@ast::Pat| -> Option<~[@ast::Pat]>; type enter_pat<'a> = 'a |@ast::Pat| -> Option<Vec<@ast::Pat> >;
fn enter_match<'r,'b>( fn enter_match<'r,'b>(
bcx: &'b Block<'b>, bcx: &'b Block<'b>,
@ -500,7 +500,7 @@ fn enter_match<'r,'b>(
col: uint, col: uint,
val: ValueRef, val: ValueRef,
e: enter_pat) e: enter_pat)
-> ~[Match<'r,'b>] { -> Vec<Match<'r,'b>> {
debug!("enter_match(bcx={}, m={}, col={}, val={})", debug!("enter_match(bcx={}, m={}, col={}, val={})",
bcx.to_str(), bcx.to_str(),
m.repr(bcx.tcx()), m.repr(bcx.tcx()),
@ -508,16 +508,16 @@ fn enter_match<'r,'b>(
bcx.val_to_str(val)); bcx.val_to_str(val));
let _indenter = indenter(); let _indenter = indenter();
let mut result = ~[]; let mut result = Vec::new();
for br in m.iter() { for br in m.iter() {
match e(br.pats[col]) { match e(*br.pats.get(col)) {
Some(sub) => { Some(sub) => {
let pats = let pats =
vec::append( vec_ng::append(
vec::append(sub, br.pats.slice(0u, col)), vec_ng::append(sub, br.pats.slice(0u, col)),
br.pats.slice(col + 1u, br.pats.len())); br.pats.slice(col + 1u, br.pats.len()));
let this = br.pats[col]; let this = *br.pats.get(col);
let mut bound_ptrs = br.bound_ptrs.clone(); let mut bound_ptrs = br.bound_ptrs.clone();
match this.node { match this.node {
ast::PatIdent(_, ref path, None) => { ast::PatIdent(_, ref path, None) => {
@ -550,7 +550,7 @@ fn enter_default<'r,'b>(
col: uint, col: uint,
val: ValueRef, val: ValueRef,
chk: &FailureHandler) chk: &FailureHandler)
-> ~[Match<'r,'b>] { -> Vec<Match<'r,'b>> {
debug!("enter_default(bcx={}, m={}, col={}, val={})", debug!("enter_default(bcx={}, m={}, col={}, val={})",
bcx.to_str(), bcx.to_str(),
m.repr(bcx.tcx()), m.repr(bcx.tcx()),
@ -561,8 +561,8 @@ fn enter_default<'r,'b>(
// Collect all of the matches that can match against anything. // Collect all of the matches that can match against anything.
let matches = enter_match(bcx, dm, m, col, val, |p| { let matches = enter_match(bcx, dm, m, col, val, |p| {
match p.node { match p.node {
ast::PatWild | ast::PatWildMulti | ast::PatTup(_) => Some(~[]), ast::PatWild | ast::PatWildMulti | ast::PatTup(_) => Some(Vec::new()),
ast::PatIdent(_, _, None) if pat_is_binding(dm, p) => Some(~[]), ast::PatIdent(_, _, None) if pat_is_binding(dm, p) => Some(Vec::new()),
_ => None _ => None
} }
}); });
@ -587,7 +587,7 @@ fn enter_default<'r,'b>(
_ => false _ => false
}; };
if is_exhaustive { ~[] } else { matches } if is_exhaustive { Vec::new() } else { matches }
} }
// <pcwalton> nmatsakis: what does enter_opt do? // <pcwalton> nmatsakis: what does enter_opt do?
@ -621,7 +621,7 @@ fn enter_opt<'r,'b>(
col: uint, col: uint,
variant_size: uint, variant_size: uint,
val: ValueRef) val: ValueRef)
-> ~[Match<'r,'b>] { -> Vec<Match<'r,'b>> {
debug!("enter_opt(bcx={}, m={}, opt={:?}, col={}, val={})", debug!("enter_opt(bcx={}, m={}, opt={:?}, col={}, val={})",
bcx.to_str(), bcx.to_str(),
m.repr(bcx.tcx()), m.repr(bcx.tcx()),
@ -643,7 +643,7 @@ fn enter_opt<'r,'b>(
}; };
let const_def_id = ast_util::def_id_of_def(const_def); let const_def_id = ast_util::def_id_of_def(const_def);
if opt_eq(tcx, &lit(ConstLit(const_def_id)), opt) { if opt_eq(tcx, &lit(ConstLit(const_def_id)), opt) {
Some(~[]) Some(Vec::new())
} else { } else {
None None
} }
@ -652,7 +652,7 @@ fn enter_opt<'r,'b>(
if opt_eq(tcx, &variant_opt(bcx, p.id), opt) { if opt_eq(tcx, &variant_opt(bcx, p.id), opt) {
// FIXME: Must we clone? // FIXME: Must we clone?
match *subpats { match *subpats {
None => Some(vec::from_elem(variant_size, dummy)), None => Some(Vec::from_elem(variant_size, dummy)),
Some(ref subpats) => { Some(ref subpats) => {
Some((*subpats).iter().map(|x| *x).collect()) Some((*subpats).iter().map(|x| *x).collect())
} }
@ -664,16 +664,16 @@ fn enter_opt<'r,'b>(
ast::PatIdent(_, _, None) ast::PatIdent(_, _, None)
if pat_is_variant_or_struct(tcx.def_map, p) => { if pat_is_variant_or_struct(tcx.def_map, p) => {
if opt_eq(tcx, &variant_opt(bcx, p.id), opt) { if opt_eq(tcx, &variant_opt(bcx, p.id), opt) {
Some(~[]) Some(Vec::new())
} else { } else {
None None
} }
} }
ast::PatLit(l) => { ast::PatLit(l) => {
if opt_eq(tcx, &lit(ExprLit(l)), opt) {Some(~[])} else {None} if opt_eq(tcx, &lit(ExprLit(l)), opt) {Some(Vec::new())} else {None}
} }
ast::PatRange(l1, l2) => { ast::PatRange(l1, l2) => {
if opt_eq(tcx, &range(l1, l2), opt) {Some(~[])} else {None} if opt_eq(tcx, &range(l1, l2), opt) {Some(Vec::new())} else {None}
} }
ast::PatStruct(_, ref field_pats, _) => { ast::PatStruct(_, ref field_pats, _) => {
if opt_eq(tcx, &variant_opt(bcx, p.id), opt) { if opt_eq(tcx, &variant_opt(bcx, p.id), opt) {
@ -695,7 +695,7 @@ fn enter_opt<'r,'b>(
// Reorder the patterns into the same order they were // Reorder the patterns into the same order they were
// specified in the struct definition. Also fill in // specified in the struct definition. Also fill in
// unspecified fields with dummy. // unspecified fields with dummy.
let mut reordered_patterns = ~[]; let mut reordered_patterns = Vec::new();
let r = ty::lookup_struct_fields(tcx, struct_id); let r = ty::lookup_struct_fields(tcx, struct_id);
for field in r.iter() { for field in r.iter() {
match field_pats.iter().find(|p| p.ident.name match field_pats.iter().find(|p| p.ident.name
@ -722,7 +722,7 @@ fn enter_opt<'r,'b>(
let this_opt = vec_len(n, vec_len_ge(before.len()), let this_opt = vec_len(n, vec_len_ge(before.len()),
(lo, hi)); (lo, hi));
if opt_eq(tcx, &this_opt, opt) { if opt_eq(tcx, &this_opt, opt) {
let mut new_before = ~[]; let mut new_before = Vec::new();
for pat in before.iter() { for pat in before.iter() {
new_before.push(*pat); new_before.push(*pat);
} }
@ -738,7 +738,7 @@ fn enter_opt<'r,'b>(
None if i >= lo && i <= hi => { None if i >= lo && i <= hi => {
let n = before.len(); let n = before.len();
if opt_eq(tcx, &vec_len(n, vec_len_eq, (lo,hi)), opt) { if opt_eq(tcx, &vec_len(n, vec_len_eq, (lo,hi)), opt) {
let mut new_before = ~[]; let mut new_before = Vec::new();
for pat in before.iter() { for pat in before.iter() {
new_before.push(*pat); new_before.push(*pat);
} }
@ -762,7 +762,7 @@ fn enter_opt<'r,'b>(
// cause the default match to fire spuriously. // cause the default match to fire spuriously.
match *opt { match *opt {
vec_len(..) => None, vec_len(..) => None,
_ => Some(vec::from_elem(variant_size, dummy)) _ => Some(Vec::from_elem(variant_size, dummy))
} }
} }
}; };
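`enter_opt` and the extractors below also switch the free constructors from `std::vec` to the `Vec` equivalents: `vec::from_elem` becomes `Vec::from_elem` and `vec::from_fn` becomes `Vec::from_fn`, with the same arguments. A tiny sketch of both (the values are arbitrary):

    use std::vec_ng::Vec;

    fn demo(variant_size: uint) -> (Vec<uint>, Vec<uint>) {
        let dummies = Vec::from_elem(variant_size, 0u);       // was: vec::from_elem(..)
        let doubled = Vec::from_fn(variant_size, |i| i * 2u);  // was: vec::from_fn(..)
        (dummies, doubled)
    }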
@ -778,7 +778,7 @@ fn enter_rec_or_struct<'r,'b>(
col: uint, col: uint,
fields: &[ast::Ident], fields: &[ast::Ident],
val: ValueRef) val: ValueRef)
-> ~[Match<'r,'b>] { -> Vec<Match<'r,'b>> {
debug!("enter_rec_or_struct(bcx={}, m={}, col={}, val={})", debug!("enter_rec_or_struct(bcx={}, m={}, col={}, val={})",
bcx.to_str(), bcx.to_str(),
m.repr(bcx.tcx()), m.repr(bcx.tcx()),
@ -790,7 +790,7 @@ fn enter_rec_or_struct<'r,'b>(
enter_match(bcx, dm, m, col, val, |p| { enter_match(bcx, dm, m, col, val, |p| {
match p.node { match p.node {
ast::PatStruct(_, ref fpats, _) => { ast::PatStruct(_, ref fpats, _) => {
let mut pats = ~[]; let mut pats = Vec::new();
for fname in fields.iter() { for fname in fields.iter() {
match fpats.iter().find(|p| p.ident.name == fname.name) { match fpats.iter().find(|p| p.ident.name == fname.name) {
None => pats.push(dummy), None => pats.push(dummy),
@ -801,7 +801,7 @@ fn enter_rec_or_struct<'r,'b>(
} }
_ => { _ => {
assert_is_binding_or_wild(bcx, p); assert_is_binding_or_wild(bcx, p);
Some(vec::from_elem(fields.len(), dummy)) Some(Vec::from_elem(fields.len(), dummy))
} }
} }
}) })
@ -814,7 +814,7 @@ fn enter_tup<'r,'b>(
col: uint, col: uint,
val: ValueRef, val: ValueRef,
n_elts: uint) n_elts: uint)
-> ~[Match<'r,'b>] { -> Vec<Match<'r,'b>> {
debug!("enter_tup(bcx={}, m={}, col={}, val={})", debug!("enter_tup(bcx={}, m={}, col={}, val={})",
bcx.to_str(), bcx.to_str(),
m.repr(bcx.tcx()), m.repr(bcx.tcx()),
@ -826,7 +826,7 @@ fn enter_tup<'r,'b>(
enter_match(bcx, dm, m, col, val, |p| { enter_match(bcx, dm, m, col, val, |p| {
match p.node { match p.node {
ast::PatTup(ref elts) => { ast::PatTup(ref elts) => {
let mut new_elts = ~[]; let mut new_elts = Vec::new();
for elt in elts.iter() { for elt in elts.iter() {
new_elts.push((*elt).clone()) new_elts.push((*elt).clone())
} }
@ -834,7 +834,7 @@ fn enter_tup<'r,'b>(
} }
_ => { _ => {
assert_is_binding_or_wild(bcx, p); assert_is_binding_or_wild(bcx, p);
Some(vec::from_elem(n_elts, dummy)) Some(Vec::from_elem(n_elts, dummy))
} }
} }
}) })
@ -847,7 +847,7 @@ fn enter_tuple_struct<'r,'b>(
col: uint, col: uint,
val: ValueRef, val: ValueRef,
n_elts: uint) n_elts: uint)
-> ~[Match<'r,'b>] { -> Vec<Match<'r,'b>> {
debug!("enter_tuple_struct(bcx={}, m={}, col={}, val={})", debug!("enter_tuple_struct(bcx={}, m={}, col={}, val={})",
bcx.to_str(), bcx.to_str(),
m.repr(bcx.tcx()), m.repr(bcx.tcx()),
@ -863,7 +863,7 @@ fn enter_tuple_struct<'r,'b>(
} }
_ => { _ => {
assert_is_binding_or_wild(bcx, p); assert_is_binding_or_wild(bcx, p);
Some(vec::from_elem(n_elts, dummy)) Some(Vec::from_elem(n_elts, dummy))
} }
} }
}) })
@ -875,7 +875,7 @@ fn enter_uniq<'r,'b>(
m: &[Match<'r,'b>], m: &[Match<'r,'b>],
col: uint, col: uint,
val: ValueRef) val: ValueRef)
-> ~[Match<'r,'b>] { -> Vec<Match<'r,'b>> {
debug!("enter_uniq(bcx={}, m={}, col={}, val={})", debug!("enter_uniq(bcx={}, m={}, col={}, val={})",
bcx.to_str(), bcx.to_str(),
m.repr(bcx.tcx()), m.repr(bcx.tcx()),
@ -887,11 +887,11 @@ fn enter_uniq<'r,'b>(
enter_match(bcx, dm, m, col, val, |p| { enter_match(bcx, dm, m, col, val, |p| {
match p.node { match p.node {
ast::PatUniq(sub) => { ast::PatUniq(sub) => {
Some(~[sub]) Some(vec!(sub))
} }
_ => { _ => {
assert_is_binding_or_wild(bcx, p); assert_is_binding_or_wild(bcx, p);
Some(~[dummy]) Some(vec!(dummy))
} }
} }
}) })
@ -904,7 +904,7 @@ fn enter_region<'r,
m: &[Match<'r,'b>], m: &[Match<'r,'b>],
col: uint, col: uint,
val: ValueRef) val: ValueRef)
-> ~[Match<'r,'b>] { -> Vec<Match<'r,'b>> {
debug!("enter_region(bcx={}, m={}, col={}, val={})", debug!("enter_region(bcx={}, m={}, col={}, val={})",
bcx.to_str(), bcx.to_str(),
m.repr(bcx.tcx()), m.repr(bcx.tcx()),
@ -916,11 +916,11 @@ fn enter_region<'r,
enter_match(bcx, dm, m, col, val, |p| { enter_match(bcx, dm, m, col, val, |p| {
match p.node { match p.node {
ast::PatRegion(sub) => { ast::PatRegion(sub) => {
Some(~[sub]) Some(vec!(sub))
} }
_ => { _ => {
assert_is_binding_or_wild(bcx, p); assert_is_binding_or_wild(bcx, p);
Some(~[dummy]) Some(vec!(dummy))
} }
} }
}) })
@ -929,9 +929,9 @@ fn enter_region<'r,
// Returns the options in one column of matches. An option is something that // Returns the options in one column of matches. An option is something that
// needs to be conditionally matched at runtime; for example, the discriminant // needs to be conditionally matched at runtime; for example, the discriminant
// on a set of enum variants or a literal. // on a set of enum variants or a literal.
fn get_options(bcx: &Block, m: &[Match], col: uint) -> ~[Opt] { fn get_options(bcx: &Block, m: &[Match], col: uint) -> Vec<Opt> {
let ccx = bcx.ccx(); let ccx = bcx.ccx();
fn add_to_set(tcx: ty::ctxt, set: &mut ~[Opt], val: Opt) { fn add_to_set(tcx: ty::ctxt, set: &mut Vec<Opt> , val: Opt) {
if set.iter().any(|l| opt_eq(tcx, l, &val)) {return;} if set.iter().any(|l| opt_eq(tcx, l, &val)) {return;}
set.push(val); set.push(val);
} }
@ -939,22 +939,25 @@ fn get_options(bcx: &Block, m: &[Match], col: uint) -> ~[Opt] {
// conditions over-match, we need to be careful about them. This // conditions over-match, we need to be careful about them. This
// means that in order to properly handle things in order, we need // means that in order to properly handle things in order, we need
// to not always merge conditions. // to not always merge conditions.
fn add_veclen_to_set(set: &mut ~[Opt], i: uint, fn add_veclen_to_set(set: &mut Vec<Opt> , i: uint,
len: uint, vlo: VecLenOpt) { len: uint, vlo: VecLenOpt) {
match set.last() { match set.last() {
// If the last condition in the list matches the one we want // If the last condition in the list matches the one we want
// to add, then extend its range. Otherwise, make a new // to add, then extend its range. Otherwise, make a new
// vec_len with a range just covering the new entry. // vec_len with a range just covering the new entry.
Some(&vec_len(len2, vlo2, (start, end))) Some(&vec_len(len2, vlo2, (start, end)))
if len == len2 && vlo == vlo2 => if len == len2 && vlo == vlo2 => {
set[set.len() - 1] = vec_len(len, vlo, (start, end+1)), let length = set.len();
*set.get_mut(length - 1) =
vec_len(len, vlo, (start, end+1))
}
_ => set.push(vec_len(len, vlo, (i, i))) _ => set.push(vec_len(len, vlo, (i, i)))
} }
} }
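`add_veclen_to_set` widens the range of the last recorded `vec_len` option in place. Since `set[set.len() - 1] = ...` is no longer available once `set` is a `Vec`, and `set.len()` cannot be called while `set` is already mutably borrowed for `get_mut`, the length is read into a local first. The same two-step shape on a plain `Vec<uint>` (the helper is invented):

    use std::vec_ng::Vec;

    fn bump_last(set: &mut Vec<uint>) {
        if set.len() > 0 {
            // read the length first, because set.len() cannot be called
            // while `set` is already mutably borrowed for get_mut()
            let length = set.len();
            *set.get_mut(length - 1) += 1;
        }
    }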
let mut found = ~[]; let mut found = Vec::new();
for (i, br) in m.iter().enumerate() { for (i, br) in m.iter().enumerate() {
let cur = br.pats[col]; let cur = *br.pats.get(col);
match cur.node { match cur.node {
ast::PatLit(l) => { ast::PatLit(l) => {
add_to_set(ccx.tcx, &mut found, lit(ExprLit(l))); add_to_set(ccx.tcx, &mut found, lit(ExprLit(l)));
@ -1020,7 +1023,7 @@ fn get_options(bcx: &Block, m: &[Match], col: uint) -> ~[Opt] {
} }
struct ExtractedBlock<'a> { struct ExtractedBlock<'a> {
vals: ~[ValueRef], vals: Vec<ValueRef> ,
bcx: &'a Block<'a>, bcx: &'a Block<'a>,
} }
@ -1031,7 +1034,7 @@ fn extract_variant_args<'a>(
val: ValueRef) val: ValueRef)
-> ExtractedBlock<'a> { -> ExtractedBlock<'a> {
let _icx = push_ctxt("match::extract_variant_args"); let _icx = push_ctxt("match::extract_variant_args");
let args = vec::from_fn(adt::num_args(repr, disr_val), |i| { let args = Vec::from_fn(adt::num_args(repr, disr_val), |i| {
adt::trans_field_ptr(bcx, repr, val, disr_val, i) adt::trans_field_ptr(bcx, repr, val, disr_val, i)
}); });
@ -1066,7 +1069,7 @@ fn extract_vec_elems<'a>(
let (base, len) = vec_datum.get_vec_base_and_len(bcx); let (base, len) = vec_datum.get_vec_base_and_len(bcx);
let vt = tvec::vec_types(bcx, node_id_type(bcx, pat_id)); let vt = tvec::vec_types(bcx, node_id_type(bcx, pat_id));
let mut elems = vec::from_fn(elem_count, |i| { let mut elems = Vec::from_fn(elem_count, |i| {
match slice { match slice {
None => GEPi(bcx, base, [i]), None => GEPi(bcx, base, [i]),
Some(n) if i < n => GEPi(bcx, base, [i]), Some(n) if i < n => GEPi(bcx, base, [i]),
@ -1092,7 +1095,7 @@ fn extract_vec_elems<'a>(
Store(bcx, slice_begin, Store(bcx, slice_begin,
GEPi(bcx, scratch.val, [0u, abi::slice_elt_base])); GEPi(bcx, scratch.val, [0u, abi::slice_elt_base]));
Store(bcx, slice_len, GEPi(bcx, scratch.val, [0u, abi::slice_elt_len])); Store(bcx, slice_len, GEPi(bcx, scratch.val, [0u, abi::slice_elt_len]));
elems[n] = scratch.val; *elems.get_mut(n) = scratch.val;
} }
ExtractedBlock { vals: elems, bcx: bcx } ExtractedBlock { vals: elems, bcx: bcx }
@ -1108,13 +1111,13 @@ fn collect_record_or_struct_fields<'a>(
bcx: &'a Block<'a>, bcx: &'a Block<'a>,
m: &[Match], m: &[Match],
col: uint) col: uint)
-> Option<~[ast::Ident]> { -> Option<Vec<ast::Ident> > {
let mut fields: ~[ast::Ident] = ~[]; let mut fields: Vec<ast::Ident> = Vec::new();
let mut found = false; let mut found = false;
for br in m.iter() { for br in m.iter() {
match br.pats[col].node { match br.pats.get(col).node {
ast::PatStruct(_, ref fs, _) => { ast::PatStruct(_, ref fs, _) => {
match ty::get(node_id_type(bcx, br.pats[col].id)).sty { match ty::get(node_id_type(bcx, br.pats.get(col).id)).sty {
ty::ty_struct(..) => { ty::ty_struct(..) => {
extend(&mut fields, fs.as_slice()); extend(&mut fields, fs.as_slice());
found = true; found = true;
@ -1131,7 +1134,7 @@ fn collect_record_or_struct_fields<'a>(
return None; return None;
} }
fn extend(idents: &mut ~[ast::Ident], field_pats: &[ast::FieldPat]) { fn extend(idents: &mut Vec<ast::Ident> , field_pats: &[ast::FieldPat]) {
for field_pat in field_pats.iter() { for field_pat in field_pats.iter() {
let field_ident = field_pat.ident; let field_ident = field_pat.ident;
if !idents.iter().any(|x| x.name == field_ident.name) { if !idents.iter().any(|x| x.name == field_ident.name) {
@ -1143,7 +1146,7 @@ fn collect_record_or_struct_fields<'a>(
fn pats_require_rooting(bcx: &Block, m: &[Match], col: uint) -> bool { fn pats_require_rooting(bcx: &Block, m: &[Match], col: uint) -> bool {
m.iter().any(|br| { m.iter().any(|br| {
let pat_id = br.pats[col].id; let pat_id = br.pats.get(col).id;
let key = root_map_key {id: pat_id, derefs: 0u }; let key = root_map_key {id: pat_id, derefs: 0u };
let root_map = bcx.ccx().maps.root_map.borrow(); let root_map = bcx.ccx().maps.root_map.borrow();
root_map.get().contains_key(&key) root_map.get().contains_key(&key)
@ -1157,7 +1160,7 @@ fn pats_require_rooting(bcx: &Block, m: &[Match], col: uint) -> bool {
macro_rules! any_pat ( macro_rules! any_pat (
($m:expr, $pattern:pat) => ( ($m:expr, $pattern:pat) => (
($m).iter().any(|br| { ($m).iter().any(|br| {
match br.pats[col].node { match br.pats.get(col).node {
$pattern => true, $pattern => true,
_ => false _ => false
} }
@ -1179,7 +1182,7 @@ fn any_tup_pat(m: &[Match], col: uint) -> bool {
fn any_tuple_struct_pat(bcx: &Block, m: &[Match], col: uint) -> bool { fn any_tuple_struct_pat(bcx: &Block, m: &[Match], col: uint) -> bool {
m.iter().any(|br| { m.iter().any(|br| {
let pat = br.pats[col]; let pat = *br.pats.get(col);
match pat.node { match pat.node {
ast::PatEnum(_, Some(_)) => { ast::PatEnum(_, Some(_)) => {
let def_map = bcx.tcx().def_map.borrow(); let def_map = bcx.tcx().def_map.borrow();
@ -1256,10 +1259,10 @@ fn pick_col(m: &[Match]) -> uint {
_ => 0u _ => 0u
} }
} }
let mut scores = vec::from_elem(m[0].pats.len(), 0u); let mut scores = Vec::from_elem(m[0].pats.len(), 0u);
for br in m.iter() { for br in m.iter() {
for (i, p) in br.pats.iter().enumerate() { for (i, p) in br.pats.iter().enumerate() {
scores[i] += score(*p); *scores.get_mut(i) += score(*p);
} }
} }
let mut max_score = 0u; let mut max_score = 0u;
@ -1512,7 +1515,12 @@ fn compile_submatch<'r,
if has_nested_bindings(m, col) { if has_nested_bindings(m, col) {
let expanded = expand_nested_bindings(bcx, m, col, val); let expanded = expand_nested_bindings(bcx, m, col, val);
compile_submatch_continue(bcx, expanded, vals, chk, col, val) compile_submatch_continue(bcx,
expanded.as_slice(),
vals,
chk,
col,
val)
} else { } else {
compile_submatch_continue(bcx, m, vals, chk, col, val) compile_submatch_continue(bcx, m, vals, chk, col, val)
} }
@ -1530,15 +1538,15 @@ fn compile_submatch_continue<'r,
let tcx = bcx.tcx(); let tcx = bcx.tcx();
let dm = tcx.def_map; let dm = tcx.def_map;
let vals_left = vec::append(vals.slice(0u, col).to_owned(), let vals_left = vec_ng::append(Vec::from_slice(vals.slice(0u, col)),
vals.slice(col + 1u, vals.len())); vals.slice(col + 1u, vals.len()));
let ccx = bcx.fcx.ccx; let ccx = bcx.fcx.ccx;
let mut pat_id = 0; let mut pat_id = 0;
for br in m.iter() { for br in m.iter() {
// Find a real id (we're adding placeholder wildcard patterns, but // Find a real id (we're adding placeholder wildcard patterns, but
// each column is guaranteed to have at least one real pattern) // each column is guaranteed to have at least one real pattern)
if pat_id == 0 { if pat_id == 0 {
pat_id = br.pats[col].id; pat_id = br.pats.get(col).id;
} }
} }
@ -1557,8 +1565,14 @@ fn compile_submatch_continue<'r,
}); });
compile_submatch( compile_submatch(
bcx, bcx,
enter_rec_or_struct(bcx, dm, m, col, *rec_fields, val), enter_rec_or_struct(bcx,
vec::append(rec_vals, vals_left), dm,
m,
col,
rec_fields.as_slice(),
val).as_slice(),
vec_ng::append(rec_vals,
vals_left.as_slice()).as_slice(),
chk); chk);
}); });
return; return;
@ -1573,11 +1587,19 @@ fn compile_submatch_continue<'r,
ty::ty_tup(ref elts) => elts.len(), ty::ty_tup(ref elts) => elts.len(),
_ => ccx.sess.bug("non-tuple type in tuple pattern") _ => ccx.sess.bug("non-tuple type in tuple pattern")
}; };
let tup_vals = vec::from_fn(n_tup_elts, |i| { let tup_vals = Vec::from_fn(n_tup_elts, |i| {
adt::trans_field_ptr(bcx, tup_repr, val, 0, i) adt::trans_field_ptr(bcx, tup_repr, val, 0, i)
}); });
compile_submatch(bcx, enter_tup(bcx, dm, m, col, val, n_tup_elts), compile_submatch(bcx,
vec::append(tup_vals, vals_left), chk); enter_tup(bcx,
dm,
m,
col,
val,
n_tup_elts).as_slice(),
vec_ng::append(tup_vals,
vals_left.as_slice()).as_slice(),
chk);
return; return;
} }
@ -1595,28 +1617,35 @@ fn compile_submatch_continue<'r,
} }
let struct_repr = adt::represent_type(bcx.ccx(), struct_ty); let struct_repr = adt::represent_type(bcx.ccx(), struct_ty);
let llstructvals = vec::from_fn(struct_element_count, |i| { let llstructvals = Vec::from_fn(struct_element_count, |i| {
adt::trans_field_ptr(bcx, struct_repr, val, 0, i) adt::trans_field_ptr(bcx, struct_repr, val, 0, i)
}); });
compile_submatch(bcx, compile_submatch(bcx,
enter_tuple_struct(bcx, dm, m, col, val, enter_tuple_struct(bcx, dm, m, col, val,
struct_element_count), struct_element_count).as_slice(),
vec::append(llstructvals, vals_left), vec_ng::append(llstructvals,
vals_left.as_slice()).as_slice(),
chk); chk);
return; return;
} }
if any_uniq_pat(m, col) { if any_uniq_pat(m, col) {
let llbox = Load(bcx, val); let llbox = Load(bcx, val);
compile_submatch(bcx, enter_uniq(bcx, dm, m, col, val), compile_submatch(bcx,
vec::append(~[llbox], vals_left), chk); enter_uniq(bcx, dm, m, col, val).as_slice(),
vec_ng::append(vec!(llbox),
vals_left.as_slice()).as_slice(),
chk);
return; return;
} }
if any_region_pat(m, col) { if any_region_pat(m, col) {
let loaded_val = Load(bcx, val); let loaded_val = Load(bcx, val);
compile_submatch(bcx, enter_region(bcx, dm, m, col, val), compile_submatch(bcx,
vec::append(~[loaded_val], vals_left), chk); enter_region(bcx, dm, m, col, val).as_slice(),
vec_ng::append(vec!(loaded_val),
vals_left.as_slice()).as_slice(),
chk);
return; return;
} }
@ -1627,7 +1656,7 @@ fn compile_submatch_continue<'r,
let mut test_val = val; let mut test_val = val;
debug!("test_val={}", bcx.val_to_str(test_val)); debug!("test_val={}", bcx.val_to_str(test_val));
if opts.len() > 0u { if opts.len() > 0u {
match opts[0] { match *opts.get(0) {
var(_, repr) => { var(_, repr) => {
let (the_kind, val_opt) = adt::trans_switch(bcx, repr, val); let (the_kind, val_opt) = adt::trans_switch(bcx, repr, val);
kind = the_kind; kind = the_kind;
@ -1773,7 +1802,7 @@ fn compile_submatch_continue<'r,
} }
let mut size = 0u; let mut size = 0u;
let mut unpacked = ~[]; let mut unpacked = Vec::new();
match *opt { match *opt {
var(disr_val, repr) => { var(disr_val, repr) => {
let ExtractedBlock {vals: argvals, bcx: new_bcx} = let ExtractedBlock {vals: argvals, bcx: new_bcx} =
@ -1796,12 +1825,20 @@ fn compile_submatch_continue<'r,
lit(_) | range(_, _) => () lit(_) | range(_, _) => ()
} }
let opt_ms = enter_opt(opt_cx, m, opt, col, size, val); let opt_ms = enter_opt(opt_cx, m, opt, col, size, val);
let opt_vals = vec::append(unpacked, vals_left); let opt_vals = vec_ng::append(unpacked, vals_left.as_slice());
match branch_chk { match branch_chk {
None => compile_submatch(opt_cx, opt_ms, opt_vals, chk), None => {
compile_submatch(opt_cx,
opt_ms.as_slice(),
opt_vals.as_slice(),
chk)
}
Some(branch_chk) => { Some(branch_chk) => {
compile_submatch(opt_cx, opt_ms, opt_vals, &branch_chk) compile_submatch(opt_cx,
opt_ms.as_slice(),
opt_vals.as_slice(),
&branch_chk)
} }
} }
} }
@ -1812,7 +1849,10 @@ fn compile_submatch_continue<'r,
Br(bcx, else_cx.llbb); Br(bcx, else_cx.llbb);
} }
if kind != single { if kind != single {
compile_submatch(else_cx, defaults, vals_left, chk); compile_submatch(else_cx,
defaults.as_slice(),
vals_left.as_slice(),
chk);
} }
} }
} }
@ -1884,8 +1924,8 @@ fn trans_match_inner<'a>(scope_cx: &'a Block<'a>,
return bcx; return bcx;
} }
let mut arm_datas = ~[]; let mut arm_datas = Vec::new();
let mut matches = ~[]; let mut matches = Vec::new();
for arm in arms.iter() { for arm in arms.iter() {
let body = fcx.new_id_block("case_body", arm.body.id); let body = fcx.new_id_block("case_body", arm.body.id);
let bindings_map = create_bindings_map(bcx, *arm.pats.get(0)); let bindings_map = create_bindings_map(bcx, *arm.pats.get(0));
@ -1897,9 +1937,9 @@ fn trans_match_inner<'a>(scope_cx: &'a Block<'a>,
arm_datas.push(arm_data.clone()); arm_datas.push(arm_data.clone());
for p in arm.pats.iter() { for p in arm.pats.iter() {
matches.push(Match { matches.push(Match {
pats: ~[*p], pats: vec!(*p),
data: arm_data.clone(), data: arm_data.clone(),
bound_ptrs: ~[], bound_ptrs: Vec::new(),
}); });
} }
} }
@ -1922,9 +1962,9 @@ fn trans_match_inner<'a>(scope_cx: &'a Block<'a>,
} }
}; };
let lldiscr = discr_datum.val; let lldiscr = discr_datum.val;
compile_submatch(bcx, matches, [lldiscr], &chk); compile_submatch(bcx, matches.as_slice(), [lldiscr], &chk);
let mut arm_cxs = ~[]; let mut arm_cxs = Vec::new();
for arm_data in arm_datas.iter() { for arm_data in arm_datas.iter() {
let mut bcx = arm_data.bodycx; let mut bcx = arm_data.bodycx;
@ -1945,7 +1985,7 @@ fn trans_match_inner<'a>(scope_cx: &'a Block<'a>,
arm_cxs.push(bcx); arm_cxs.push(bcx);
} }
bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs); bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs.as_slice());
return bcx; return bcx;
} }


@ -53,17 +53,18 @@ use middle::trans::_match;
use middle::trans::build::*; use middle::trans::build::*;
use middle::trans::common::*; use middle::trans::common::*;
use middle::trans::machine; use middle::trans::machine;
use middle::trans::type_::Type;
use middle::trans::type_of; use middle::trans::type_of;
use middle::ty; use middle::ty;
use middle::ty::Disr; use middle::ty::Disr;
use std::vec_ng::Vec;
use std::vec_ng;
use syntax::abi::{X86, X86_64, Arm, Mips}; use syntax::abi::{X86, X86_64, Arm, Mips};
use syntax::ast; use syntax::ast;
use syntax::attr; use syntax::attr;
use syntax::attr::IntType; use syntax::attr::IntType;
use util::ppaux::ty_to_str; use util::ppaux::ty_to_str;
use middle::trans::type_::Type;
type Hint = attr::ReprAttr; type Hint = attr::ReprAttr;
@ -83,7 +84,7 @@ pub enum Repr {
* General-case enums: for each case there is a struct, and they * General-case enums: for each case there is a struct, and they
* all start with a field for the discriminant. * all start with a field for the discriminant.
*/ */
General(IntType, ~[Struct]), General(IntType, Vec<Struct> ),
/** /**
* Two cases distinguished by a nullable pointer: the case with discriminant * Two cases distinguished by a nullable pointer: the case with discriminant
* `nndiscr` is represented by the struct `nonnull`, where the `ptrfield`th * `nndiscr` is represented by the struct `nonnull`, where the `ptrfield`th
@ -96,7 +97,7 @@ pub enum Repr {
* identity function. * identity function.
*/ */
NullablePointer{ nonnull: Struct, nndiscr: Disr, ptrfield: uint, NullablePointer{ nonnull: Struct, nndiscr: Disr, ptrfield: uint,
nullfields: ~[ty::t] } nullfields: Vec<ty::t> }
} }
/// For structs, and struct-like parts of anything fancier. /// For structs, and struct-like parts of anything fancier.
@ -104,8 +105,7 @@ pub struct Struct {
size: u64, size: u64,
align: u64, align: u64,
packed: bool, packed: bool,
fields: ~[ty::t] fields: Vec<ty::t> }
}
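With `General` now carrying a `Vec<Struct>` and `Struct::fields` a `Vec<ty::t>`, the consumers later in this file index cases as `cases.get(discr as uint)` and fields as `fields.get(ix)` instead of the old bracket syntax. A schematic sketch of that access pattern on a stand-in type (not the real `Repr`):

    use std::vec_ng::Vec;

    struct CaseInfo { fields: Vec<uint> }    // stand-in for the real Struct

    // was: cases[discr].fields.len() - 1
    fn num_case_args(cases: &Vec<CaseInfo>, discr: u64) -> uint {
        cases.get(discr as uint).fields.len() - 1
    }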
/** /**
* Convenience for `represent_type`. There should probably be more or * Convenience for `represent_type`. There should probably be more or
@ -137,7 +137,7 @@ pub fn represent_type(cx: &CrateContext, t: ty::t) -> @Repr {
fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr { fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
match ty::get(t).sty { match ty::get(t).sty {
ty::ty_tup(ref elems) => { ty::ty_tup(ref elems) => {
return Univariant(mk_struct(cx, *elems, false), false) return Univariant(mk_struct(cx, elems.as_slice(), false), false)
} }
ty::ty_struct(def_id, ref substs) => { ty::ty_struct(def_id, ref substs) => {
let fields = ty::lookup_struct_fields(cx.tcx, def_id); let fields = ty::lookup_struct_fields(cx.tcx, def_id);
@ -148,7 +148,7 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
let dtor = ty::ty_dtor(cx.tcx, def_id).has_drop_flag(); let dtor = ty::ty_dtor(cx.tcx, def_id).has_drop_flag();
if dtor { ftys.push(ty::mk_bool()); } if dtor { ftys.push(ty::mk_bool()); }
return Univariant(mk_struct(cx, ftys, packed), dtor) return Univariant(mk_struct(cx, ftys.as_slice(), packed), dtor)
} }
ty::ty_enum(def_id, ref substs) => { ty::ty_enum(def_id, ref substs) => {
let cases = get_cases(cx.tcx, def_id, substs); let cases = get_cases(cx.tcx, def_id, substs);
@ -186,23 +186,29 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
// Equivalent to a struct/tuple/newtype. // Equivalent to a struct/tuple/newtype.
// (Typechecking will reject discriminant-sizing attrs.) // (Typechecking will reject discriminant-sizing attrs.)
assert_eq!(hint, attr::ReprAny); assert_eq!(hint, attr::ReprAny);
return Univariant(mk_struct(cx, cases[0].tys, false), false) return Univariant(mk_struct(cx,
cases.get(0).tys.as_slice(),
false),
false)
} }
if cases.len() == 2 && hint == attr::ReprAny { if cases.len() == 2 && hint == attr::ReprAny {
// Nullable pointer optimization // Nullable pointer optimization
let mut discr = 0; let mut discr = 0;
while discr < 2 { while discr < 2 {
if cases[1 - discr].is_zerolen(cx) { if cases.get(1 - discr).is_zerolen(cx) {
match cases[discr].find_ptr() { match cases.get(discr).find_ptr() {
Some(ptrfield) => { Some(ptrfield) => {
return NullablePointer { return NullablePointer {
nndiscr: discr, nndiscr: discr as u64,
nonnull: mk_struct(cx, nonnull: mk_struct(cx,
cases[discr].tys, cases.get(discr)
.tys
.as_slice(),
false), false),
ptrfield: ptrfield, ptrfield: ptrfield,
nullfields: cases[1 - discr].tys.clone() nullfields: cases.get(1 - discr).tys
.clone()
} }
} }
None => { } None => { }
@ -217,8 +223,12 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
let bounds = IntBounds { ulo: 0, uhi: (cases.len() - 1) as u64, let bounds = IntBounds { ulo: 0, uhi: (cases.len() - 1) as u64,
slo: 0, shi: (cases.len() - 1) as i64 }; slo: 0, shi: (cases.len() - 1) as i64 };
let ity = range_to_inttype(cx, hint, &bounds); let ity = range_to_inttype(cx, hint, &bounds);
let discr = ~[ty_of_inttype(ity)]; return General(ity, cases.map(|c| {
return General(ity, cases.map(|c| mk_struct(cx, discr + c.tys, false))) let discr = vec!(ty_of_inttype(ity));
mk_struct(cx,
vec_ng::append(discr, c.tys.as_slice()).as_slice(),
false)
}))
} }
_ => cx.sess.bug("adt::represent_type called on non-ADT type") _ => cx.sess.bug("adt::represent_type called on non-ADT type")
} }
@ -254,17 +264,17 @@ pub fn is_ffi_safe(tcx: ty::ctxt, def_id: ast::DefId) -> bool {
} }
// this should probably all be in ty // this should probably all be in ty
struct Case { discr: Disr, tys: ~[ty::t] } struct Case { discr: Disr, tys: Vec<ty::t> }
impl Case { impl Case {
fn is_zerolen(&self, cx: &CrateContext) -> bool { fn is_zerolen(&self, cx: &CrateContext) -> bool {
mk_struct(cx, self.tys, false).size == 0 mk_struct(cx, self.tys.as_slice(), false).size == 0
} }
fn find_ptr(&self) -> Option<uint> { fn find_ptr(&self) -> Option<uint> {
self.tys.iter().position(|&ty| mono_data_classify(ty) == MonoNonNull) self.tys.iter().position(|&ty| mono_data_classify(ty) == MonoNonNull)
} }
} }
fn get_cases(tcx: ty::ctxt, def_id: ast::DefId, substs: &ty::substs) -> ~[Case] { fn get_cases(tcx: ty::ctxt, def_id: ast::DefId, substs: &ty::substs) -> Vec<Case> {
ty::enum_variants(tcx, def_id).map(|vi| { ty::enum_variants(tcx, def_id).map(|vi| {
let arg_tys = vi.args.map(|&raw_ty| { let arg_tys = vi.args.map(|&raw_ty| {
ty::subst(tcx, substs, raw_ty) ty::subst(tcx, substs, raw_ty)
@ -281,7 +291,7 @@ fn mk_struct(cx: &CrateContext, tys: &[ty::t], packed: bool) -> Struct {
size: machine::llsize_of_alloc(cx, llty_rec) /*bad*/as u64, size: machine::llsize_of_alloc(cx, llty_rec) /*bad*/as u64,
align: machine::llalign_of_min(cx, llty_rec) /*bad*/as u64, align: machine::llalign_of_min(cx, llty_rec) /*bad*/as u64,
packed: packed, packed: packed,
fields: tys.to_owned(), fields: Vec::from_slice(tys),
} }
} }
@ -394,7 +404,8 @@ pub fn finish_type_of(cx: &CrateContext, r: &Repr, llty: &mut Type) {
match *r { match *r {
CEnum(..) | General(..) => { } CEnum(..) | General(..) => { }
Univariant(ref st, _) | NullablePointer{ nonnull: ref st, .. } => Univariant(ref st, _) | NullablePointer{ nonnull: ref st, .. } =>
llty.set_struct_body(struct_llfields(cx, st, false), st.packed) llty.set_struct_body(struct_llfields(cx, st, false).as_slice(),
st.packed)
} }
} }
@ -403,7 +414,10 @@ fn generic_type_of(cx: &CrateContext, r: &Repr, name: Option<&str>, sizing: bool
CEnum(ity, _, _) => ll_inttype(cx, ity), CEnum(ity, _, _) => ll_inttype(cx, ity),
Univariant(ref st, _) | NullablePointer{ nonnull: ref st, .. } => { Univariant(ref st, _) | NullablePointer{ nonnull: ref st, .. } => {
match name { match name {
None => Type::struct_(struct_llfields(cx, st, sizing), st.packed), None => {
Type::struct_(struct_llfields(cx, st, sizing).as_slice(),
st.packed)
}
Some(name) => { assert_eq!(sizing, false); Type::named_struct(name) } Some(name) => { assert_eq!(sizing, false); Type::named_struct(name) }
} }
} }
@ -438,14 +452,14 @@ fn generic_type_of(cx: &CrateContext, r: &Repr, name: Option<&str>, sizing: bool
}; };
assert_eq!(machine::llalign_of_min(cx, pad_ty) as u64, align); assert_eq!(machine::llalign_of_min(cx, pad_ty) as u64, align);
assert_eq!(align % discr_size, 0); assert_eq!(align % discr_size, 0);
let fields = ~[discr_ty, let fields = vec!(discr_ty,
Type::array(&discr_ty, align / discr_size - 1), Type::array(&discr_ty, align / discr_size - 1),
pad_ty]; pad_ty);
match name { match name {
None => Type::struct_(fields, false), None => Type::struct_(fields.as_slice(), false),
Some(name) => { Some(name) => {
let mut llty = Type::named_struct(name); let mut llty = Type::named_struct(name);
llty.set_struct_body(fields, false); llty.set_struct_body(fields.as_slice(), false);
llty llty
} }
} }
@ -453,7 +467,7 @@ fn generic_type_of(cx: &CrateContext, r: &Repr, name: Option<&str>, sizing: bool
} }
} }
fn struct_llfields(cx: &CrateContext, st: &Struct, sizing: bool) -> ~[Type] { fn struct_llfields(cx: &CrateContext, st: &Struct, sizing: bool) -> Vec<Type> {
if sizing { if sizing {
st.fields.map(|&ty| type_of::sizing_type_of(cx, ty)) st.fields.map(|&ty| type_of::sizing_type_of(cx, ty))
} else { } else {
@ -518,7 +532,7 @@ fn nullable_bitdiscr(bcx: &Block, nonnull: &Struct, nndiscr: Disr, ptrfield: uin
scrutinee: ValueRef) -> ValueRef { scrutinee: ValueRef) -> ValueRef {
let cmp = if nndiscr == 0 { IntEQ } else { IntNE }; let cmp = if nndiscr == 0 { IntEQ } else { IntNE };
let llptr = Load(bcx, GEPi(bcx, scrutinee, [0, ptrfield])); let llptr = Load(bcx, GEPi(bcx, scrutinee, [0, ptrfield]));
let llptrty = type_of::type_of(bcx.ccx(), nonnull.fields[ptrfield]); let llptrty = type_of::type_of(bcx.ccx(), *nonnull.fields.get(ptrfield));
ICmp(bcx, cmp, llptr, C_null(llptrty)) ICmp(bcx, cmp, llptr, C_null(llptrty))
} }
@ -599,7 +613,8 @@ pub fn trans_start_init(bcx: &Block, r: &Repr, val: ValueRef, discr: Disr) {
NullablePointer{ nonnull: ref nonnull, nndiscr, ptrfield, .. } => { NullablePointer{ nonnull: ref nonnull, nndiscr, ptrfield, .. } => {
if discr != nndiscr { if discr != nndiscr {
let llptrptr = GEPi(bcx, val, [0, ptrfield]); let llptrptr = GEPi(bcx, val, [0, ptrfield]);
let llptrty = type_of::type_of(bcx.ccx(), nonnull.fields[ptrfield]); let llptrty = type_of::type_of(bcx.ccx(),
*nonnull.fields.get(ptrfield));
Store(bcx, C_null(llptrty), llptrptr) Store(bcx, C_null(llptrty), llptrptr)
} }
} }
@ -624,7 +639,7 @@ pub fn num_args(r: &Repr, discr: Disr) -> uint {
assert_eq!(discr, 0); assert_eq!(discr, 0);
st.fields.len() - (if dtor { 1 } else { 0 }) st.fields.len() - (if dtor { 1 } else { 0 })
} }
General(_, ref cases) => cases[discr].fields.len() - 1, General(_, ref cases) => cases.get(discr as uint).fields.len() - 1,
NullablePointer{ nonnull: ref nonnull, nndiscr, NullablePointer{ nonnull: ref nonnull, nndiscr,
nullfields: ref nullfields, .. } => { nullfields: ref nullfields, .. } => {
if discr == nndiscr { nonnull.fields.len() } else { nullfields.len() } if discr == nndiscr { nonnull.fields.len() } else { nullfields.len() }
@ -639,11 +654,11 @@ pub fn deref_ty(ccx: &CrateContext, r: &Repr) -> ty::t {
ccx.sess.bug("deref of c-like enum") ccx.sess.bug("deref of c-like enum")
} }
Univariant(ref st, _) => { Univariant(ref st, _) => {
st.fields[0] *st.fields.get(0)
} }
General(_, ref cases) => { General(_, ref cases) => {
assert!(cases.len() == 1); assert!(cases.len() == 1);
cases[0].fields[0] *cases.get(0).fields.get(0)
} }
NullablePointer{ .. } => { NullablePointer{ .. } => {
ccx.sess.bug("deref of nullable ptr") ccx.sess.bug("deref of nullable ptr")
@ -666,7 +681,7 @@ pub fn trans_field_ptr(bcx: &Block, r: &Repr, val: ValueRef, discr: Disr,
struct_field_ptr(bcx, st, val, ix, false) struct_field_ptr(bcx, st, val, ix, false)
} }
General(_, ref cases) => { General(_, ref cases) => {
struct_field_ptr(bcx, &cases[discr], val, ix + 1, true) struct_field_ptr(bcx, cases.get(discr as uint), val, ix + 1, true)
} }
NullablePointer{ nonnull: ref nonnull, nullfields: ref nullfields, NullablePointer{ nonnull: ref nonnull, nullfields: ref nullfields,
nndiscr, .. } => { nndiscr, .. } => {
@ -675,7 +690,7 @@ pub fn trans_field_ptr(bcx: &Block, r: &Repr, val: ValueRef, discr: Disr,
} else { } else {
// The unit-like case might have a nonzero number of unit-like fields. // The unit-like case might have a nonzero number of unit-like fields.
// (e.g., Result or Either with () as one side.) // (e.g., Result or Either with () as one side.)
let ty = type_of::type_of(bcx.ccx(), nullfields[ix]); let ty = type_of::type_of(bcx.ccx(), *nullfields.get(ix));
assert_eq!(machine::llsize_of_alloc(bcx.ccx(), ty), 0); assert_eq!(machine::llsize_of_alloc(bcx.ccx(), ty), 0);
// The contents of memory at this pointer can't matter, but use // The contents of memory at this pointer can't matter, but use
// the value that's "reasonable" in case of pointer comparison. // the value that's "reasonable" in case of pointer comparison.
@ -691,7 +706,7 @@ fn struct_field_ptr(bcx: &Block, st: &Struct, val: ValueRef, ix: uint,
let val = if needs_cast { let val = if needs_cast {
let fields = st.fields.map(|&ty| type_of::type_of(ccx, ty)); let fields = st.fields.map(|&ty| type_of::type_of(ccx, ty));
let real_ty = Type::struct_(fields, st.packed); let real_ty = Type::struct_(fields.as_slice(), st.packed);
PointerCast(bcx, val, real_ty.ptr_to()) PointerCast(bcx, val, real_ty.ptr_to())
} else { } else {
val val
@ -738,27 +753,40 @@ pub fn trans_const(ccx: &CrateContext, r: &Repr, discr: Disr,
C_integral(ll_inttype(ccx, ity), discr as u64, true) C_integral(ll_inttype(ccx, ity), discr as u64, true)
} }
General(ity, ref cases) => { General(ity, ref cases) => {
let case = &cases[discr]; let case = cases.get(discr as uint);
let max_sz = cases.iter().map(|x| x.size).max().unwrap(); let max_sz = cases.iter().map(|x| x.size).max().unwrap();
let lldiscr = C_integral(ll_inttype(ccx, ity), discr as u64, true); let lldiscr = C_integral(ll_inttype(ccx, ity), discr as u64, true);
let contents = build_const_struct(ccx, case, ~[lldiscr] + vals); let contents = build_const_struct(ccx,
C_struct(contents + &[padding(max_sz - case.size)], false) case,
vec_ng::append(
vec!(lldiscr),
vals).as_slice());
C_struct(vec_ng::append(
contents,
&[padding(max_sz - case.size)]).as_slice(),
false)
} }
Univariant(ref st, _dro) => { Univariant(ref st, _dro) => {
assert!(discr == 0); assert!(discr == 0);
let contents = build_const_struct(ccx, st, vals); let contents = build_const_struct(ccx, st, vals);
C_struct(contents, st.packed) C_struct(contents.as_slice(), st.packed)
} }
NullablePointer{ nonnull: ref nonnull, nndiscr, .. } => { NullablePointer{ nonnull: ref nonnull, nndiscr, .. } => {
if discr == nndiscr { if discr == nndiscr {
C_struct(build_const_struct(ccx, nonnull, vals), false) C_struct(build_const_struct(ccx,
nonnull,
vals.as_slice()).as_slice(),
false)
} else { } else {
let vals = nonnull.fields.map(|&ty| { let vals = nonnull.fields.map(|&ty| {
// Always use null even if it's not the `ptrfield`th // Always use null even if it's not the `ptrfield`th
// field; see #8506. // field; see #8506.
C_null(type_of::sizing_type_of(ccx, ty)) C_null(type_of::sizing_type_of(ccx, ty))
}); }).move_iter().collect::<Vec<ValueRef> >();
C_struct(build_const_struct(ccx, nonnull, vals), false) C_struct(build_const_struct(ccx,
nonnull,
vals.as_slice()).as_slice(),
false)
} }
} }
} }
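The `General` arm above builds its constant as "discriminant first, then the case's field values" via `vec_ng::append(vec!(lldiscr), vals)`. A small sketch of that construction in present-day Rust, with invented names and plain `u64` standing in for `ValueRef`; `extend_from_slice` is the modern counterpart of appending a slice onto an owned Vec.

// Hedged sketch: prepend a discriminant to a slice of field values.
fn const_struct_fields(lldiscr: u64, vals: &[u64]) -> Vec<u64> {
    let mut contents = vec![lldiscr]; // one-element Vec holding the discriminant
    contents.extend_from_slice(vals); // append the per-case field values
    contents
}

fn main() {
    let contents = const_struct_fields(2, &[10, 20, 30]);
    assert_eq!(contents, vec![2, 10, 20, 30]);
    println!("{:?}", contents);
}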
@ -775,11 +803,11 @@ pub fn trans_const(ccx: &CrateContext, r: &Repr, discr: Disr,
* will read the wrong memory. * will read the wrong memory.
*/ */
fn build_const_struct(ccx: &CrateContext, st: &Struct, vals: &[ValueRef]) fn build_const_struct(ccx: &CrateContext, st: &Struct, vals: &[ValueRef])
-> ~[ValueRef] { -> Vec<ValueRef> {
assert_eq!(vals.len(), st.fields.len()); assert_eq!(vals.len(), st.fields.len());
let mut offset = 0; let mut offset = 0;
let mut cfields = ~[]; let mut cfields = Vec::new();
for (i, &ty) in st.fields.iter().enumerate() { for (i, &ty) in st.fields.iter().enumerate() {
let llty = type_of::sizing_type_of(ccx, ty); let llty = type_of::sizing_type_of(ccx, ty);
let type_align = machine::llalign_of_min(ccx, llty) let type_align = machine::llalign_of_min(ccx, llty)

View File

@ -12,8 +12,6 @@
# Translation of inline assembly. # Translation of inline assembly.
*/ */
use std::c_str::ToCStr;
use lib; use lib;
use middle::trans::build::*; use middle::trans::build::*;
use middle::trans::callee; use middle::trans::callee;
@ -22,9 +20,10 @@ use middle::trans::cleanup;
use middle::trans::cleanup::CleanupMethods; use middle::trans::cleanup::CleanupMethods;
use middle::trans::expr; use middle::trans::expr;
use middle::trans::type_of; use middle::trans::type_of;
use middle::trans::type_::Type; use middle::trans::type_::Type;
use std::c_str::ToCStr;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
// Take an inline assembly expression and splat it out via LLVM // Take an inline assembly expression and splat it out via LLVM
@ -32,8 +31,8 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
-> &'a Block<'a> { -> &'a Block<'a> {
let fcx = bcx.fcx; let fcx = bcx.fcx;
let mut bcx = bcx; let mut bcx = bcx;
let mut constraints = ~[]; let mut constraints = Vec::new();
let mut output_types = ~[]; let mut output_types = Vec::new();
let temp_scope = fcx.push_custom_cleanup_scope(); let temp_scope = fcx.push_custom_cleanup_scope();
@ -88,9 +87,9 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
let output_type = if num_outputs == 0 { let output_type = if num_outputs == 0 {
Type::void() Type::void()
} else if num_outputs == 1 { } else if num_outputs == 1 {
output_types[0] *output_types.get(0)
} else { } else {
Type::struct_(output_types, false) Type::struct_(output_types.as_slice(), false)
}; };
let dialect = match ia.dialect { let dialect = match ia.dialect {
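Lines like `*output_types.get(0)` above reflect that, in this era of the `Vec` API, `get(i)` returned `&T` directly (hence the leading `*`). A minimal sketch of the two modern spellings of the same lookup, with invented names; in today's Rust `get` returns `Option<&T>` and plain indexing does the panicking lookup.

// Hedged sketch: indexing an owned Vec of output types, modern Rust.
fn first_output_type(output_types: &[u32]) -> u32 {
    // Panicking form, the closest modern analogue of `*output_types.get(0)`:
    let by_index = output_types[0];
    // Checked form, useful when the Vec may be empty:
    let by_get = *output_types.get(0).expect("at least one output type");
    assert_eq!(by_index, by_get);
    by_index
}

fn main() {
    let output_types = vec![7u32, 8, 9];
    println!("first = {}", first_output_type(&output_types));
}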

View File

@ -73,11 +73,12 @@ use util::sha2::Sha256;
use util::nodemap::NodeMap; use util::nodemap::NodeMap;
use arena::TypedArena; use arena::TypedArena;
use collections::HashMap;
use std::c_str::ToCStr; use std::c_str::ToCStr;
use std::cell::{Cell, RefCell}; use std::cell::{Cell, RefCell};
use collections::HashMap;
use std::libc::c_uint; use std::libc::c_uint;
use std::local_data; use std::local_data;
use std::vec_ng::Vec;
use syntax::abi::{X86, X86_64, Arm, Mips, Rust, RustIntrinsic, OsWin32}; use syntax::abi::{X86, X86_64, Arm, Mips, Rust, RustIntrinsic, OsWin32};
use syntax::ast_map::PathName; use syntax::ast_map::PathName;
use syntax::ast_util::{local_def, is_local}; use syntax::ast_util::{local_def, is_local};
@ -94,19 +95,19 @@ use time;
pub use middle::trans::context::task_llcx; pub use middle::trans::context::task_llcx;
local_data_key!(task_local_insn_key: ~[&'static str]) local_data_key!(task_local_insn_key: Vec<&'static str> )
pub fn with_insn_ctxt(blk: |&[&'static str]|) { pub fn with_insn_ctxt(blk: |&[&'static str]|) {
local_data::get(task_local_insn_key, |c| { local_data::get(task_local_insn_key, |c| {
match c { match c {
Some(ctx) => blk(*ctx), Some(ctx) => blk(ctx.as_slice()),
None => () None => ()
} }
}) })
} }
pub fn init_insn_ctxt() { pub fn init_insn_ctxt() {
local_data::set(task_local_insn_key, ~[]); local_data::set(task_local_insn_key, Vec::new());
} }
pub struct _InsnCtxt { _x: () } pub struct _InsnCtxt { _x: () }
@ -543,7 +544,7 @@ pub fn get_res_dtor(ccx: @CrateContext,
let tsubsts = ty::substs { let tsubsts = ty::substs {
regions: ty::ErasedRegions, regions: ty::ErasedRegions,
self_ty: None, self_ty: None,
tps: substs.to_owned() tps: Vec::from_slice(substs),
}; };
let vtables = typeck::check::vtable::trans_resolve_method(ccx.tcx, did.node, &tsubsts); let vtables = typeck::check::vtable::trans_resolve_method(ccx.tcx, did.node, &tsubsts);
@ -752,8 +753,8 @@ pub fn iter_structural_ty<'r,
match adt::trans_switch(cx, repr, av) { match adt::trans_switch(cx, repr, av) {
(_match::single, None) => { (_match::single, None) => {
cx = iter_variant(cx, repr, av, variants[0], cx = iter_variant(cx, repr, av, *variants.get(0),
substs.tps, f); substs.tps.as_slice(), f);
} }
(_match::switch, Some(lldiscrim_a)) => { (_match::switch, Some(lldiscrim_a)) => {
cx = f(cx, lldiscrim_a, ty::mk_int()); cx = f(cx, lldiscrim_a, ty::mk_int());
@ -775,8 +776,12 @@ pub fn iter_structural_ty<'r,
in iter_structural_ty") in iter_structural_ty")
} }
let variant_cx = let variant_cx =
iter_variant(variant_cx, repr, av, *variant, iter_variant(variant_cx,
substs.tps, |x,y,z| f(x,y,z)); repr,
av,
*variant,
substs.tps.as_slice(),
|x,y,z| f(x,y,z));
Br(variant_cx, next_cx.llbb); Br(variant_cx, next_cx.llbb);
} }
cx = next_cx; cx = next_cx;
@ -876,7 +881,11 @@ pub fn trans_external_path(ccx: &CrateContext, did: ast::DefId, t: ty::t) -> Val
match fn_ty.abis.for_target(ccx.sess.targ_cfg.os, match fn_ty.abis.for_target(ccx.sess.targ_cfg.os,
ccx.sess.targ_cfg.arch) { ccx.sess.targ_cfg.arch) {
Some(Rust) | Some(RustIntrinsic) => { Some(Rust) | Some(RustIntrinsic) => {
get_extern_rust_fn(ccx, fn_ty.sig.inputs, fn_ty.sig.output, name, did) get_extern_rust_fn(ccx,
fn_ty.sig.inputs.as_slice(),
fn_ty.sig.output,
name,
did)
} }
Some(..) | None => { Some(..) | None => {
let c = foreign::llvm_calling_convention(ccx, fn_ty.abis); let c = foreign::llvm_calling_convention(ccx, fn_ty.abis);
@ -889,7 +898,11 @@ pub fn trans_external_path(ccx: &CrateContext, did: ast::DefId, t: ty::t) -> Val
} }
} }
ty::ty_closure(ref f) => { ty::ty_closure(ref f) => {
get_extern_rust_fn(ccx, f.sig.inputs, f.sig.output, name, did) get_extern_rust_fn(ccx,
f.sig.inputs.as_slice(),
f.sig.output,
name,
did)
} }
_ => { _ => {
let llty = type_of(ccx, t); let llty = type_of(ccx, t);
@ -902,7 +915,7 @@ pub fn trans_external_path(ccx: &CrateContext, did: ast::DefId, t: ty::t) -> Val
pub fn invoke<'a>( pub fn invoke<'a>(
bcx: &'a Block<'a>, bcx: &'a Block<'a>,
llfn: ValueRef, llfn: ValueRef,
llargs: ~[ValueRef], llargs: Vec<ValueRef> ,
attributes: &[(uint, lib::llvm::Attribute)], attributes: &[(uint, lib::llvm::Attribute)],
call_info: Option<NodeInfo>) call_info: Option<NodeInfo>)
-> (ValueRef, &'a Block<'a>) { -> (ValueRef, &'a Block<'a>) {
@ -935,7 +948,7 @@ pub fn invoke<'a>(
let llresult = Invoke(bcx, let llresult = Invoke(bcx,
llfn, llfn,
llargs, llargs.as_slice(),
normal_bcx.llbb, normal_bcx.llbb,
landing_pad, landing_pad,
attributes); attributes);
@ -951,7 +964,7 @@ pub fn invoke<'a>(
None => debuginfo::clear_source_location(bcx.fcx) None => debuginfo::clear_source_location(bcx.fcx)
}; };
let llresult = Call(bcx, llfn, llargs, attributes); let llresult = Call(bcx, llfn, llargs.as_slice(), attributes);
return (llresult, bcx); return (llresult, bcx);
} }
} }
@ -1231,7 +1244,10 @@ pub fn new_fn_ctxt<'a>(ccx: @CrateContext,
let substd_output_type = match param_substs { let substd_output_type = match param_substs {
None => output_type, None => output_type,
Some(substs) => { Some(substs) => {
ty::subst_tps(ccx.tcx, substs.tys, substs.self_ty, output_type) ty::subst_tps(ccx.tcx,
substs.tys.as_slice(),
substs.self_ty,
output_type)
} }
}; };
let uses_outptr = type_of::return_uses_outptr(ccx, substd_output_type); let uses_outptr = type_of::return_uses_outptr(ccx, substd_output_type);
@ -1255,7 +1271,7 @@ pub fn new_fn_ctxt<'a>(ccx: @CrateContext,
block_arena: block_arena, block_arena: block_arena,
ccx: ccx, ccx: ccx,
debug_context: debug_context, debug_context: debug_context,
scopes: RefCell::new(~[]) scopes: RefCell::new(Vec::new())
}; };
if has_env { if has_env {
@ -1289,7 +1305,7 @@ pub fn init_function<'a>(
None => output_type, None => output_type,
Some(substs) => { Some(substs) => {
ty::subst_tps(fcx.ccx.tcx, ty::subst_tps(fcx.ccx.tcx,
substs.tys, substs.tys.as_slice(),
substs.self_ty, substs.self_ty,
output_type) output_type)
} }
@ -1331,7 +1347,7 @@ pub type LvalueDatum = datum::Datum<datum::Lvalue>;
// appropriate lvalue datums. // appropriate lvalue datums.
pub fn create_datums_for_fn_args(fcx: &FunctionContext, pub fn create_datums_for_fn_args(fcx: &FunctionContext,
arg_tys: &[ty::t]) arg_tys: &[ty::t])
-> ~[RvalueDatum] { -> Vec<RvalueDatum> {
let _icx = push_ctxt("create_datums_for_fn_args"); let _icx = push_ctxt("create_datums_for_fn_args");
// Return an array wrapping the ValueRefs that we get from // Return an array wrapping the ValueRefs that we get from
@ -1348,7 +1364,7 @@ fn copy_args_to_allocas<'a>(fcx: &FunctionContext<'a>,
arg_scope: cleanup::CustomScopeIndex, arg_scope: cleanup::CustomScopeIndex,
bcx: &'a Block<'a>, bcx: &'a Block<'a>,
args: &[ast::Arg], args: &[ast::Arg],
arg_datums: ~[RvalueDatum]) arg_datums: Vec<RvalueDatum> )
-> &'a Block<'a> { -> &'a Block<'a> {
debug!("copy_args_to_allocas"); debug!("copy_args_to_allocas");
@ -1472,7 +1488,7 @@ pub fn trans_closure<'a>(ccx: @CrateContext,
// Set up arguments to the function. // Set up arguments to the function.
let arg_tys = ty::ty_fn_args(node_id_type(bcx, id)); let arg_tys = ty::ty_fn_args(node_id_type(bcx, id));
let arg_datums = create_datums_for_fn_args(&fcx, arg_tys); let arg_datums = create_datums_for_fn_args(&fcx, arg_tys.as_slice());
bcx = copy_args_to_allocas(&fcx, bcx = copy_args_to_allocas(&fcx,
arg_scope, arg_scope,
@ -1583,7 +1599,7 @@ fn trans_enum_variant_or_tuple_like_struct(ccx: @CrateContext,
let no_substs: &[ty::t] = []; let no_substs: &[ty::t] = [];
let ty_param_substs = match param_substs { let ty_param_substs = match param_substs {
Some(ref substs) => { Some(ref substs) => {
let v: &[ty::t] = substs.tys; let v: &[ty::t] = substs.tys.as_slice();
v v
} }
None => { None => {
@ -1612,7 +1628,7 @@ fn trans_enum_variant_or_tuple_like_struct(ccx: @CrateContext,
let arg_tys = ty::ty_fn_args(ctor_ty); let arg_tys = ty::ty_fn_args(ctor_ty);
let arg_datums = create_datums_for_fn_args(&fcx, arg_tys); let arg_datums = create_datums_for_fn_args(&fcx, arg_tys.as_slice());
let bcx = fcx.entry_bcx.get().unwrap(); let bcx = fcx.entry_bcx.get().unwrap();
@ -1633,10 +1649,10 @@ fn trans_enum_variant_or_tuple_like_struct(ccx: @CrateContext,
} }
pub fn trans_enum_def(ccx: @CrateContext, enum_definition: &ast::EnumDef, pub fn trans_enum_def(ccx: @CrateContext, enum_definition: &ast::EnumDef,
id: ast::NodeId, vi: @~[@ty::VariantInfo], id: ast::NodeId, vi: @Vec<@ty::VariantInfo> ,
i: &mut uint) { i: &mut uint) {
for &variant in enum_definition.variants.iter() { for &variant in enum_definition.variants.iter() {
let disr_val = vi[*i].disr_val; let disr_val = vi.get(*i).disr_val;
*i += 1; *i += 1;
match variant.node.kind { match variant.node.kind {
@ -1801,7 +1817,11 @@ fn register_fn(ccx: @CrateContext,
_ => fail!("expected bare rust fn or an intrinsic") _ => fail!("expected bare rust fn or an intrinsic")
}; };
let llfn = decl_rust_fn(ccx, false, f.sig.inputs, f.sig.output, sym); let llfn = decl_rust_fn(ccx,
false,
f.sig.inputs.as_slice(),
f.sig.output,
sym);
finish_register_fn(ccx, sp, sym, node_id, llfn); finish_register_fn(ccx, sp, sym, node_id, llfn);
llfn llfn
} }
@ -1876,25 +1896,27 @@ pub fn create_entry_wrapper(ccx: @CrateContext,
llvm::LLVMBuildPointerCast(bld, rust_main, Type::i8p().to_ref(), buf) llvm::LLVMBuildPointerCast(bld, rust_main, Type::i8p().to_ref(), buf)
}); });
~[ vec!(
opaque_rust_main, opaque_rust_main,
llvm::LLVMGetParam(llfn, 0), llvm::LLVMGetParam(llfn, 0),
llvm::LLVMGetParam(llfn, 1) llvm::LLVMGetParam(llfn, 1)
] )
}; };
(start_fn, args) (start_fn, args)
} else { } else {
debug!("using user-defined start fn"); debug!("using user-defined start fn");
let args = ~[ let args = vec!(
llvm::LLVMGetParam(llfn, 0 as c_uint), llvm::LLVMGetParam(llfn, 0 as c_uint),
llvm::LLVMGetParam(llfn, 1 as c_uint) llvm::LLVMGetParam(llfn, 1 as c_uint)
]; );
(rust_main, args) (rust_main, args)
}; };
let result = llvm::LLVMBuildCall(bld, start_fn, let result = llvm::LLVMBuildCall(bld,
args.as_ptr(), args.len() as c_uint, start_fn,
args.as_ptr(),
args.len() as c_uint,
noname()); noname());
llvm::LLVMBuildRet(bld, result); llvm::LLVMBuildRet(bld, result);
@ -2450,13 +2472,13 @@ pub fn create_module_map(ccx: &CrateContext) -> (ValueRef, uint) {
} }
}); });
lib::llvm::SetLinkage(map, lib::llvm::InternalLinkage); lib::llvm::SetLinkage(map, lib::llvm::InternalLinkage);
let mut elts: ~[ValueRef] = ~[]; let mut elts: Vec<ValueRef> = Vec::new();
// This is not ideal, but the borrow checker doesn't // This is not ideal, but the borrow checker doesn't
// like the multiple borrows. At least, it doesn't // like the multiple borrows. At least, it doesn't
// like them on the current snapshot. (2013-06-14) // like them on the current snapshot. (2013-06-14)
let keys = { let keys = {
let mut keys = ~[]; let mut keys = Vec::new();
let module_data = ccx.module_data.borrow(); let module_data = ccx.module_data.borrow();
for (k, _) in module_data.get().iter() { for (k, _) in module_data.get().iter() {
keys.push(k.clone()); keys.push(k.clone());
@ -2476,7 +2498,7 @@ pub fn create_module_map(ccx: &CrateContext) -> (ValueRef, uint) {
elts.push(elt); elts.push(elt);
} }
unsafe { unsafe {
llvm::LLVMSetInitializer(map, C_array(elttype, elts)); llvm::LLVMSetInitializer(map, C_array(elttype, elts.as_slice()));
} }
return (map, keys.len()) return (map, keys.len())
} }
@ -2526,7 +2548,7 @@ pub fn decl_crate_map(sess: session::Session, mapmeta: LinkMeta,
} }
pub fn fill_crate_map(ccx: @CrateContext, map: ValueRef) { pub fn fill_crate_map(ccx: @CrateContext, map: ValueRef) {
let mut subcrates: ~[ValueRef] = ~[]; let mut subcrates: Vec<ValueRef> = Vec::new();
let mut i = 1; let mut i = 1;
let cstore = ccx.sess.cstore; let cstore = ccx.sess.cstore;
while cstore.have_crate_data(i) { while cstore.have_crate_data(i) {
@ -2564,7 +2586,8 @@ pub fn fill_crate_map(ccx: @CrateContext, map: ValueRef) {
}); });
lib::llvm::SetLinkage(vec_elements, lib::llvm::InternalLinkage); lib::llvm::SetLinkage(vec_elements, lib::llvm::InternalLinkage);
llvm::LLVMSetInitializer(vec_elements, C_array(ccx.int_type, subcrates)); llvm::LLVMSetInitializer(vec_elements,
C_array(ccx.int_type, subcrates.as_slice()));
let (mod_map, mod_count) = create_module_map(ccx); let (mod_map, mod_count) = create_module_map(ccx);
llvm::LLVMSetInitializer(map, C_struct( llvm::LLVMSetInitializer(map, C_struct(
@ -2600,11 +2623,11 @@ pub fn crate_ctxt_to_encode_parms<'r>(cx: &'r CrateContext, ie: encoder::EncodeI
} }
} }
pub fn write_metadata(cx: &CrateContext, krate: &ast::Crate) -> ~[u8] { pub fn write_metadata(cx: &CrateContext, krate: &ast::Crate) -> Vec<u8> {
use flate; use flate;
if !cx.sess.building_library.get() { if !cx.sess.building_library.get() {
return ~[] return Vec::new()
} }
let encode_inlined_item: encoder::EncodeInlinedItem = let encode_inlined_item: encoder::EncodeInlinedItem =
@ -2613,7 +2636,7 @@ pub fn write_metadata(cx: &CrateContext, krate: &ast::Crate) -> ~[u8] {
let encode_parms = crate_ctxt_to_encode_parms(cx, encode_inlined_item); let encode_parms = crate_ctxt_to_encode_parms(cx, encode_inlined_item);
let metadata = encoder::encode_metadata(encode_parms, krate); let metadata = encoder::encode_metadata(encode_parms, krate);
let compressed = encoder::metadata_encoding_version + let compressed = encoder::metadata_encoding_version +
flate::deflate_bytes(metadata).as_slice(); flate::deflate_bytes(metadata.as_slice()).as_slice();
let llmeta = C_bytes(compressed); let llmeta = C_bytes(compressed);
let llconst = C_struct([llmeta], false); let llconst = C_struct([llmeta], false);
let name = format!("rust_metadata_{}_{}_{}", cx.link_meta.crateid.name, let name = format!("rust_metadata_{}_{}_{}", cx.link_meta.crateid.name,
@ -2744,12 +2767,12 @@ pub fn trans_crate(sess: session::Session,
let link_meta = ccx.link_meta.clone(); let link_meta = ccx.link_meta.clone();
let llmod = ccx.llmod; let llmod = ccx.llmod;
let mut reachable = { let mut reachable: Vec<~str> = {
let reachable_map = ccx.reachable.borrow(); let reachable_map = ccx.reachable.borrow();
reachable_map.get().iter().filter_map(|id| { reachable_map.get().iter().filter_map(|id| {
let item_symbols = ccx.item_symbols.borrow(); let item_symbols = ccx.item_symbols.borrow();
item_symbols.get().find(id).map(|s| s.to_owned()) item_symbols.get().find(id).map(|s| s.to_owned())
}).to_owned_vec() }).collect()
}; };
// Make sure that some other crucial symbols are not eliminated from the // Make sure that some other crucial symbols are not eliminated from the
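The `reachable` change above swaps `to_owned_vec()` for a plain `.collect()` at the end of a `filter_map` chain. A self-contained sketch of that shape in present-day Rust, using invented data (`u32` node ids, `String` symbols) rather than the compiler's real types.

// Hedged sketch: filter_map over a symbol table, collect hits into a Vec.
use std::collections::{HashMap, HashSet};

fn reachable_symbols(
    reachable: &HashSet<u32>,
    item_symbols: &HashMap<u32, String>,
) -> Vec<String> {
    reachable
        .iter()
        .filter_map(|id| item_symbols.get(id).cloned()) // keep only ids with a symbol
        .collect() // the adaptor chain ends in an owned Vec<String>
}

fn main() {
    let reachable: HashSet<u32> = [1u32, 2, 3].iter().copied().collect();
    let mut item_symbols = HashMap::new();
    item_symbols.insert(1u32, "main".to_string());
    item_symbols.insert(3u32, "helper".to_string());
    let mut syms = reachable_symbols(&reachable, &item_symbols);
    syms.sort();
    println!("{:?}", syms); // ["helper", "main"]
}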

View File

@ -17,8 +17,10 @@ use middle::trans::base;
use middle::trans::common::*; use middle::trans::common::*;
use middle::trans::machine::llalign_of_pref; use middle::trans::machine::llalign_of_pref;
use middle::trans::type_::Type; use middle::trans::type_::Type;
use std::libc::{c_uint, c_ulonglong, c_char};
use collections::HashMap; use collections::HashMap;
use std::libc::{c_uint, c_ulonglong, c_char};
use std::vec_ng::Vec;
use syntax::codemap::Span; use syntax::codemap::Span;
pub struct Builder<'a> { pub struct Builder<'a> {
@ -540,9 +542,9 @@ impl<'a> Builder<'a> {
} }
self.inbounds_gep(base, small_vec.slice(0, ixs.len())) self.inbounds_gep(base, small_vec.slice(0, ixs.len()))
} else { } else {
let v = ixs.iter().map(|i| C_i32(*i as i32)).collect::<~[ValueRef]>(); let v = ixs.iter().map(|i| C_i32(*i as i32)).collect::<Vec<ValueRef> >();
self.count_insn("gepi"); self.count_insn("gepi");
self.inbounds_gep(base, v) self.inbounds_gep(base, v.as_slice())
} }
} }

View File

@ -16,6 +16,7 @@ use middle::trans::cabi_x86_64;
use middle::trans::cabi_arm; use middle::trans::cabi_arm;
use middle::trans::cabi_mips; use middle::trans::cabi_mips;
use middle::trans::type_::Type; use middle::trans::type_::Type;
use std::vec_ng::Vec;
use syntax::abi::{X86, X86_64, Arm, Mips}; use syntax::abi::{X86, X86_64, Arm, Mips};
#[deriving(Clone, Eq)] #[deriving(Clone, Eq)]
@ -83,7 +84,7 @@ impl ArgType {
/// comments are reverse-engineered and may be inaccurate. -NDM /// comments are reverse-engineered and may be inaccurate. -NDM
pub struct FnType { pub struct FnType {
/// The LLVM types of each argument. /// The LLVM types of each argument.
arg_tys: ~[ArgType], arg_tys: Vec<ArgType> ,
/// LLVM return type. /// LLVM return type.
ret_ty: ArgType, ret_ty: ArgType,

View File

@ -14,11 +14,11 @@ use lib::llvm::{llvm, Integer, Pointer, Float, Double, Struct, Array};
use lib::llvm::StructRetAttribute; use lib::llvm::StructRetAttribute;
use middle::trans::cabi::{FnType, ArgType}; use middle::trans::cabi::{FnType, ArgType};
use middle::trans::context::CrateContext; use middle::trans::context::CrateContext;
use middle::trans::type_::Type; use middle::trans::type_::Type;
use std::cmp; use std::cmp;
use std::option::{None, Some}; use std::option::{None, Some};
use std::vec_ng::Vec;
fn align_up_to(off: uint, a: uint) -> uint { fn align_up_to(off: uint, a: uint) -> uint {
return (off + a - 1u) / a * a; return (off + a - 1u) / a * a;
@ -131,7 +131,7 @@ pub fn compute_abi_info(_ccx: &CrateContext,
atys: &[Type], atys: &[Type],
rty: Type, rty: Type,
ret_def: bool) -> FnType { ret_def: bool) -> FnType {
let mut arg_tys = ~[]; let mut arg_tys = Vec::new();
for &aty in atys.iter() { for &aty in atys.iter() {
let ty = classify_arg_ty(aty); let ty = classify_arg_ty(aty);
arg_tys.push(ty); arg_tys.push(ty);

View File

@ -17,9 +17,10 @@ use lib::llvm::StructRetAttribute;
use middle::trans::context::CrateContext; use middle::trans::context::CrateContext;
use middle::trans::context::task_llcx; use middle::trans::context::task_llcx;
use middle::trans::cabi::*; use middle::trans::cabi::*;
use middle::trans::type_::Type; use middle::trans::type_::Type;
use std::vec_ng::Vec;
fn align_up_to(off: uint, a: uint) -> uint { fn align_up_to(off: uint, a: uint) -> uint {
return (off + a - 1u) / a * a; return (off + a - 1u) / a * a;
} }
@ -132,9 +133,9 @@ fn padding_ty(align: uint, offset: uint) -> Option<Type> {
return None; return None;
} }
fn coerce_to_int(size: uint) -> ~[Type] { fn coerce_to_int(size: uint) -> Vec<Type> {
let int_ty = Type::i32(); let int_ty = Type::i32();
let mut args = ~[]; let mut args = Vec::new();
let mut n = size / 32; let mut n = size / 32;
while n > 0 { while n > 0 {
@ -155,7 +156,7 @@ fn coerce_to_int(size: uint) -> ~[Type] {
fn struct_ty(ty: Type) -> Type { fn struct_ty(ty: Type) -> Type {
let size = ty_size(ty) * 8; let size = ty_size(ty) * 8;
let fields = coerce_to_int(size); let fields = coerce_to_int(size);
return Type::struct_(fields, false); return Type::struct_(fields.as_slice(), false);
} }
pub fn compute_abi_info(_ccx: &CrateContext, pub fn compute_abi_info(_ccx: &CrateContext,
@ -169,7 +170,7 @@ pub fn compute_abi_info(_ccx: &CrateContext,
}; };
let sret = ret_ty.is_indirect(); let sret = ret_ty.is_indirect();
let mut arg_tys = ~[]; let mut arg_tys = Vec::new();
let mut offset = if sret { 4 } else { 0 }; let mut offset = if sret { 4 } else { 0 };
for aty in atys.iter() { for aty in atys.iter() {

View File

@ -15,12 +15,13 @@ use super::cabi::*;
use super::common::*; use super::common::*;
use super::machine::*; use super::machine::*;
use middle::trans::type_::Type; use middle::trans::type_::Type;
use std::vec_ng::Vec;
pub fn compute_abi_info(ccx: &CrateContext, pub fn compute_abi_info(ccx: &CrateContext,
atys: &[Type], atys: &[Type],
rty: Type, rty: Type,
ret_def: bool) -> FnType { ret_def: bool) -> FnType {
let mut arg_tys = ~[]; let mut arg_tys = Vec::new();
let ret_ty; let ret_ty;
if !ret_def { if !ret_def {

View File

@ -18,11 +18,10 @@ use lib::llvm::{Struct, Array, Attribute};
use lib::llvm::{StructRetAttribute, ByValAttribute}; use lib::llvm::{StructRetAttribute, ByValAttribute};
use middle::trans::cabi::*; use middle::trans::cabi::*;
use middle::trans::context::CrateContext; use middle::trans::context::CrateContext;
use middle::trans::type_::Type; use middle::trans::type_::Type;
use std::cmp; use std::cmp;
use std::vec; use std::vec_ng::Vec;
#[deriving(Clone, Eq)] #[deriving(Clone, Eq)]
enum RegClass { enum RegClass {
@ -84,7 +83,7 @@ impl<'a> ClassList for &'a [RegClass] {
} }
} }
fn classify_ty(ty: Type) -> ~[RegClass] { fn classify_ty(ty: Type) -> Vec<RegClass> {
fn align(off: uint, ty: Type) -> uint { fn align(off: uint, ty: Type) -> uint {
let a = ty_align(ty); let a = ty_align(ty);
return (off + a - 1u) / a * a; return (off + a - 1u) / a * a;
@ -220,7 +219,7 @@ fn classify_ty(ty: Type) -> ~[RegClass] {
unify(cls, ix + off / 8u, SSEDs); unify(cls, ix + off / 8u, SSEDs);
} }
Struct => { Struct => {
classify_struct(ty.field_types(), cls, ix, off); classify_struct(ty.field_types().as_slice(), cls, ix, off);
} }
Array => { Array => {
let len = ty.array_length(); let len = ty.array_length();
@ -282,13 +281,13 @@ fn classify_ty(ty: Type) -> ~[RegClass] {
} }
let words = (ty_size(ty) + 7) / 8; let words = (ty_size(ty) + 7) / 8;
let mut cls = vec::from_elem(words, NoClass); let mut cls = Vec::from_elem(words, NoClass);
if words > 4 { if words > 4 {
all_mem(cls); all_mem(cls.as_mut_slice());
return cls; return cls;
} }
classify(ty, cls, 0, 0); classify(ty, cls.as_mut_slice(), 0, 0);
fixup(ty, cls); fixup(ty, cls.as_mut_slice());
return cls; return cls;
} }
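The hunk above replaces `vec::from_elem(words, NoClass)` with `Vec::from_elem` and threads the buffer through helpers as `as_mut_slice()`. A small sketch of the same shape in present-day Rust, where the construction is spelled `vec![elem; n]` and a `&mut Vec<T>` coerces to `&mut [T]`; the two-variant `RegClass` here is a simplification invented for the example.

// Hedged sketch: fill a fixed-size classification buffer, then mutate it
// through a slice borrow, mirroring `all_mem(cls.as_mut_slice())` above.
#[derive(Clone, Copy, Debug, PartialEq)]
enum RegClass {
    NoClass,
    Memory,
}

fn all_mem(cls: &mut [RegClass]) {
    // Mark every word as memory class (stand-in for the real fixup logic).
    for c in cls.iter_mut() {
        *c = RegClass::Memory;
    }
}

fn main() {
    let words = 3;
    let mut cls = vec![RegClass::NoClass; words]; // modern Vec::from_elem
    all_mem(&mut cls); // &mut Vec<RegClass> coerces to &mut [RegClass]
    assert!(cls.iter().all(|c| *c == RegClass::Memory));
    println!("{:?}", cls);
}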
@ -304,7 +303,7 @@ fn llreg_ty(cls: &[RegClass]) -> Type {
return len; return len;
} }
let mut tys = ~[]; let mut tys = Vec::new();
let mut i = 0u; let mut i = 0u;
let e = cls.len(); let e = cls.len();
while i < e { while i < e {
@ -329,7 +328,7 @@ fn llreg_ty(cls: &[RegClass]) -> Type {
} }
i += 1u; i += 1u;
} }
return Type::struct_(tys, false); return Type::struct_(tys.as_slice(), false);
} }
pub fn compute_abi_info(_ccx: &CrateContext, pub fn compute_abi_info(_ccx: &CrateContext,
@ -342,17 +341,20 @@ pub fn compute_abi_info(_ccx: &CrateContext,
-> ArgType { -> ArgType {
if !ty.is_reg_ty() { if !ty.is_reg_ty() {
let cls = classify_ty(ty); let cls = classify_ty(ty);
if is_mem_cls(cls) { if is_mem_cls(cls.as_slice()) {
ArgType::indirect(ty, Some(attr)) ArgType::indirect(ty, Some(attr))
} else { } else {
ArgType::direct(ty, Some(llreg_ty(cls)), None, None) ArgType::direct(ty,
Some(llreg_ty(cls.as_slice())),
None,
None)
} }
} else { } else {
ArgType::direct(ty, None, None, None) ArgType::direct(ty, None, None, None)
} }
} }
let mut arg_tys = ~[]; let mut arg_tys = Vec::new();
for t in atys.iter() { for t in atys.iter() {
let ty = x86_64_ty(*t, |cls| cls.is_pass_byval(), ByValAttribute); let ty = x86_64_ty(*t, |cls| cls.is_pass_byval(), ByValAttribute);
arg_tys.push(ty); arg_tys.push(ty);

View File

@ -48,6 +48,8 @@ use util::ppaux::Repr;
use middle::trans::type_::Type; use middle::trans::type_::Type;
use std::vec_ng::Vec;
use std::vec_ng;
use syntax::ast; use syntax::ast;
use syntax::abi::AbiSet; use syntax::abi::AbiSet;
use syntax::ast_map; use syntax::ast_map;
@ -174,7 +176,12 @@ pub fn trans_fn_ref(bcx: &Block, def_id: ast::DefId,
debug!("trans_fn_ref(def_id={}, ref_id={:?}, type_params={}, vtables={})", debug!("trans_fn_ref(def_id={}, ref_id={:?}, type_params={}, vtables={})",
def_id.repr(bcx.tcx()), ref_id, type_params.repr(bcx.tcx()), def_id.repr(bcx.tcx()), ref_id, type_params.repr(bcx.tcx()),
vtables.repr(bcx.tcx())); vtables.repr(bcx.tcx()));
trans_fn_ref_with_vtables(bcx, def_id, ref_id, is_method, type_params, vtables) trans_fn_ref_with_vtables(bcx,
def_id,
ref_id,
is_method,
type_params.as_slice(),
vtables)
} }
fn trans_fn_ref_with_vtables_to_callee<'a>(bcx: &'a Block<'a>, fn trans_fn_ref_with_vtables_to_callee<'a>(bcx: &'a Block<'a>,
@ -218,10 +225,11 @@ fn resolve_default_method_vtables(bcx: &Block,
vtables.len() - num_method_vtables; vtables.len() - num_method_vtables;
vtables.tailn(num_impl_type_parameters).to_owned() vtables.tailn(num_impl_type_parameters).to_owned()
}, },
None => vec::from_elem(num_method_vtables, @~[]) None => vec::from_elem(num_method_vtables, @Vec::new())
}; };
let param_vtables = @(*trait_vtables_fixed + method_vtables); let param_vtables = @(vec_ng::append((*trait_vtables_fixed).clone(),
method_vtables));
let self_vtables = resolve_param_vtables_under_param_substs( let self_vtables = resolve_param_vtables_under_param_substs(
bcx.tcx(), param_substs, impl_res.self_vtables); bcx.tcx(), param_substs, impl_res.self_vtables);
@ -272,7 +280,7 @@ pub fn trans_fn_ref_with_vtables(
let substs = ty::substs { regions: ty::ErasedRegions, let substs = ty::substs { regions: ty::ErasedRegions,
self_ty: None, self_ty: None,
tps: /*bad*/ type_params.to_owned() }; tps: /*bad*/ Vec::from_slice(type_params) };
// Load the info for the appropriate trait if necessary. // Load the info for the appropriate trait if necessary.
match ty::trait_of_method(tcx, def_id) { match ty::trait_of_method(tcx, def_id) {
@ -640,7 +648,7 @@ pub fn trans_call_inner<'a>(
// written in opt_llretslot (if it is Some) or `llresult` will be // written in opt_llretslot (if it is Some) or `llresult` will be
// set appropriately (otherwise). // set appropriately (otherwise).
if is_rust_fn { if is_rust_fn {
let mut llargs = ~[]; let mut llargs = Vec::new();
// Push the out-pointer if we use an out-pointer for this // Push the out-pointer if we use an out-pointer for this
// return type, otherwise push "undef". // return type, otherwise push "undef".
@ -666,7 +674,7 @@ pub fn trans_call_inner<'a>(
// available, so we have to apply any attributes with ABI // available, so we have to apply any attributes with ABI
// implications directly to the call instruction. Right now, // implications directly to the call instruction. Right now,
// the only attribute we need to worry about is `sret`. // the only attribute we need to worry about is `sret`.
let mut attrs = ~[]; let mut attrs = Vec::new();
if type_of::return_uses_outptr(ccx, ret_ty) { if type_of::return_uses_outptr(ccx, ret_ty) {
attrs.push((1, StructRetAttribute)); attrs.push((1, StructRetAttribute));
} }
@ -683,7 +691,11 @@ pub fn trans_call_inner<'a>(
} }
// Invoke the actual rust fn and update bcx/llresult. // Invoke the actual rust fn and update bcx/llresult.
let (llret, b) = base::invoke(bcx, llfn, llargs, attrs, call_info); let (llret, b) = base::invoke(bcx,
llfn,
llargs,
attrs.as_slice(),
call_info);
bcx = b; bcx = b;
llresult = llret; llresult = llret;
@ -704,7 +716,7 @@ pub fn trans_call_inner<'a>(
// they are always Rust fns. // they are always Rust fns.
assert!(dest.is_some()); assert!(dest.is_some());
let mut llargs = ~[]; let mut llargs = Vec::new();
bcx = trans_args(bcx, args, callee_ty, &mut llargs, bcx = trans_args(bcx, args, callee_ty, &mut llargs,
cleanup::CustomScope(arg_cleanup_scope), false); cleanup::CustomScope(arg_cleanup_scope), false);
fcx.pop_custom_cleanup_scope(arg_cleanup_scope); fcx.pop_custom_cleanup_scope(arg_cleanup_scope);
@ -712,8 +724,12 @@ pub fn trans_call_inner<'a>(
ArgExprs(a) => a.iter().map(|x| expr_ty(bcx, *x)).collect(), ArgExprs(a) => a.iter().map(|x| expr_ty(bcx, *x)).collect(),
_ => fail!("expected arg exprs.") _ => fail!("expected arg exprs.")
}; };
bcx = foreign::trans_native_call(bcx, callee_ty, bcx = foreign::trans_native_call(bcx,
llfn, opt_llretslot.unwrap(), llargs, arg_tys); callee_ty,
llfn,
opt_llretslot.unwrap(),
llargs.as_slice(),
arg_tys);
} }
// If the caller doesn't care about the result of this fn call, // If the caller doesn't care about the result of this fn call,
@ -746,7 +762,7 @@ pub enum CallArgs<'a> {
fn trans_args<'a>(cx: &'a Block<'a>, fn trans_args<'a>(cx: &'a Block<'a>,
args: CallArgs, args: CallArgs,
fn_ty: ty::t, fn_ty: ty::t,
llargs: &mut ~[ValueRef], llargs: &mut Vec<ValueRef> ,
arg_cleanup_scope: cleanup::ScopeId, arg_cleanup_scope: cleanup::ScopeId,
ignore_self: bool) ignore_self: bool)
-> &'a Block<'a> { -> &'a Block<'a> {
@ -770,7 +786,7 @@ fn trans_args<'a>(cx: &'a Block<'a>,
assert!(variadic); assert!(variadic);
expr_ty_adjusted(cx, *arg_expr) expr_ty_adjusted(cx, *arg_expr)
} else { } else {
arg_tys[i] *arg_tys.get(i)
}; };
llargs.push(unpack_result!(bcx, { llargs.push(unpack_result!(bcx, {
trans_arg_expr(bcx, arg_ty, *arg_expr, trans_arg_expr(bcx, arg_ty, *arg_expr,
@ -783,7 +799,7 @@ fn trans_args<'a>(cx: &'a Block<'a>,
assert!(!variadic); assert!(!variadic);
llargs.push(unpack_result!(bcx, { llargs.push(unpack_result!(bcx, {
trans_arg_expr(bcx, arg_tys[0], arg_expr, trans_arg_expr(bcx, *arg_tys.get(0), arg_expr,
arg_cleanup_scope, arg_cleanup_scope,
DontAutorefArg) DontAutorefArg)
})); }));
@ -793,7 +809,7 @@ fn trans_args<'a>(cx: &'a Block<'a>,
assert_eq!(arg_tys.len(), 2); assert_eq!(arg_tys.len(), 2);
llargs.push(unpack_result!(bcx, { llargs.push(unpack_result!(bcx, {
trans_arg_expr(bcx, arg_tys[1], arg2_expr, trans_arg_expr(bcx, *arg_tys.get(1), arg2_expr,
arg_cleanup_scope, arg_cleanup_scope,
DoAutorefArg) DoAutorefArg)
})); }));
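`trans_args` above keeps its out-parameter style: the caller owns the argument Vec and the helper takes `&mut Vec<ValueRef>` and pushes into it. A sketch of that calling convention in present-day Rust, with `String` values and a `format!` call standing in for the real `trans_arg_expr`; everything here is invented for illustration.

// Hedged sketch: caller-owned Vec filled through a &mut out-parameter.
fn trans_args(args: &[i64], llargs: &mut Vec<String>) {
    for (i, a) in args.iter().enumerate() {
        // Stand-in for trans_arg_expr: render each argument to a value.
        llargs.push(format!("arg{} = {}", i, a));
    }
}

fn main() {
    let mut llargs = Vec::new(); // was `let mut llargs = ~[];`
    trans_args(&[1, 2, 3], &mut llargs);
    assert_eq!(llargs.len(), 3);
    println!("{:?}", llargs);
}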

View File

@ -349,7 +349,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
assert!(self.is_valid_custom_scope(custom_scope)); assert!(self.is_valid_custom_scope(custom_scope));
let mut scopes = self.scopes.borrow_mut(); let mut scopes = self.scopes.borrow_mut();
let scope = &mut scopes.get()[custom_scope.index]; let scope = scopes.get().get_mut(custom_scope.index);
scope.cleanups.push(cleanup); scope.cleanups.push(cleanup);
scope.clear_cached_exits(); scope.clear_cached_exits();
} }
@ -433,7 +433,7 @@ impl<'a> CleanupHelperMethods<'a> for FunctionContext<'a> {
fn is_valid_custom_scope(&self, custom_scope: CustomScopeIndex) -> bool { fn is_valid_custom_scope(&self, custom_scope: CustomScopeIndex) -> bool {
let scopes = self.scopes.borrow(); let scopes = self.scopes.borrow();
custom_scope.index < scopes.get().len() && custom_scope.index < scopes.get().len() &&
scopes.get()[custom_scope.index].kind.is_temp() scopes.get().get(custom_scope.index).kind.is_temp()
} }
fn trans_scope_cleanups(&self, // cannot borrow self, will recurse fn trans_scope_cleanups(&self, // cannot borrow self, will recurse

View File

@ -27,6 +27,7 @@ use util::ppaux::Repr;
use util::ppaux::ty_to_str; use util::ppaux::ty_to_str;
use arena::TypedArena; use arena::TypedArena;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
use syntax::ast_util; use syntax::ast_util;
@ -139,12 +140,12 @@ pub fn mk_closure_tys(tcx: ty::ctxt,
// is the actual types that will be stored in the map, not the // is the actual types that will be stored in the map, not the
// logical types as the user sees them, so by-ref upvars must be // logical types as the user sees them, so by-ref upvars must be
// converted to ptrs. // converted to ptrs.
let bound_tys = bound_values.map(|bv| { let bound_tys = bound_values.iter().map(|bv| {
match bv.action { match bv.action {
EnvCopy | EnvMove => bv.datum.ty, EnvCopy | EnvMove => bv.datum.ty,
EnvRef => ty::mk_mut_ptr(tcx, bv.datum.ty) EnvRef => ty::mk_mut_ptr(tcx, bv.datum.ty)
} }
}); }).collect();
let cdata_ty = ty::mk_tup(tcx, bound_tys); let cdata_ty = ty::mk_tup(tcx, bound_tys);
debug!("cdata_ty={}", ty_to_str(tcx, cdata_ty)); debug!("cdata_ty={}", ty_to_str(tcx, cdata_ty));
return cdata_ty; return cdata_ty;
@ -152,7 +153,7 @@ pub fn mk_closure_tys(tcx: ty::ctxt,
fn tuplify_box_ty(tcx: ty::ctxt, t: ty::t) -> ty::t { fn tuplify_box_ty(tcx: ty::ctxt, t: ty::t) -> ty::t {
let ptr = ty::mk_imm_ptr(tcx, ty::mk_i8()); let ptr = ty::mk_imm_ptr(tcx, ty::mk_i8());
ty::mk_tup(tcx, ~[ty::mk_uint(), ty::mk_nil_ptr(tcx), ptr, ptr, t]) ty::mk_tup(tcx, vec!(ty::mk_uint(), ty::mk_nil_ptr(tcx), ptr, ptr, t))
} }
fn allocate_cbox<'a>(bcx: &'a Block<'a>, fn allocate_cbox<'a>(bcx: &'a Block<'a>,
@ -191,7 +192,7 @@ pub struct ClosureResult<'a> {
// Otherwise, it is stack allocated and copies pointers to the upvars. // Otherwise, it is stack allocated and copies pointers to the upvars.
pub fn store_environment<'a>( pub fn store_environment<'a>(
bcx: &'a Block<'a>, bcx: &'a Block<'a>,
bound_values: ~[EnvValue], bound_values: Vec<EnvValue> ,
sigil: ast::Sigil) sigil: ast::Sigil)
-> ClosureResult<'a> { -> ClosureResult<'a> {
let _icx = push_ctxt("closure::store_environment"); let _icx = push_ctxt("closure::store_environment");
@ -199,7 +200,7 @@ pub fn store_environment<'a>(
let tcx = ccx.tcx; let tcx = ccx.tcx;
// compute the type of the closure // compute the type of the closure
let cdata_ty = mk_closure_tys(tcx, bound_values); let cdata_ty = mk_closure_tys(tcx, bound_values.as_slice());
// cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a // cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a
// tuple. This could be a ptr in uniq or a box or on stack, // tuple. This could be a ptr in uniq or a box or on stack,
@ -258,7 +259,7 @@ fn build_closure<'a>(bcx0: &'a Block<'a>,
let bcx = bcx0; let bcx = bcx0;
// Package up the captured upvars // Package up the captured upvars
let mut env_vals = ~[]; let mut env_vals = Vec::new();
for cap_var in cap_vars.iter() { for cap_var in cap_vars.iter() {
debug!("Building closure: captured variable {:?}", *cap_var); debug!("Building closure: captured variable {:?}", *cap_var);
let datum = expr::trans_local_var(bcx, cap_var.def); let datum = expr::trans_local_var(bcx, cap_var.def);
@ -387,7 +388,11 @@ pub fn trans_expr_fn<'a>(
let s = tcx.map.with_path(id, |path| { let s = tcx.map.with_path(id, |path| {
mangle_internal_name_by_path_and_seq(path, "closure") mangle_internal_name_by_path_and_seq(path, "closure")
}); });
let llfn = decl_internal_rust_fn(ccx, true, f.sig.inputs, f.sig.output, s); let llfn = decl_internal_rust_fn(ccx,
true,
f.sig.inputs.as_slice(),
f.sig.output,
s);
// set an inline hint for all closures // set an inline hint for all closures
set_inline_hint(llfn); set_inline_hint(llfn);
@ -396,11 +401,17 @@ pub fn trans_expr_fn<'a>(
let capture_map = ccx.maps.capture_map.borrow(); let capture_map = ccx.maps.capture_map.borrow();
capture_map.get().get_copy(&id) capture_map.get().get_copy(&id)
}; };
let ClosureResult {llbox, cdata_ty, bcx} = build_closure(bcx, *cap_vars.borrow(), sigil); let ClosureResult {llbox, cdata_ty, bcx} =
build_closure(bcx, cap_vars.borrow().as_slice(), sigil);
trans_closure(ccx, decl, body, llfn, trans_closure(ccx, decl, body, llfn,
bcx.fcx.param_substs, id, bcx.fcx.param_substs, id,
[], ty::ty_fn_ret(fty), [], ty::ty_fn_ret(fty),
|bcx| load_environment(bcx, cdata_ty, *cap_vars.borrow(), sigil)); |bcx| {
load_environment(bcx,
cdata_ty,
cap_vars.borrow().as_slice(),
sigil)
});
fill_fn_pair(bcx, dest_addr, llfn, llbox); fill_fn_pair(bcx, dest_addr, llfn, llbox);
bcx bcx
@ -447,9 +458,13 @@ pub fn get_wrapper_for_bare_fn(ccx: @CrateContext,
mangle_internal_name_by_path_and_seq(path, "as_closure") mangle_internal_name_by_path_and_seq(path, "as_closure")
}); });
let llfn = if is_local { let llfn = if is_local {
decl_internal_rust_fn(ccx, true, f.sig.inputs, f.sig.output, name) decl_internal_rust_fn(ccx,
true,
f.sig.inputs.as_slice(),
f.sig.output,
name)
} else { } else {
decl_rust_fn(ccx, true, f.sig.inputs, f.sig.output, name) decl_rust_fn(ccx, true, f.sig.inputs.as_slice(), f.sig.output, name)
}; };
{ {
@ -470,8 +485,10 @@ pub fn get_wrapper_for_bare_fn(ccx: @CrateContext,
init_function(&fcx, true, f.sig.output, None); init_function(&fcx, true, f.sig.output, None);
let bcx = fcx.entry_bcx.get().unwrap(); let bcx = fcx.entry_bcx.get().unwrap();
let args = create_datums_for_fn_args(&fcx, ty::ty_fn_args(closure_ty)); let args = create_datums_for_fn_args(&fcx,
let mut llargs = ~[]; ty::ty_fn_args(closure_ty)
.as_slice());
let mut llargs = Vec::new();
match fcx.llretptr.get() { match fcx.llretptr.get() {
Some(llretptr) => { Some(llretptr) => {
llargs.push(llretptr); llargs.push(llretptr);
@ -480,7 +497,7 @@ pub fn get_wrapper_for_bare_fn(ccx: @CrateContext,
} }
llargs.extend(&mut args.iter().map(|arg| arg.val)); llargs.extend(&mut args.iter().map(|arg| arg.val));
let retval = Call(bcx, fn_ptr, llargs, []); let retval = Call(bcx, fn_ptr, llargs.as_slice(), []);
if type_is_zero_size(ccx, f.sig.output) || fcx.llretptr.get().is_some() { if type_is_zero_size(ccx, f.sig.output) || fcx.llretptr.get().is_some() {
RetVoid(bcx); RetVoid(bcx);
} else { } else {
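The `mk_closure_tys` change earlier in this file turns `bound_values.map(...)` into an explicit `iter().map(...).collect()`, with by-ref upvars mapped to pointer-like types. A standalone sketch of that map-then-collect shape in present-day Rust; `EnvValue`, `Action`, and `BoundTy` are simplified stand-ins invented for the example, not the compiler's types.

// Hedged sketch: derive one bound type per captured value, collect to a Vec.
#[derive(Debug)]
enum BoundTy {
    ByValue(u32),
    ByRef(u32),
}

enum Action {
    Copy,
    Ref,
}

struct EnvValue {
    action: Action,
    ty: u32,
}

fn bound_tys(bound_values: &[EnvValue]) -> Vec<BoundTy> {
    bound_values
        .iter()
        .map(|bv| match bv.action {
            Action::Copy => BoundTy::ByValue(bv.ty),
            // By-ref upvars become pointer-like types, as in mk_closure_tys.
            Action::Ref => BoundTy::ByRef(bv.ty),
        })
        .collect()
}

fn main() {
    let vals = vec![
        EnvValue { action: Action::Copy, ty: 1 },
        EnvValue { action: Action::Ref, ty: 2 },
    ];
    println!("{:?}", bound_tys(&vals));
}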

View File

@ -32,10 +32,11 @@ use util::ppaux::Repr;
use util::nodemap::NodeMap; use util::nodemap::NodeMap;
use arena::TypedArena; use arena::TypedArena;
use collections::HashMap;
use std::c_str::ToCStr; use std::c_str::ToCStr;
use std::cell::{Cell, RefCell}; use std::cell::{Cell, RefCell};
use collections::HashMap;
use std::libc::{c_uint, c_longlong, c_ulonglong, c_char}; use std::libc::{c_uint, c_longlong, c_ulonglong, c_char};
use std::vec_ng::Vec;
use syntax::ast::Ident; use syntax::ast::Ident;
use syntax::ast; use syntax::ast;
use syntax::ast_map::{PathElem, PathName}; use syntax::ast_map::{PathElem, PathName};
@ -50,8 +51,9 @@ fn type_is_newtype_immediate(ccx: &CrateContext, ty: ty::t) -> bool {
ty::ty_struct(def_id, ref substs) => { ty::ty_struct(def_id, ref substs) => {
let fields = ty::struct_fields(ccx.tcx, def_id, substs); let fields = ty::struct_fields(ccx.tcx, def_id, substs);
fields.len() == 1 && fields.len() == 1 &&
fields[0].ident.name == token::special_idents::unnamed_field.name && fields.get(0).ident.name ==
type_is_immediate(ccx, fields[0].mt.ty) token::special_idents::unnamed_field.name &&
type_is_immediate(ccx, fields.get(0).mt.ty)
} }
_ => false _ => false
} }
@ -161,7 +163,7 @@ pub struct Stats {
n_llvm_insns: Cell<uint>, n_llvm_insns: Cell<uint>,
llvm_insns: RefCell<HashMap<~str, uint>>, llvm_insns: RefCell<HashMap<~str, uint>>,
// (ident, time-in-ms, llvm-instructions) // (ident, time-in-ms, llvm-instructions)
fn_stats: RefCell<~[(~str, uint, uint)]>, fn_stats: RefCell<Vec<(~str, uint, uint)> >,
} }
pub struct BuilderRef_res { pub struct BuilderRef_res {
@ -187,7 +189,7 @@ pub type ExternMap = HashMap<~str, ValueRef>;
// Here `self_ty` is the real type of the self parameter to this method. It // Here `self_ty` is the real type of the self parameter to this method. It
// will only be set in the case of default methods. // will only be set in the case of default methods.
pub struct param_substs { pub struct param_substs {
tys: ~[ty::t], tys: Vec<ty::t> ,
self_ty: Option<ty::t>, self_ty: Option<ty::t>,
vtables: Option<typeck::vtable_res>, vtables: Option<typeck::vtable_res>,
self_vtables: Option<typeck::vtable_param_res> self_vtables: Option<typeck::vtable_param_res>
@ -285,7 +287,7 @@ pub struct FunctionContext<'a> {
debug_context: debuginfo::FunctionDebugContext, debug_context: debuginfo::FunctionDebugContext,
// Cleanup scopes. // Cleanup scopes.
scopes: RefCell<~[cleanup::CleanupScope<'a>]>, scopes: RefCell<Vec<cleanup::CleanupScope<'a>> >,
} }
impl<'a> FunctionContext<'a> { impl<'a> FunctionContext<'a> {
@ -639,7 +641,7 @@ pub fn C_binary_slice(cx: &CrateContext, data: &[u8]) -> ValueRef {
pub fn C_zero_byte_arr(size: uint) -> ValueRef { pub fn C_zero_byte_arr(size: uint) -> ValueRef {
unsafe { unsafe {
let mut i = 0u; let mut i = 0u;
let mut elts: ~[ValueRef] = ~[]; let mut elts: Vec<ValueRef> = Vec::new();
while i < size { elts.push(C_u8(0u)); i += 1u; } while i < size { elts.push(C_u8(0u)); i += 1u; }
return llvm::LLVMConstArray(Type::i8().to_ref(), return llvm::LLVMConstArray(Type::i8().to_ref(),
elts.as_ptr(), elts.len() as c_uint); elts.as_ptr(), elts.len() as c_uint);
@ -725,7 +727,7 @@ pub fn is_null(val: ValueRef) -> bool {
// Used to identify cached monomorphized functions and vtables // Used to identify cached monomorphized functions and vtables
#[deriving(Eq, Hash)] #[deriving(Eq, Hash)]
pub enum mono_param_id { pub enum mono_param_id {
mono_precise(ty::t, Option<@~[mono_id]>), mono_precise(ty::t, Option<@Vec<mono_id> >),
mono_any, mono_any,
mono_repr(uint /* size */, mono_repr(uint /* size */,
uint /* align */, uint /* align */,
@ -758,8 +760,7 @@ pub fn mono_data_classify(t: ty::t) -> MonoDataClass {
#[deriving(Eq, Hash)] #[deriving(Eq, Hash)]
pub struct mono_id_ { pub struct mono_id_ {
def: ast::DefId, def: ast::DefId,
params: ~[mono_param_id] params: Vec<mono_param_id> }
}
pub type mono_id = @mono_id_; pub type mono_id = @mono_id_;
@ -782,7 +783,7 @@ pub fn align_to(cx: &Block, off: ValueRef, align: ValueRef) -> ValueRef {
pub fn monomorphize_type(bcx: &Block, t: ty::t) -> ty::t { pub fn monomorphize_type(bcx: &Block, t: ty::t) -> ty::t {
match bcx.fcx.param_substs { match bcx.fcx.param_substs {
Some(substs) => { Some(substs) => {
ty::subst_tps(bcx.tcx(), substs.tys, substs.self_ty, t) ty::subst_tps(bcx.tcx(), substs.tys.as_slice(), substs.self_ty, t)
} }
_ => { _ => {
assert!(!ty::type_has_params(t)); assert!(!ty::type_has_params(t));
@ -808,7 +809,7 @@ pub fn expr_ty_adjusted(bcx: &Block, ex: &ast::Expr) -> ty::t {
monomorphize_type(bcx, t) monomorphize_type(bcx, t)
} }
pub fn node_id_type_params(bcx: &Block, id: ast::NodeId, is_method: bool) -> ~[ty::t] { pub fn node_id_type_params(bcx: &Block, id: ast::NodeId, is_method: bool) -> Vec<ty::t> {
let tcx = bcx.tcx(); let tcx = bcx.tcx();
let params = if is_method { let params = if is_method {
bcx.ccx().maps.method_map.borrow().get().get(&id).substs.tps.clone() bcx.ccx().maps.method_map.borrow().get().get(&id).substs.tps.clone()
@ -825,7 +826,7 @@ pub fn node_id_type_params(bcx: &Block, id: ast::NodeId, is_method: bool) -> ~[t
match bcx.fcx.param_substs { match bcx.fcx.param_substs {
Some(substs) => { Some(substs) => {
params.iter().map(|t| { params.iter().map(|t| {
ty::subst_tps(tcx, substs.tys, substs.self_ty, *t) ty::subst_tps(tcx, substs.tys.as_slice(), substs.self_ty, *t)
}).collect() }).collect()
} }
_ => params _ => params
@ -882,10 +883,13 @@ pub fn resolve_vtable_under_param_substs(tcx: ty::ctxt,
let tys = match param_substs { let tys = match param_substs {
Some(substs) => { Some(substs) => {
tys.iter().map(|t| { tys.iter().map(|t| {
ty::subst_tps(tcx, substs.tys, substs.self_ty, *t) ty::subst_tps(tcx,
substs.tys.as_slice(),
substs.self_ty,
*t)
}).collect() }).collect()
} }
_ => tys.to_owned() _ => Vec::from_slice(tys.as_slice())
}; };
typeck::vtable_static( typeck::vtable_static(
trait_id, tys, trait_id, tys,
@ -919,13 +923,13 @@ pub fn find_vtable(tcx: ty::ctxt,
typeck::param_numbered(n) => { typeck::param_numbered(n) => {
let tables = ps.vtables let tables = ps.vtables
.expect("vtables missing where they are needed"); .expect("vtables missing where they are needed");
tables[n] *tables.get(n)
} }
}; };
param_bounds[n_bound].clone() param_bounds.get(n_bound).clone()
} }
pub fn dummy_substs(tps: ~[ty::t]) -> ty::substs { pub fn dummy_substs(tps: Vec<ty::t> ) -> ty::substs {
substs { substs {
regions: ty::ErasedRegions, regions: ty::ErasedRegions,
self_ty: None, self_ty: None,

View File

@ -25,15 +25,16 @@ use middle::trans::consts;
use middle::trans::expr; use middle::trans::expr;
use middle::trans::inline; use middle::trans::inline;
use middle::trans::machine; use middle::trans::machine;
use middle::trans::type_::Type;
use middle::trans::type_of; use middle::trans::type_of;
use middle::ty; use middle::ty;
use util::ppaux::{Repr, ty_to_str}; use util::ppaux::{Repr, ty_to_str};
use middle::trans::type_::Type;
use std::c_str::ToCStr; use std::c_str::ToCStr;
use std::libc::c_uint; use std::libc::c_uint;
use std::vec; use std::vec;
use std::vec_ng::Vec;
use std::vec_ng;
use syntax::{ast, ast_util}; use syntax::{ast, ast_util};
pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit) pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit)
@ -302,8 +303,9 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr,
is_local: bool) -> (ValueRef, bool) { is_local: bool) -> (ValueRef, bool) {
let map_list = |exprs: &[@ast::Expr]| { let map_list = |exprs: &[@ast::Expr]| {
exprs.iter().map(|&e| const_expr(cx, e, is_local)) exprs.iter().map(|&e| const_expr(cx, e, is_local))
.fold((~[], true), |(l, all_inlineable), (val, inlineable)| { .fold((Vec::new(), true),
(vec::append_one(l, val), all_inlineable && inlineable) |(l, all_inlineable), (val, inlineable)| {
(vec_ng::append_one(l, val), all_inlineable && inlineable)
}) })
}; };
unsafe { unsafe {
@ -532,7 +534,7 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr,
let ety = ty::expr_ty(cx.tcx, e); let ety = ty::expr_ty(cx.tcx, e);
let repr = adt::represent_type(cx, ety); let repr = adt::represent_type(cx, ety);
let (vals, inlineable) = map_list(es.as_slice()); let (vals, inlineable) = map_list(es.as_slice());
(adt::trans_const(cx, repr, 0, vals), inlineable) (adt::trans_const(cx, repr, 0, vals.as_slice()), inlineable)
} }
ast::ExprStruct(_, ref fs, ref base_opt) => { ast::ExprStruct(_, ref fs, ref base_opt) => {
let ety = ty::expr_ty(cx.tcx, e); let ety = ty::expr_ty(cx.tcx, e);
@ -666,7 +668,8 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr,
let ety = ty::expr_ty(cx.tcx, e); let ety = ty::expr_ty(cx.tcx, e);
let repr = adt::represent_type(cx, ety); let repr = adt::represent_type(cx, ety);
let (arg_vals, inlineable) = map_list(args.as_slice()); let (arg_vals, inlineable) = map_list(args.as_slice());
(adt::trans_const(cx, repr, 0, arg_vals), inlineable) (adt::trans_const(cx, repr, 0, arg_vals.as_slice()),
inlineable)
} }
Some(ast::DefVariant(enum_did, variant_did, _)) => { Some(ast::DefVariant(enum_did, variant_did, _)) => {
let ety = ty::expr_ty(cx.tcx, e); let ety = ty::expr_ty(cx.tcx, e);
@ -675,8 +678,10 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr,
enum_did, enum_did,
variant_did); variant_did);
let (arg_vals, inlineable) = map_list(args.as_slice()); let (arg_vals, inlineable) = map_list(args.as_slice());
(adt::trans_const(cx, repr, vinfo.disr_val, arg_vals), (adt::trans_const(cx,
inlineable) repr,
vinfo.disr_val,
arg_vals.as_slice()), inlineable)
} }
_ => cx.sess.span_bug(e.span, "expected a struct or variant def") _ => cx.sess.span_bug(e.span, "expected a struct or variant def")
} }

View File

@ -33,6 +33,7 @@ use std::cell::{Cell, RefCell};
use std::c_str::ToCStr; use std::c_str::ToCStr;
use std::local_data; use std::local_data;
use std::libc::c_uint; use std::libc::c_uint;
use std::vec_ng::Vec;
use collections::{HashMap, HashSet}; use collections::{HashMap, HashSet};
use syntax::ast; use syntax::ast;
use syntax::parse::token::InternedString; use syntax::parse::token::InternedString;
@ -226,7 +227,7 @@ impl CrateContext {
n_closures: Cell::new(0u), n_closures: Cell::new(0u),
n_llvm_insns: Cell::new(0u), n_llvm_insns: Cell::new(0u),
llvm_insns: RefCell::new(HashMap::new()), llvm_insns: RefCell::new(HashMap::new()),
fn_stats: RefCell::new(~[]), fn_stats: RefCell::new(Vec::new()),
}, },
tydesc_type: tydesc_type, tydesc_type: tydesc_type,
int_type: int_type, int_type: int_type,
@ -250,7 +251,7 @@ impl CrateContext {
indices: &[uint]) -> ValueRef { indices: &[uint]) -> ValueRef {
debug!("const_inbounds_gepi: pointer={} indices={:?}", debug!("const_inbounds_gepi: pointer={} indices={:?}",
self.tn.val_to_str(pointer), indices); self.tn.val_to_str(pointer), indices);
let v: ~[ValueRef] = let v: Vec<ValueRef> =
indices.iter().map(|i| C_i32(*i as i32)).collect(); indices.iter().map(|i| C_i32(*i as i32)).collect();
unsafe { unsafe {
llvm::LLVMConstInBoundsGEP(pointer, llvm::LLVMConstInBoundsGEP(pointer,

View File

@ -341,9 +341,12 @@ pub fn trans_fail<'a>(
let v_line = loc.line as int; let v_line = loc.line as int;
let v_str = PointerCast(bcx, v_fail_str, Type::i8p()); let v_str = PointerCast(bcx, v_fail_str, Type::i8p());
let v_filename = PointerCast(bcx, v_filename, Type::i8p()); let v_filename = PointerCast(bcx, v_filename, Type::i8p());
let args = ~[v_str, v_filename, C_int(ccx, v_line)]; let args = vec!(v_str, v_filename, C_int(ccx, v_line));
let did = langcall(bcx, Some(sp), "", FailFnLangItem); let did = langcall(bcx, Some(sp), "", FailFnLangItem);
let bcx = callee::trans_lang_call(bcx, did, args, Some(expr::Ignore)).bcx; let bcx = callee::trans_lang_call(bcx,
did,
args.as_slice(),
Some(expr::Ignore)).bcx;
Unreachable(bcx); Unreachable(bcx);
return bcx; return bcx;
} }
@ -356,9 +359,12 @@ pub fn trans_fail_bounds_check<'a>(
-> &'a Block<'a> { -> &'a Block<'a> {
let _icx = push_ctxt("trans_fail_bounds_check"); let _icx = push_ctxt("trans_fail_bounds_check");
let (filename, line) = filename_and_line_num_from_span(bcx, sp); let (filename, line) = filename_and_line_num_from_span(bcx, sp);
let args = ~[filename, line, index, len]; let args = vec!(filename, line, index, len);
let did = langcall(bcx, Some(sp), "", FailBoundsCheckFnLangItem); let did = langcall(bcx, Some(sp), "", FailBoundsCheckFnLangItem);
let bcx = callee::trans_lang_call(bcx, did, args, Some(expr::Ignore)).bcx; let bcx = callee::trans_lang_call(bcx,
did,
args.as_slice(),
Some(expr::Ignore)).bcx;
Unreachable(bcx); Unreachable(bcx);
return bcx; return bcx;
} }


@ -149,6 +149,7 @@ use std::libc::{c_uint, c_ulonglong, c_longlong};
use std::ptr; use std::ptr;
use std::sync::atomics; use std::sync::atomics;
use std::vec; use std::vec;
use std::vec_ng::Vec;
use syntax::codemap::{Span, Pos}; use syntax::codemap::{Span, Pos};
use syntax::{abi, ast, codemap, ast_util, ast_map, opt_vec}; use syntax::{abi, ast, codemap, ast_util, ast_map, opt_vec};
use syntax::parse::token; use syntax::parse::token;
@ -177,7 +178,7 @@ pub struct CrateDebugContext {
priv current_debug_location: Cell<DebugLocation>, priv current_debug_location: Cell<DebugLocation>,
priv created_files: RefCell<HashMap<~str, DIFile>>, priv created_files: RefCell<HashMap<~str, DIFile>>,
priv created_types: RefCell<HashMap<uint, DIType>>, priv created_types: RefCell<HashMap<uint, DIType>>,
priv namespace_map: RefCell<HashMap<~[ast::Name], @NamespaceTreeNode>>, priv namespace_map: RefCell<HashMap<Vec<ast::Name> , @NamespaceTreeNode>>,
// This collection is used to assert that composite types (structs, enums, ...) have their // This collection is used to assert that composite types (structs, enums, ...) have their
// members only set once: // members only set once:
priv composite_types_completed: RefCell<HashSet<DIType>>, priv composite_types_completed: RefCell<HashSet<DIType>>,
@ -725,7 +726,10 @@ pub fn create_function_debug_context(cx: &CrateContext,
let return_type = match param_substs { let return_type = match param_substs {
None => return_type, None => return_type,
Some(substs) => { Some(substs) => {
ty::subst_tps(cx.tcx, substs.tys, substs.self_ty, return_type) ty::subst_tps(cx.tcx,
substs.tys.as_slice(),
substs.self_ty,
return_type)
} }
}; };
@ -740,7 +744,10 @@ pub fn create_function_debug_context(cx: &CrateContext,
let arg_type = match param_substs { let arg_type = match param_substs {
None => arg_type, None => arg_type,
Some(substs) => { Some(substs) => {
ty::subst_tps(cx.tcx, substs.tys, substs.self_ty, arg_type) ty::subst_tps(cx.tcx,
substs.tys.as_slice(),
substs.self_ty,
arg_type)
} }
}; };
@ -771,7 +778,8 @@ pub fn create_function_debug_context(cx: &CrateContext,
name_to_append_suffix_to.push_char('<'); name_to_append_suffix_to.push_char('<');
// The list to be filled with template parameters: // The list to be filled with template parameters:
let mut template_params: ~[DIDescriptor] = vec::with_capacity(generics.ty_params.len() + 1); let mut template_params: Vec<DIDescriptor> =
Vec::with_capacity(generics.ty_params.len() + 1);
// Handle self type // Handle self type
if has_self_type { if has_self_type {
@ -814,12 +822,12 @@ pub fn create_function_debug_context(cx: &CrateContext,
let actual_types = match param_substs { let actual_types = match param_substs {
Some(param_substs) => &param_substs.tys, Some(param_substs) => &param_substs.tys,
None => { None => {
return create_DIArray(DIB(cx), template_params); return create_DIArray(DIB(cx), template_params.as_slice());
} }
}; };
for (index, &ast::TyParam{ ident: ident, .. }) in generics.ty_params.iter().enumerate() { for (index, &ast::TyParam{ ident: ident, .. }) in generics.ty_params.iter().enumerate() {
let actual_type = actual_types[index]; let actual_type = *actual_types.get(index);
// Add actual type name to <...> clause of function name // Add actual type name to <...> clause of function name
let actual_type_name = ppaux::ty_to_str(cx.tcx, actual_type); let actual_type_name = ppaux::ty_to_str(cx.tcx, actual_type);
name_to_append_suffix_to.push_str(actual_type_name); name_to_append_suffix_to.push_str(actual_type_name);
@ -850,7 +858,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
name_to_append_suffix_to.push_char('>'); name_to_append_suffix_to.push_char('>');
return create_DIArray(DIB(cx), template_params); return create_DIArray(DIB(cx), template_params.as_slice());
} }
} }
@ -1136,7 +1144,7 @@ enum MemberDescriptionFactory {
impl MemberDescriptionFactory { impl MemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &CrateContext) fn create_member_descriptions(&self, cx: &CrateContext)
-> ~[MemberDescription] { -> Vec<MemberDescription> {
match *self { match *self {
StructMD(ref this) => { StructMD(ref this) => {
this.create_member_descriptions(cx) this.create_member_descriptions(cx)
@ -1155,13 +1163,13 @@ impl MemberDescriptionFactory {
} }
struct StructMemberDescriptionFactory { struct StructMemberDescriptionFactory {
fields: ~[ty::field], fields: Vec<ty::field> ,
span: Span, span: Span,
} }
impl StructMemberDescriptionFactory { impl StructMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &CrateContext) fn create_member_descriptions(&self, cx: &CrateContext)
-> ~[MemberDescription] { -> Vec<MemberDescription> {
self.fields.map(|field| { self.fields.map(|field| {
let name = if field.ident.name == special_idents::unnamed_field.name { let name = if field.ident.name == special_idents::unnamed_field.name {
~"" ~""
@ -1250,7 +1258,7 @@ impl RecursiveTypeDescription {
set_members_of_composite_type(cx, set_members_of_composite_type(cx,
metadata_stub, metadata_stub,
llvm_type, llvm_type,
member_descriptions, member_descriptions.as_slice(),
file_metadata, file_metadata,
codemap::DUMMY_SP); codemap::DUMMY_SP);
return metadata_stub; return metadata_stub;
@ -1260,13 +1268,13 @@ impl RecursiveTypeDescription {
} }
struct TupleMemberDescriptionFactory { struct TupleMemberDescriptionFactory {
component_types: ~[ty::t], component_types: Vec<ty::t> ,
span: Span, span: Span,
} }
impl TupleMemberDescriptionFactory { impl TupleMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &CrateContext) fn create_member_descriptions(&self, cx: &CrateContext)
-> ~[MemberDescription] { -> Vec<MemberDescription> {
self.component_types.map(|&component_type| { self.component_types.map(|&component_type| {
MemberDescription { MemberDescription {
name: ~"", name: ~"",
@ -1300,7 +1308,7 @@ fn prepare_tuple_metadata(cx: &CrateContext,
llvm_type: tuple_llvm_type, llvm_type: tuple_llvm_type,
file_metadata: file_metadata, file_metadata: file_metadata,
member_description_factory: TupleMD(TupleMemberDescriptionFactory { member_description_factory: TupleMD(TupleMemberDescriptionFactory {
component_types: component_types.to_owned(), component_types: Vec::from_slice(component_types),
span: span, span: span,
}) })
} }
@ -1308,7 +1316,7 @@ fn prepare_tuple_metadata(cx: &CrateContext,
struct GeneralMemberDescriptionFactory { struct GeneralMemberDescriptionFactory {
type_rep: @adt::Repr, type_rep: @adt::Repr,
variants: @~[@ty::VariantInfo], variants: @Vec<@ty::VariantInfo> ,
discriminant_type_metadata: ValueRef, discriminant_type_metadata: ValueRef,
containing_scope: DIScope, containing_scope: DIScope,
file_metadata: DIFile, file_metadata: DIFile,
@ -1317,7 +1325,7 @@ struct GeneralMemberDescriptionFactory {
impl GeneralMemberDescriptionFactory { impl GeneralMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &CrateContext) fn create_member_descriptions(&self, cx: &CrateContext)
-> ~[MemberDescription] { -> Vec<MemberDescription> {
// Capture type_rep, so we don't have to copy the struct_defs array // Capture type_rep, so we don't have to copy the struct_defs array
let struct_defs = match *self.type_rep { let struct_defs = match *self.type_rep {
adt::General(_, ref struct_defs) => struct_defs, adt::General(_, ref struct_defs) => struct_defs,
@ -1331,7 +1339,7 @@ impl GeneralMemberDescriptionFactory {
let (variant_type_metadata, variant_llvm_type, member_desc_factory) = let (variant_type_metadata, variant_llvm_type, member_desc_factory) =
describe_enum_variant(cx, describe_enum_variant(cx,
struct_def, struct_def,
self.variants[i], *self.variants.get(i),
Some(self.discriminant_type_metadata), Some(self.discriminant_type_metadata),
self.containing_scope, self.containing_scope,
self.file_metadata, self.file_metadata,
@ -1343,7 +1351,7 @@ impl GeneralMemberDescriptionFactory {
set_members_of_composite_type(cx, set_members_of_composite_type(cx,
variant_type_metadata, variant_type_metadata,
variant_llvm_type, variant_llvm_type,
member_descriptions, member_descriptions.as_slice(),
self.file_metadata, self.file_metadata,
codemap::DUMMY_SP); codemap::DUMMY_SP);
MemberDescription { MemberDescription {
@ -1357,14 +1365,14 @@ impl GeneralMemberDescriptionFactory {
} }
struct EnumVariantMemberDescriptionFactory { struct EnumVariantMemberDescriptionFactory {
args: ~[(~str, ty::t)], args: Vec<(~str, ty::t)> ,
discriminant_type_metadata: Option<DIType>, discriminant_type_metadata: Option<DIType>,
span: Span, span: Span,
} }
impl EnumVariantMemberDescriptionFactory { impl EnumVariantMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &CrateContext) fn create_member_descriptions(&self, cx: &CrateContext)
-> ~[MemberDescription] { -> Vec<MemberDescription> {
self.args.iter().enumerate().map(|(i, &(ref name, ty))| { self.args.iter().enumerate().map(|(i, &(ref name, ty))| {
MemberDescription { MemberDescription {
name: name.to_str(), name: name.to_str(),
@ -1387,8 +1395,11 @@ fn describe_enum_variant(cx: &CrateContext,
file_metadata: DIFile, file_metadata: DIFile,
span: Span) span: Span)
-> (DICompositeType, Type, MemberDescriptionFactory) { -> (DICompositeType, Type, MemberDescriptionFactory) {
let variant_llvm_type = Type::struct_(struct_def.fields.map(|&t| type_of::type_of(cx, t)), let variant_llvm_type =
struct_def.packed); Type::struct_(struct_def.fields
.map(|&t| type_of::type_of(cx, t))
.as_slice(),
struct_def.packed);
// Could some consistency checks here: size, align, field count, discr type // Could some consistency checks here: size, align, field count, discr type
// Find the source code location of the variant's definition // Find the source code location of the variant's definition
@ -1420,7 +1431,7 @@ fn describe_enum_variant(cx: &CrateContext,
} }
// Build an array of (field name, field type) pairs to be captured in the factory closure. // Build an array of (field name, field type) pairs to be captured in the factory closure.
let args: ~[(~str, ty::t)] = arg_names.iter() let args: Vec<(~str, ty::t)> = arg_names.iter()
.zip(struct_def.fields.iter()) .zip(struct_def.fields.iter())
.map(|(s, &t)| (s.to_str(), t)) .map(|(s, &t)| (s.to_str(), t))
.collect(); .collect();
@ -1462,7 +1473,7 @@ fn prepare_enum_metadata(cx: &CrateContext,
let variants = ty::enum_variants(cx.tcx, enum_def_id); let variants = ty::enum_variants(cx.tcx, enum_def_id);
let enumerators_metadata: ~[DIDescriptor] = variants let enumerators_metadata: Vec<DIDescriptor> = variants
.iter() .iter()
.map(|v| { .map(|v| {
token::get_ident(v.name).get().with_c_str(|name| { token::get_ident(v.name).get().with_c_str(|name| {
@ -1491,7 +1502,7 @@ fn prepare_enum_metadata(cx: &CrateContext,
loc.line as c_uint, loc.line as c_uint,
bytes_to_bits(discriminant_size), bytes_to_bits(discriminant_size),
bytes_to_bits(discriminant_align), bytes_to_bits(discriminant_align),
create_DIArray(DIB(cx), enumerators_metadata), create_DIArray(DIB(cx), enumerators_metadata.as_slice()),
discriminant_base_type_metadata) discriminant_base_type_metadata)
} }
}) })
@ -1507,13 +1518,14 @@ fn prepare_enum_metadata(cx: &CrateContext,
assert!(variants.len() == 1); assert!(variants.len() == 1);
let (metadata_stub, let (metadata_stub,
variant_llvm_type, variant_llvm_type,
member_description_factory) = describe_enum_variant(cx, member_description_factory) =
struct_def, describe_enum_variant(cx,
variants[0], struct_def,
None, *variants.get(0),
containing_scope, None,
file_metadata, containing_scope,
span); file_metadata,
span);
UnfinishedMetadata { UnfinishedMetadata {
cache_id: cache_id_for_type(enum_type), cache_id: cache_id_for_type(enum_type),
metadata_stub: metadata_stub, metadata_stub: metadata_stub,
@ -1565,13 +1577,14 @@ fn prepare_enum_metadata(cx: &CrateContext,
adt::NullablePointer { nonnull: ref struct_def, nndiscr, .. } => { adt::NullablePointer { nonnull: ref struct_def, nndiscr, .. } => {
let (metadata_stub, let (metadata_stub,
variant_llvm_type, variant_llvm_type,
member_description_factory) = describe_enum_variant(cx, member_description_factory) =
struct_def, describe_enum_variant(cx,
variants[nndiscr], struct_def,
None, *variants.get(nndiscr as uint),
containing_scope, None,
file_metadata, containing_scope,
span); file_metadata,
span);
UnfinishedMetadata { UnfinishedMetadata {
cache_id: cache_id_for_type(enum_type), cache_id: cache_id_for_type(enum_type),
metadata_stub: metadata_stub, metadata_stub: metadata_stub,
@ -1650,7 +1663,7 @@ fn set_members_of_composite_type(cx: &CrateContext,
let loc = span_start(cx, definition_span); let loc = span_start(cx, definition_span);
let member_metadata: ~[DIDescriptor] = member_descriptions let member_metadata: Vec<DIDescriptor> = member_descriptions
.iter() .iter()
.enumerate() .enumerate()
.map(|(i, member_description)| { .map(|(i, member_description)| {
@ -1679,7 +1692,7 @@ fn set_members_of_composite_type(cx: &CrateContext,
.collect(); .collect();
unsafe { unsafe {
let type_array = create_DIArray(DIB(cx), member_metadata); let type_array = create_DIArray(DIB(cx), member_metadata.as_slice());
llvm::LLVMDICompositeTypeSetTypeArray(composite_type_metadata, type_array); llvm::LLVMDICompositeTypeSetTypeArray(composite_type_metadata, type_array);
} }
} }
@ -1739,7 +1752,9 @@ fn boxed_type_metadata(cx: &CrateContext,
let box_llvm_type = Type::at_box(cx, content_llvm_type); let box_llvm_type = Type::at_box(cx, content_llvm_type);
let member_llvm_types = box_llvm_type.field_types(); let member_llvm_types = box_llvm_type.field_types();
assert!(box_layout_is_correct(cx, member_llvm_types, content_llvm_type)); assert!(box_layout_is_correct(cx,
member_llvm_types.as_slice(),
content_llvm_type));
let int_type = ty::mk_int(); let int_type = ty::mk_int();
let nil_pointer_type = ty::mk_nil_ptr(cx.tcx); let nil_pointer_type = ty::mk_nil_ptr(cx.tcx);
@ -1748,31 +1763,31 @@ fn boxed_type_metadata(cx: &CrateContext,
let member_descriptions = [ let member_descriptions = [
MemberDescription { MemberDescription {
name: ~"refcnt", name: ~"refcnt",
llvm_type: member_llvm_types[0], llvm_type: *member_llvm_types.get(0),
type_metadata: type_metadata(cx, int_type, codemap::DUMMY_SP), type_metadata: type_metadata(cx, int_type, codemap::DUMMY_SP),
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
}, },
MemberDescription { MemberDescription {
name: ~"drop_glue", name: ~"drop_glue",
llvm_type: member_llvm_types[1], llvm_type: *member_llvm_types.get(1),
type_metadata: nil_pointer_type_metadata, type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
}, },
MemberDescription { MemberDescription {
name: ~"prev", name: ~"prev",
llvm_type: member_llvm_types[2], llvm_type: *member_llvm_types.get(2),
type_metadata: nil_pointer_type_metadata, type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
}, },
MemberDescription { MemberDescription {
name: ~"next", name: ~"next",
llvm_type: member_llvm_types[3], llvm_type: *member_llvm_types.get(3),
type_metadata: nil_pointer_type_metadata, type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
}, },
MemberDescription { MemberDescription {
name: ~"val", name: ~"val",
llvm_type: member_llvm_types[4], llvm_type: *member_llvm_types.get(4),
type_metadata: content_type_metadata, type_metadata: content_type_metadata,
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
} }
@ -1859,19 +1874,19 @@ fn vec_metadata(cx: &CrateContext,
let member_descriptions = [ let member_descriptions = [
MemberDescription { MemberDescription {
name: ~"fill", name: ~"fill",
llvm_type: member_llvm_types[0], llvm_type: *member_llvm_types.get(0),
type_metadata: int_type_metadata, type_metadata: int_type_metadata,
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
}, },
MemberDescription { MemberDescription {
name: ~"alloc", name: ~"alloc",
llvm_type: member_llvm_types[1], llvm_type: *member_llvm_types.get(1),
type_metadata: int_type_metadata, type_metadata: int_type_metadata,
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
}, },
MemberDescription { MemberDescription {
name: ~"elements", name: ~"elements",
llvm_type: member_llvm_types[2], llvm_type: *member_llvm_types.get(2),
type_metadata: array_type_metadata, type_metadata: array_type_metadata,
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
} }
@ -1904,20 +1919,22 @@ fn vec_slice_metadata(cx: &CrateContext,
let slice_type_name = ppaux::ty_to_str(cx.tcx, vec_type); let slice_type_name = ppaux::ty_to_str(cx.tcx, vec_type);
let member_llvm_types = slice_llvm_type.field_types(); let member_llvm_types = slice_llvm_type.field_types();
assert!(slice_layout_is_correct(cx, member_llvm_types, element_type)); assert!(slice_layout_is_correct(cx,
member_llvm_types.as_slice(),
element_type));
let data_ptr_type = ty::mk_ptr(cx.tcx, ty::mt { ty: element_type, mutbl: ast::MutImmutable }); let data_ptr_type = ty::mk_ptr(cx.tcx, ty::mt { ty: element_type, mutbl: ast::MutImmutable });
let member_descriptions = [ let member_descriptions = [
MemberDescription { MemberDescription {
name: ~"data_ptr", name: ~"data_ptr",
llvm_type: member_llvm_types[0], llvm_type: *member_llvm_types.get(0),
type_metadata: type_metadata(cx, data_ptr_type, span), type_metadata: type_metadata(cx, data_ptr_type, span),
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
}, },
MemberDescription { MemberDescription {
name: ~"length", name: ~"length",
llvm_type: member_llvm_types[1], llvm_type: *member_llvm_types.get(1),
type_metadata: type_metadata(cx, ty::mk_uint(), span), type_metadata: type_metadata(cx, ty::mk_uint(), span),
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
}, },
@ -1954,7 +1971,8 @@ fn subroutine_type_metadata(cx: &CrateContext,
let loc = span_start(cx, span); let loc = span_start(cx, span);
let file_metadata = file_metadata(cx, loc.file.name); let file_metadata = file_metadata(cx, loc.file.name);
let mut signature_metadata: ~[DIType] = vec::with_capacity(signature.inputs.len() + 1); let mut signature_metadata: Vec<DIType> =
Vec::with_capacity(signature.inputs.len() + 1);
// return type // return type
signature_metadata.push(match ty::get(signature.output).sty { signature_metadata.push(match ty::get(signature.output).sty {
@ -1971,7 +1989,7 @@ fn subroutine_type_metadata(cx: &CrateContext,
llvm::LLVMDIBuilderCreateSubroutineType( llvm::LLVMDIBuilderCreateSubroutineType(
DIB(cx), DIB(cx),
file_metadata, file_metadata,
create_DIArray(DIB(cx), signature_metadata)) create_DIArray(DIB(cx), signature_metadata.as_slice()))
}; };
} }
@ -1993,7 +2011,7 @@ fn trait_metadata(cx: &CrateContext,
ident_string.get(); ident_string.get();
// Add type and region parameters // Add type and region parameters
let name = ppaux::parameterized(cx.tcx, name, &substs.regions, let name = ppaux::parameterized(cx.tcx, name, &substs.regions,
substs.tps, def_id, true); substs.tps.as_slice(), def_id, true);
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id); let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id);
@ -2121,7 +2139,10 @@ fn type_metadata(cx: &CrateContext,
} }
}, },
ty::ty_tup(ref elements) => { ty::ty_tup(ref elements) => {
prepare_tuple_metadata(cx, t, *elements, usage_site_span).finalize(cx) prepare_tuple_metadata(cx,
t,
elements.as_slice(),
usage_site_span).finalize(cx)
} }
_ => cx.sess.bug(format!("debuginfo: unexpected type in type_metadata: {:?}", sty)) _ => cx.sess.bug(format!("debuginfo: unexpected type in type_metadata: {:?}", sty))
}; };
@ -2265,7 +2286,7 @@ fn populate_scope_map(cx: &CrateContext,
ident: Option<ast::Ident> ident: Option<ast::Ident>
} }
let mut scope_stack = ~[ScopeStackEntry { scope_metadata: fn_metadata, ident: None }]; let mut scope_stack = vec!(ScopeStackEntry { scope_metadata: fn_metadata, ident: None });
// Push argument identifiers onto the stack so arguments integrate nicely with variable // Push argument identifiers onto the stack so arguments integrate nicely with variable
// shadowing. // shadowing.
@ -2288,10 +2309,10 @@ fn populate_scope_map(cx: &CrateContext,
// local helper functions for walking the AST. // local helper functions for walking the AST.
fn with_new_scope(cx: &CrateContext, fn with_new_scope(cx: &CrateContext,
scope_span: Span, scope_span: Span,
scope_stack: &mut ~[ScopeStackEntry], scope_stack: &mut Vec<ScopeStackEntry> ,
scope_map: &mut HashMap<ast::NodeId, DIScope>, scope_map: &mut HashMap<ast::NodeId, DIScope>,
inner_walk: |&CrateContext, inner_walk: |&CrateContext,
&mut ~[ScopeStackEntry], &mut Vec<ScopeStackEntry> ,
&mut HashMap<ast::NodeId, DIScope>|) { &mut HashMap<ast::NodeId, DIScope>|) {
// Create a new lexical scope and push it onto the stack // Create a new lexical scope and push it onto the stack
let loc = cx.sess.codemap.lookup_char_pos(scope_span.lo); let loc = cx.sess.codemap.lookup_char_pos(scope_span.lo);
@ -2325,7 +2346,7 @@ fn populate_scope_map(cx: &CrateContext,
fn walk_block(cx: &CrateContext, fn walk_block(cx: &CrateContext,
block: &ast::Block, block: &ast::Block,
scope_stack: &mut ~[ScopeStackEntry], scope_stack: &mut Vec<ScopeStackEntry> ,
scope_map: &mut HashMap<ast::NodeId, DIScope>) { scope_map: &mut HashMap<ast::NodeId, DIScope>) {
scope_map.insert(block.id, scope_stack.last().unwrap().scope_metadata); scope_map.insert(block.id, scope_stack.last().unwrap().scope_metadata);
@ -2349,7 +2370,7 @@ fn populate_scope_map(cx: &CrateContext,
fn walk_decl(cx: &CrateContext, fn walk_decl(cx: &CrateContext,
decl: &ast::Decl, decl: &ast::Decl,
scope_stack: &mut ~[ScopeStackEntry], scope_stack: &mut Vec<ScopeStackEntry> ,
scope_map: &mut HashMap<ast::NodeId, DIScope>) { scope_map: &mut HashMap<ast::NodeId, DIScope>) {
match *decl { match *decl {
codemap::Spanned { node: ast::DeclLocal(local), .. } => { codemap::Spanned { node: ast::DeclLocal(local), .. } => {
@ -2367,7 +2388,7 @@ fn populate_scope_map(cx: &CrateContext,
fn walk_pattern(cx: &CrateContext, fn walk_pattern(cx: &CrateContext,
pat: @ast::Pat, pat: @ast::Pat,
scope_stack: &mut ~[ScopeStackEntry], scope_stack: &mut Vec<ScopeStackEntry> ,
scope_map: &mut HashMap<ast::NodeId, DIScope>) { scope_map: &mut HashMap<ast::NodeId, DIScope>) {
let def_map = cx.tcx.def_map; let def_map = cx.tcx.def_map;
@ -2512,7 +2533,7 @@ fn populate_scope_map(cx: &CrateContext,
fn walk_expr(cx: &CrateContext, fn walk_expr(cx: &CrateContext,
exp: &ast::Expr, exp: &ast::Expr,
scope_stack: &mut ~[ScopeStackEntry], scope_stack: &mut Vec<ScopeStackEntry> ,
scope_map: &mut HashMap<ast::NodeId, DIScope>) { scope_map: &mut HashMap<ast::NodeId, DIScope>) {
scope_map.insert(exp.id, scope_stack.last().unwrap().scope_metadata); scope_map.insert(exp.id, scope_stack.last().unwrap().scope_metadata);
@ -2741,7 +2762,7 @@ fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> @NamespaceTreeNo
}; };
let mut path = krate.move_iter().chain(path).peekable(); let mut path = krate.move_iter().chain(path).peekable();
let mut current_key = ~[]; let mut current_key = Vec::new();
let mut parent_node: Option<@NamespaceTreeNode> = None; let mut parent_node: Option<@NamespaceTreeNode> = None;
// Create/Lookup namespace for each element of the path. // Create/Lookup namespace for each element of the path.
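Most of the mechanical churn in this file comes from indexing: at this point Vec<T> has no [] operator, so member_llvm_types[i] becomes *member_llvm_types.get(i), where .get(i) in the vec_ng API of the day returns a plain &T (and fails on out-of-bounds, unlike the Option-returning get of later Rust). A hypothetical sketch:

    use std::vec_ng::Vec;

    fn demo() {
        let member_types = vec!(10u, 20u, 30u);
        // was: let first = member_types[0];
        let first = *member_types.get(0);   // .get(i) yields &T, so deref
        assert_eq!(first, 10u);
    }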


@ -71,6 +71,7 @@ use middle::trans::machine::llsize_of;
use middle::trans::type_::Type; use middle::trans::type_::Type;
use std::vec; use std::vec;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
use syntax::ast_map; use syntax::ast_map;
use syntax::codemap; use syntax::codemap;
@ -741,9 +742,9 @@ fn trans_rvalue_dps_unadjusted<'a>(bcx: &'a Block<'a>,
} }
ast::ExprTup(ref args) => { ast::ExprTup(ref args) => {
let repr = adt::represent_type(bcx.ccx(), expr_ty(bcx, expr)); let repr = adt::represent_type(bcx.ccx(), expr_ty(bcx, expr));
let numbered_fields: ~[(uint, @ast::Expr)] = let numbered_fields: Vec<(uint, @ast::Expr)> =
args.iter().enumerate().map(|(i, arg)| (i, *arg)).collect(); args.iter().enumerate().map(|(i, arg)| (i, *arg)).collect();
trans_adt(bcx, repr, 0, numbered_fields, None, dest) trans_adt(bcx, repr, 0, numbered_fields.as_slice(), None, dest)
} }
ast::ExprLit(lit) => { ast::ExprLit(lit) => {
match lit.node { match lit.node {
@ -973,7 +974,7 @@ pub fn with_field_tys<R>(tcx: ty::ctxt,
match ty::get(ty).sty { match ty::get(ty).sty {
ty::ty_struct(did, ref substs) => { ty::ty_struct(did, ref substs) => {
op(0, struct_fields(tcx, did, substs)) op(0, struct_fields(tcx, did, substs).as_slice())
} }
ty::ty_enum(_, ref substs) => { ty::ty_enum(_, ref substs) => {
@ -995,7 +996,9 @@ pub fn with_field_tys<R>(tcx: ty::ctxt,
let variant_info = ty::enum_variant_with_id( let variant_info = ty::enum_variant_with_id(
tcx, enum_id, variant_id); tcx, enum_id, variant_id);
op(variant_info.disr_val, op(variant_info.disr_val,
struct_fields(tcx, variant_id, substs)) struct_fields(tcx,
variant_id,
substs).as_slice())
} }
_ => { _ => {
tcx.sess.bug("resolve didn't map this expr to a \ tcx.sess.bug("resolve didn't map this expr to a \
@ -1047,7 +1050,7 @@ fn trans_rec_or_struct<'a>(
}); });
let optbase = match base { let optbase = match base {
Some(base_expr) => { Some(base_expr) => {
let mut leftovers = ~[]; let mut leftovers = Vec::new();
for (i, b) in need_base.iter().enumerate() { for (i, b) in need_base.iter().enumerate() {
if *b { if *b {
leftovers.push((i, field_tys[i].mt.ty)) leftovers.push((i, field_tys[i].mt.ty))
@ -1081,8 +1084,7 @@ struct StructBaseInfo {
/// The base expression; will be evaluated after all explicit fields. /// The base expression; will be evaluated after all explicit fields.
expr: @ast::Expr, expr: @ast::Expr,
/// The indices of fields to copy paired with their types. /// The indices of fields to copy paired with their types.
fields: ~[(uint, ty::t)] fields: Vec<(uint, ty::t)> }
}
/** /**
* Constructs an ADT instance: * Constructs an ADT instance:
@ -1709,7 +1711,7 @@ fn trans_log_level<'a>(bcx: &'a Block<'a>) -> DatumBlock<'a, Expr> {
_ => false _ => false
} }
}); });
let modpath: ~[ast_map::PathElem] = path.collect(); let modpath: Vec<ast_map::PathElem> = path.collect();
let modname = ast_map::path_to_str(ast_map::Values(modpath.iter())); let modname = ast_map::path_to_str(ast_map::Values(modpath.iter()));
(modpath, modname) (modpath, modname)
}) })
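The other recurring change here is at collection sites: an iterator chain that used to land in ~[T] now needs its target spelled out as Vec<T> so that collect() (and a later .as_slice(), where a slice is still wanted) resolves correctly. A hypothetical sketch:

    use std::vec_ng::Vec;

    fn demo() {
        // was: let pairs: ~[(uint, int)] = ... .collect();
        let pairs: Vec<(uint, int)> =
            [3, 5, 7].iter().enumerate().map(|(i, &x)| (i, x)).collect();
        assert_eq!(pairs.len(), 3);
    }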


@ -27,6 +27,7 @@ use middle::ty::FnSig;
use middle::ty; use middle::ty;
use std::cmp; use std::cmp;
use std::libc::c_uint; use std::libc::c_uint;
use std::vec_ng::Vec;
use syntax::abi::{Cdecl, Aapcs, C, AbiSet, Win64}; use syntax::abi::{Cdecl, Aapcs, C, AbiSet, Win64};
use syntax::abi::{RustIntrinsic, Rust, Stdcall, Fastcall, System}; use syntax::abi::{RustIntrinsic, Rust, Stdcall, Fastcall, System};
use syntax::codemap::Span; use syntax::codemap::Span;
@ -56,7 +57,7 @@ struct ForeignTypes {
struct LlvmSignature { struct LlvmSignature {
// LLVM versions of the types of this function's arguments. // LLVM versions of the types of this function's arguments.
llarg_tys: ~[Type], llarg_tys: Vec<Type> ,
// LLVM version of the type that this function returns. Note that // LLVM version of the type that this function returns. Note that
// this *may not be* the declared return type of the foreign // this *may not be* the declared return type of the foreign
@ -163,7 +164,7 @@ pub fn trans_native_call<'a>(
llfn: ValueRef, llfn: ValueRef,
llretptr: ValueRef, llretptr: ValueRef,
llargs_rust: &[ValueRef], llargs_rust: &[ValueRef],
passed_arg_tys: ~[ty::t]) passed_arg_tys: Vec<ty::t> )
-> &'a Block<'a> { -> &'a Block<'a> {
/*! /*!
* Prepares a call to a native function. This requires adapting * Prepares a call to a native function. This requires adapting
@ -196,16 +197,16 @@ pub fn trans_native_call<'a>(
ty::ty_bare_fn(ref fn_ty) => (fn_ty.abis, fn_ty.sig.clone()), ty::ty_bare_fn(ref fn_ty) => (fn_ty.abis, fn_ty.sig.clone()),
_ => ccx.sess.bug("trans_native_call called on non-function type") _ => ccx.sess.bug("trans_native_call called on non-function type")
}; };
let llsig = foreign_signature(ccx, &fn_sig, passed_arg_tys); let llsig = foreign_signature(ccx, &fn_sig, passed_arg_tys.as_slice());
let ret_def = !return_type_is_void(bcx.ccx(), fn_sig.output); let ret_def = !return_type_is_void(bcx.ccx(), fn_sig.output);
let fn_type = cabi::compute_abi_info(ccx, let fn_type = cabi::compute_abi_info(ccx,
llsig.llarg_tys, llsig.llarg_tys.as_slice(),
llsig.llret_ty, llsig.llret_ty,
ret_def); ret_def);
let arg_tys: &[cabi::ArgType] = fn_type.arg_tys; let arg_tys: &[cabi::ArgType] = fn_type.arg_tys.as_slice();
let mut llargs_foreign = ~[]; let mut llargs_foreign = Vec::new();
// If the foreign ABI expects return value by pointer, supply the // If the foreign ABI expects return value by pointer, supply the
// pointer that Rust gave us. Sometimes we have to bitcast // pointer that Rust gave us. Sometimes we have to bitcast
@ -228,7 +229,8 @@ pub fn trans_native_call<'a>(
let mut llarg_rust = llarg_rust; let mut llarg_rust = llarg_rust;
// Does Rust pass this argument by pointer? // Does Rust pass this argument by pointer?
let rust_indirect = type_of::arg_is_indirect(ccx, passed_arg_tys[i]); let rust_indirect = type_of::arg_is_indirect(ccx,
*passed_arg_tys.get(i));
debug!("argument {}, llarg_rust={}, rust_indirect={}, arg_ty={}", debug!("argument {}, llarg_rust={}, rust_indirect={}, arg_ty={}",
i, i,
@ -239,7 +241,10 @@ pub fn trans_native_call<'a>(
// Ensure that we always have the Rust value indirectly, // Ensure that we always have the Rust value indirectly,
// because it makes bitcasting easier. // because it makes bitcasting easier.
if !rust_indirect { if !rust_indirect {
let scratch = base::alloca(bcx, type_of::type_of(ccx, passed_arg_tys[i]), "__arg"); let scratch =
base::alloca(bcx,
type_of::type_of(ccx, *passed_arg_tys.get(i)),
"__arg");
Store(bcx, llarg_rust, scratch); Store(bcx, llarg_rust, scratch);
llarg_rust = scratch; llarg_rust = scratch;
} }
@ -295,7 +300,11 @@ pub fn trans_native_call<'a>(
None None
}; };
let attrs = sret_attr.as_slice(); let attrs = sret_attr.as_slice();
let llforeign_retval = CallWithConv(bcx, llfn, llargs_foreign, cc, attrs); let llforeign_retval = CallWithConv(bcx,
llfn,
llargs_foreign.as_slice(),
cc,
attrs);
// If the function we just called does not use an outpointer, // If the function we just called does not use an outpointer,
// store the result into the rust outpointer. Cast the outpointer // store the result into the rust outpointer. Cast the outpointer
@ -466,7 +475,11 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext,
ccx.tcx.map.path_to_str(id), ccx.tcx.map.path_to_str(id),
id, t.repr(tcx)); id, t.repr(tcx));
let llfn = base::decl_internal_rust_fn(ccx, false, f.sig.inputs, f.sig.output, ps); let llfn = base::decl_internal_rust_fn(ccx,
false,
f.sig.inputs.as_slice(),
f.sig.output,
ps);
base::set_llvm_fn_attrs(attrs, llfn); base::set_llvm_fn_attrs(attrs, llfn);
base::trans_fn(ccx, decl, body, llfn, None, id, []); base::trans_fn(ccx, decl, body, llfn, None, id, []);
llfn llfn
@ -503,7 +516,7 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext,
llvm::LLVMPositionBuilderAtEnd(builder, the_block); llvm::LLVMPositionBuilderAtEnd(builder, the_block);
// Array for the arguments we will pass to the rust function. // Array for the arguments we will pass to the rust function.
let mut llrust_args = ~[]; let mut llrust_args = Vec::new();
let mut next_foreign_arg_counter: c_uint = 0; let mut next_foreign_arg_counter: c_uint = 0;
let next_foreign_arg: |pad: bool| -> c_uint = |pad: bool| { let next_foreign_arg: |pad: bool| -> c_uint = |pad: bool| {
next_foreign_arg_counter += if pad { next_foreign_arg_counter += if pad {
@ -579,10 +592,10 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext,
// Careful to adapt for cases where the native convention uses // Careful to adapt for cases where the native convention uses
// a pointer and Rust does not or vice versa. // a pointer and Rust does not or vice versa.
for i in range(0, tys.fn_sig.inputs.len()) { for i in range(0, tys.fn_sig.inputs.len()) {
let rust_ty = tys.fn_sig.inputs[i]; let rust_ty = *tys.fn_sig.inputs.get(i);
let llrust_ty = tys.llsig.llarg_tys[i]; let llrust_ty = *tys.llsig.llarg_tys.get(i);
let rust_indirect = type_of::arg_is_indirect(ccx, rust_ty); let rust_indirect = type_of::arg_is_indirect(ccx, rust_ty);
let llforeign_arg_ty = tys.fn_ty.arg_tys[i]; let llforeign_arg_ty = *tys.fn_ty.arg_tys.get(i);
let foreign_indirect = llforeign_arg_ty.is_indirect(); let foreign_indirect = llforeign_arg_ty.is_indirect();
// skip padding // skip padding
@ -730,7 +743,7 @@ fn foreign_signature(ccx: &CrateContext, fn_sig: &ty::FnSig, arg_tys: &[ty::t])
* values by pointer like we do. * values by pointer like we do.
*/ */
let llarg_tys = arg_tys.map(|&arg| type_of(ccx, arg)); let llarg_tys = arg_tys.iter().map(|&arg| type_of(ccx, arg)).collect();
let llret_ty = type_of::type_of(ccx, fn_sig.output); let llret_ty = type_of::type_of(ccx, fn_sig.output);
LlvmSignature { LlvmSignature {
llarg_tys: llarg_tys, llarg_tys: llarg_tys,
@ -750,10 +763,10 @@ fn foreign_types_for_fn_ty(ccx: &CrateContext,
ty::ty_bare_fn(ref fn_ty) => fn_ty.sig.clone(), ty::ty_bare_fn(ref fn_ty) => fn_ty.sig.clone(),
_ => ccx.sess.bug("foreign_types_for_fn_ty called on non-function type") _ => ccx.sess.bug("foreign_types_for_fn_ty called on non-function type")
}; };
let llsig = foreign_signature(ccx, &fn_sig, fn_sig.inputs); let llsig = foreign_signature(ccx, &fn_sig, fn_sig.inputs.as_slice());
let ret_def = !return_type_is_void(ccx, fn_sig.output); let ret_def = !return_type_is_void(ccx, fn_sig.output);
let fn_ty = cabi::compute_abi_info(ccx, let fn_ty = cabi::compute_abi_info(ccx,
llsig.llarg_tys, llsig.llarg_tys.as_slice(),
llsig.llret_ty, llsig.llret_ty,
ret_def); ret_def);
debug!("foreign_types_for_fn_ty(\ debug!("foreign_types_for_fn_ty(\
@ -762,9 +775,9 @@ fn foreign_types_for_fn_ty(ccx: &CrateContext,
fn_ty={} -> {}, \ fn_ty={} -> {}, \
ret_def={}", ret_def={}",
ty.repr(ccx.tcx), ty.repr(ccx.tcx),
ccx.tn.types_to_str(llsig.llarg_tys), ccx.tn.types_to_str(llsig.llarg_tys.as_slice()),
ccx.tn.type_to_str(llsig.llret_ty), ccx.tn.type_to_str(llsig.llret_ty),
ccx.tn.types_to_str(fn_ty.arg_tys.map(|t| t.ty)), ccx.tn.types_to_str(fn_ty.arg_tys.map(|t| t.ty).as_slice()),
ccx.tn.type_to_str(fn_ty.ret_ty.ty), ccx.tn.type_to_str(fn_ty.ret_ty.ty),
ret_def); ret_def);
@ -777,7 +790,7 @@ fn foreign_types_for_fn_ty(ccx: &CrateContext,
} }
fn lltype_for_fn_from_foreign_types(tys: &ForeignTypes) -> Type { fn lltype_for_fn_from_foreign_types(tys: &ForeignTypes) -> Type {
let mut llargument_tys = ~[]; let mut llargument_tys = Vec::new();
let ret_ty = tys.fn_ty.ret_ty; let ret_ty = tys.fn_ty.ret_ty;
let llreturn_ty = if ret_ty.is_indirect() { let llreturn_ty = if ret_ty.is_indirect() {
@ -810,9 +823,9 @@ fn lltype_for_fn_from_foreign_types(tys: &ForeignTypes) -> Type {
} }
if tys.fn_sig.variadic { if tys.fn_sig.variadic {
Type::variadic_func(llargument_tys, &llreturn_ty) Type::variadic_func(llargument_tys.as_slice(), &llreturn_ty)
} else { } else {
Type::func(llargument_tys, &llreturn_ty) Type::func(llargument_tys.as_slice(), &llreturn_ty)
} }
} }
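One more idiom is worth calling out in this file: the old slice method arg_tys.map(|&t| ...) produced a fresh ~[T], and with Vec as the target it becomes an explicit .iter().map(..).collect(); likewise growable buffers now start from Vec::new() rather than ~[]. A hypothetical sketch:

    use std::vec_ng::Vec;

    fn demo(arg_tys: &[int]) -> Vec<int> {
        // was: let doubled = arg_tys.map(|&t| t * 2);
        let doubled: Vec<int> = arg_tys.iter().map(|&t| t * 2).collect();

        // was: let mut out = ~[];
        let mut out = Vec::new();
        for &d in doubled.iter() {
            out.push(d);
        }
        out
    }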


@ -245,7 +245,7 @@ fn trans_struct_drop<'a>(bcx: &'a Block<'a>,
// Find and call the actual destructor // Find and call the actual destructor
let dtor_addr = get_res_dtor(bcx.ccx(), dtor_did, let dtor_addr = get_res_dtor(bcx.ccx(), dtor_did,
class_did, substs.tps.clone()); class_did, substs.tps.as_slice());
// The second argument is the "self" argument for drop // The second argument is the "self" argument for drop
let params = unsafe { let params = unsafe {
@ -262,8 +262,8 @@ fn trans_struct_drop<'a>(bcx: &'a Block<'a>,
// destructors if the user destructor fails. // destructors if the user destructor fails.
let field_scope = bcx.fcx.push_custom_cleanup_scope(); let field_scope = bcx.fcx.push_custom_cleanup_scope();
let self_arg = PointerCast(bcx, v0, params[0]); let self_arg = PointerCast(bcx, v0, *params.get(0));
let args = ~[self_arg]; let args = vec!(self_arg);
// Add all the fields as a value which needs to be cleaned at the end of // Add all the fields as a value which needs to be cleaned at the end of
// this scope. // this scope.


@ -25,6 +25,7 @@ use middle::trans::machine;
use middle::trans::machine::llsize_of; use middle::trans::machine::llsize_of;
use middle::trans::type_::Type; use middle::trans::type_::Type;
use middle::ty; use middle::ty;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
use syntax::ast_map; use syntax::ast_map;
use syntax::parse::token; use syntax::parse::token;
@ -207,12 +208,12 @@ pub fn trans_intrinsic(ccx: @CrateContext,
// This requires that atomic intrinsics follow a specific naming pattern: // This requires that atomic intrinsics follow a specific naming pattern:
// "atomic_<operation>[_<ordering>], and no ordering means SeqCst // "atomic_<operation>[_<ordering>], and no ordering means SeqCst
if name.get().starts_with("atomic_") { if name.get().starts_with("atomic_") {
let split: ~[&str] = name.get().split('_').collect(); let split: Vec<&str> = name.get().split('_').collect();
assert!(split.len() >= 2, "Atomic intrinsic not correct format"); assert!(split.len() >= 2, "Atomic intrinsic not correct format");
let order = if split.len() == 2 { let order = if split.len() == 2 {
lib::llvm::SequentiallyConsistent lib::llvm::SequentiallyConsistent
} else { } else {
match split[2] { match *split.get(2) {
"relaxed" => lib::llvm::Monotonic, "relaxed" => lib::llvm::Monotonic,
"acq" => lib::llvm::Acquire, "acq" => lib::llvm::Acquire,
"rel" => lib::llvm::Release, "rel" => lib::llvm::Release,
@ -221,7 +222,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
} }
}; };
match split[1] { match *split.get(1) {
"cxchg" => { "cxchg" => {
let old = AtomicCmpXchg(bcx, get_param(decl, first_real_arg), let old = AtomicCmpXchg(bcx, get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u), get_param(decl, first_real_arg + 1u),
@ -284,7 +285,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
RetVoid(bcx); RetVoid(bcx);
} }
"size_of" => { "size_of" => {
let tp_ty = substs.tys[0]; let tp_ty = *substs.tys.get(0);
let lltp_ty = type_of::type_of(ccx, tp_ty); let lltp_ty = type_of::type_of(ccx, tp_ty);
Ret(bcx, C_uint(ccx, machine::llsize_of_real(ccx, lltp_ty) as uint)); Ret(bcx, C_uint(ccx, machine::llsize_of_real(ccx, lltp_ty) as uint));
} }
@ -294,7 +295,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
// if the value is non-immediate. Note that, with // if the value is non-immediate. Note that, with
// intrinsics, there are no argument cleanups to // intrinsics, there are no argument cleanups to
// concern ourselves with, so we can use an rvalue datum. // concern ourselves with, so we can use an rvalue datum.
let tp_ty = substs.tys[0]; let tp_ty = *substs.tys.get(0);
let mode = appropriate_rvalue_mode(ccx, tp_ty); let mode = appropriate_rvalue_mode(ccx, tp_ty);
let src = Datum {val: get_param(decl, first_real_arg + 1u), let src = Datum {val: get_param(decl, first_real_arg + 1u),
ty: tp_ty, ty: tp_ty,
@ -303,17 +304,17 @@ pub fn trans_intrinsic(ccx: @CrateContext,
RetVoid(bcx); RetVoid(bcx);
} }
"min_align_of" => { "min_align_of" => {
let tp_ty = substs.tys[0]; let tp_ty = *substs.tys.get(0);
let lltp_ty = type_of::type_of(ccx, tp_ty); let lltp_ty = type_of::type_of(ccx, tp_ty);
Ret(bcx, C_uint(ccx, machine::llalign_of_min(ccx, lltp_ty) as uint)); Ret(bcx, C_uint(ccx, machine::llalign_of_min(ccx, lltp_ty) as uint));
} }
"pref_align_of"=> { "pref_align_of"=> {
let tp_ty = substs.tys[0]; let tp_ty = *substs.tys.get(0);
let lltp_ty = type_of::type_of(ccx, tp_ty); let lltp_ty = type_of::type_of(ccx, tp_ty);
Ret(bcx, C_uint(ccx, machine::llalign_of_pref(ccx, lltp_ty) as uint)); Ret(bcx, C_uint(ccx, machine::llalign_of_pref(ccx, lltp_ty) as uint));
} }
"get_tydesc" => { "get_tydesc" => {
let tp_ty = substs.tys[0]; let tp_ty = *substs.tys.get(0);
let static_ti = get_tydesc(ccx, tp_ty); let static_ti = get_tydesc(ccx, tp_ty);
glue::lazily_emit_visit_glue(ccx, static_ti); glue::lazily_emit_visit_glue(ccx, static_ti);
@ -328,7 +329,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
"type_id" => { "type_id" => {
let hash = ty::hash_crate_independent( let hash = ty::hash_crate_independent(
ccx.tcx, ccx.tcx,
substs.tys[0], *substs.tys.get(0),
&ccx.link_meta.crate_hash); &ccx.link_meta.crate_hash);
// NB: This needs to be kept in lockstep with the TypeId struct in // NB: This needs to be kept in lockstep with the TypeId struct in
// libstd/unstable/intrinsics.rs // libstd/unstable/intrinsics.rs
@ -342,7 +343,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
} }
} }
"init" => { "init" => {
let tp_ty = substs.tys[0]; let tp_ty = *substs.tys.get(0);
let lltp_ty = type_of::type_of(ccx, tp_ty); let lltp_ty = type_of::type_of(ccx, tp_ty);
match bcx.fcx.llretptr.get() { match bcx.fcx.llretptr.get() {
Some(ptr) => { Store(bcx, C_null(lltp_ty), ptr); RetVoid(bcx); } Some(ptr) => { Store(bcx, C_null(lltp_ty), ptr); RetVoid(bcx); }
@ -352,7 +353,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
} }
"uninit" => { "uninit" => {
// Do nothing, this is effectively a no-op // Do nothing, this is effectively a no-op
let retty = substs.tys[0]; let retty = *substs.tys.get(0);
if type_is_immediate(ccx, retty) && !return_type_is_void(ccx, retty) { if type_is_immediate(ccx, retty) && !return_type_is_void(ccx, retty) {
unsafe { unsafe {
Ret(bcx, lib::llvm::llvm::LLVMGetUndef(type_of(ccx, retty).to_ref())); Ret(bcx, lib::llvm::llvm::LLVMGetUndef(type_of(ccx, retty).to_ref()));
@ -365,7 +366,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
RetVoid(bcx); RetVoid(bcx);
} }
"transmute" => { "transmute" => {
let (in_type, out_type) = (substs.tys[0], substs.tys[1]); let (in_type, out_type) = (*substs.tys.get(0), *substs.tys.get(1));
let llintype = type_of::type_of(ccx, in_type); let llintype = type_of::type_of(ccx, in_type);
let llouttype = type_of::type_of(ccx, out_type); let llouttype = type_of::type_of(ccx, out_type);
@ -432,11 +433,11 @@ pub fn trans_intrinsic(ccx: @CrateContext,
} }
} }
"needs_drop" => { "needs_drop" => {
let tp_ty = substs.tys[0]; let tp_ty = *substs.tys.get(0);
Ret(bcx, C_bool(ty::type_needs_drop(ccx.tcx, tp_ty))); Ret(bcx, C_bool(ty::type_needs_drop(ccx.tcx, tp_ty)));
} }
"owns_managed" => { "owns_managed" => {
let tp_ty = substs.tys[0]; let tp_ty = *substs.tys.get(0);
Ret(bcx, C_bool(ty::type_contents(ccx.tcx, tp_ty).owns_managed())); Ret(bcx, C_bool(ty::type_contents(ccx.tcx, tp_ty).owns_managed()));
} }
"visit_tydesc" => { "visit_tydesc" => {
@ -452,9 +453,11 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let lladdr = InBoundsGEP(bcx, ptr, [offset]); let lladdr = InBoundsGEP(bcx, ptr, [offset]);
Ret(bcx, lladdr); Ret(bcx, lladdr);
} }
"copy_nonoverlapping_memory" => copy_intrinsic(bcx, false, substs.tys[0]), "copy_nonoverlapping_memory" => {
"copy_memory" => copy_intrinsic(bcx, true, substs.tys[0]), copy_intrinsic(bcx, false, *substs.tys.get(0))
"set_memory" => memset_intrinsic(bcx, substs.tys[0]), }
"copy_memory" => copy_intrinsic(bcx, true, *substs.tys.get(0)),
"set_memory" => memset_intrinsic(bcx, *substs.tys.get(0)),
"ctlz8" => count_zeros_intrinsic(bcx, "llvm.ctlz.i8"), "ctlz8" => count_zeros_intrinsic(bcx, "llvm.ctlz.i8"),
"ctlz16" => count_zeros_intrinsic(bcx, "llvm.ctlz.i16"), "ctlz16" => count_zeros_intrinsic(bcx, "llvm.ctlz.i16"),
"ctlz32" => count_zeros_intrinsic(bcx, "llvm.ctlz.i32"), "ctlz32" => count_zeros_intrinsic(bcx, "llvm.ctlz.i32"),


@ -25,16 +25,16 @@ use middle::trans::expr::{SaveIn, Ignore};
use middle::trans::expr; use middle::trans::expr;
use middle::trans::glue; use middle::trans::glue;
use middle::trans::monomorphize; use middle::trans::monomorphize;
use middle::trans::type_::Type;
use middle::trans::type_of::*; use middle::trans::type_of::*;
use middle::ty; use middle::ty;
use middle::typeck; use middle::typeck;
use util::common::indenter; use util::common::indenter;
use util::ppaux::Repr; use util::ppaux::Repr;
use middle::trans::type_::Type;
use std::c_str::ToCStr; use std::c_str::ToCStr;
use std::vec; use std::vec_ng::Vec;
use std::vec_ng;
use syntax::parse::token; use syntax::parse::token;
use syntax::{ast, ast_map, visit}; use syntax::{ast, ast_map, visit};
@ -202,18 +202,21 @@ pub fn trans_static_method_callee(bcx: &Block,
let vtbls = ccx.maps.vtable_map.borrow().get().get_copy(&expr_id); let vtbls = ccx.maps.vtable_map.borrow().get().get_copy(&expr_id);
let vtbls = resolve_vtables_in_fn_ctxt(bcx.fcx, vtbls); let vtbls = resolve_vtables_in_fn_ctxt(bcx.fcx, vtbls);
match vtbls[bound_index][0] { match vtbls.get(bound_index).get(0) {
typeck::vtable_static(impl_did, ref rcvr_substs, rcvr_origins) => { &typeck::vtable_static(impl_did, ref rcvr_substs, rcvr_origins) => {
assert!(rcvr_substs.iter().all(|t| !ty::type_needs_infer(*t))); assert!(rcvr_substs.iter().all(|t| !ty::type_needs_infer(*t)));
let mth_id = method_with_name(ccx, impl_did, mname); let mth_id = method_with_name(ccx, impl_did, mname);
let (callee_substs, callee_origins) = let (callee_substs, callee_origins) =
combine_impl_and_methods_tps( combine_impl_and_methods_tps(
bcx, mth_id, expr_id, false, bcx, mth_id, expr_id, false,
*rcvr_substs, rcvr_origins); rcvr_substs.as_slice(), rcvr_origins);
let llfn = trans_fn_ref_with_vtables(bcx, mth_id, expr_id, let llfn = trans_fn_ref_with_vtables(bcx,
false, callee_substs, mth_id,
expr_id,
false,
callee_substs.as_slice(),
Some(callee_origins)); Some(callee_origins));
let callee_ty = node_id_type(bcx, expr_id); let callee_ty = node_id_type(bcx, expr_id);
@ -268,14 +271,14 @@ fn trans_monomorphized_callee<'a>(bcx: &'a Block<'a>,
let (callee_substs, callee_origins) = let (callee_substs, callee_origins) =
combine_impl_and_methods_tps( combine_impl_and_methods_tps(
bcx, mth_id, expr_id, true, bcx, mth_id, expr_id, true,
*rcvr_substs, rcvr_origins); rcvr_substs.as_slice(), rcvr_origins);
// translate the function // translate the function
let llfn = trans_fn_ref_with_vtables(bcx, let llfn = trans_fn_ref_with_vtables(bcx,
mth_id, mth_id,
expr_id, expr_id,
true, true,
callee_substs, callee_substs.as_slice(),
Some(callee_origins)); Some(callee_origins));
Callee { bcx: bcx, data: Fn(llfn) } Callee { bcx: bcx, data: Fn(llfn) }
@ -292,7 +295,7 @@ fn combine_impl_and_methods_tps(bcx: &Block,
is_method: bool, is_method: bool,
rcvr_substs: &[ty::t], rcvr_substs: &[ty::t],
rcvr_origins: typeck::vtable_res) rcvr_origins: typeck::vtable_res)
-> (~[ty::t], typeck::vtable_res) { -> (Vec<ty::t> , typeck::vtable_res) {
/*! /*!
* *
* Creates a concatenated set of substitutions which includes * Creates a concatenated set of substitutions which includes
@ -316,8 +319,8 @@ fn combine_impl_and_methods_tps(bcx: &Block,
let node_substs = node_id_type_params(bcx, expr_id, is_method); let node_substs = node_id_type_params(bcx, expr_id, is_method);
debug!("rcvr_substs={:?}", rcvr_substs.repr(ccx.tcx)); debug!("rcvr_substs={:?}", rcvr_substs.repr(ccx.tcx));
let ty_substs let ty_substs
= vec::append(rcvr_substs.to_owned(), = vec_ng::append(Vec::from_slice(rcvr_substs),
node_substs.tailn(node_substs.len() - n_m_tps)); node_substs.tailn(node_substs.len() - n_m_tps));
debug!("n_m_tps={:?}", n_m_tps); debug!("n_m_tps={:?}", n_m_tps);
debug!("node_substs={:?}", node_substs.repr(ccx.tcx)); debug!("node_substs={:?}", node_substs.repr(ccx.tcx));
debug!("ty_substs={:?}", ty_substs.repr(ccx.tcx)); debug!("ty_substs={:?}", ty_substs.repr(ccx.tcx));
@ -327,11 +330,11 @@ fn combine_impl_and_methods_tps(bcx: &Block,
// exist, in which case we need to make them. // exist, in which case we need to make them.
let r_m_origins = match node_vtables(bcx, expr_id) { let r_m_origins = match node_vtables(bcx, expr_id) {
Some(vt) => vt, Some(vt) => vt,
None => @vec::from_elem(node_substs.len(), @~[]) None => @Vec::from_elem(node_substs.len(), @Vec::new())
}; };
let vtables let vtables
= @vec::append(rcvr_origins.to_owned(), = @vec_ng::append(Vec::from_slice(rcvr_origins.as_slice()),
r_m_origins.tailn(r_m_origins.len() - n_m_tps)); r_m_origins.tailn(r_m_origins.len() - n_m_tps));
(ty_substs, vtables) (ty_substs, vtables)
} }
@ -460,7 +463,7 @@ pub fn get_vtable(bcx: &Block,
let _icx = push_ctxt("meth::get_vtable"); let _icx = push_ctxt("meth::get_vtable");
// Check the cache. // Check the cache.
let hash_id = (self_ty, vtable_id(ccx, &origins[0])); let hash_id = (self_ty, vtable_id(ccx, origins.get(0)));
{ {
let vtables = ccx.vtables.borrow(); let vtables = ccx.vtables.borrow();
match vtables.get().find(&hash_id) { match vtables.get().find(&hash_id) {
@ -470,18 +473,25 @@ pub fn get_vtable(bcx: &Block,
} }
// Not in the cache. Actually build it. // Not in the cache. Actually build it.
let methods = origins.flat_map(|origin| { let mut methods = Vec::new();
for origin in origins.iter() {
match *origin { match *origin {
typeck::vtable_static(id, ref substs, sub_vtables) => { typeck::vtable_static(id, ref substs, sub_vtables) => {
emit_vtable_methods(bcx, id, *substs, sub_vtables) let vtable_methods = emit_vtable_methods(bcx,
id,
substs.as_slice(),
sub_vtables);
for vtable_method in vtable_methods.move_iter() {
methods.push(vtable_method)
}
} }
_ => ccx.sess.bug("get_vtable: expected a static origin"), _ => ccx.sess.bug("get_vtable: expected a static origin"),
} }
}); }
// Generate a destructor for the vtable. // Generate a destructor for the vtable.
let drop_glue = glue::get_drop_glue(ccx, self_ty); let drop_glue = glue::get_drop_glue(ccx, self_ty);
let vtable = make_vtable(ccx, drop_glue, methods); let vtable = make_vtable(ccx, drop_glue, methods.as_slice());
let mut vtables = ccx.vtables.borrow_mut(); let mut vtables = ccx.vtables.borrow_mut();
vtables.get().insert(hash_id, vtable); vtables.get().insert(hash_id, vtable);
@ -496,12 +506,12 @@ pub fn make_vtable(ccx: &CrateContext,
unsafe { unsafe {
let _icx = push_ctxt("meth::make_vtable"); let _icx = push_ctxt("meth::make_vtable");
let mut components = ~[drop_glue]; let mut components = vec!(drop_glue);
for &ptr in ptrs.iter() { for &ptr in ptrs.iter() {
components.push(ptr) components.push(ptr)
} }
let tbl = C_struct(components, false); let tbl = C_struct(components.as_slice(), false);
let sym = token::gensym("vtable"); let sym = token::gensym("vtable");
let vt_gvar = format!("vtable{}", sym).with_c_str(|buf| { let vt_gvar = format!("vtable{}", sym).with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, val_ty(tbl).to_ref(), buf) llvm::LLVMAddGlobal(ccx.llmod, val_ty(tbl).to_ref(), buf)
@ -517,7 +527,7 @@ fn emit_vtable_methods(bcx: &Block,
impl_id: ast::DefId, impl_id: ast::DefId,
substs: &[ty::t], substs: &[ty::t],
vtables: typeck::vtable_res) vtables: typeck::vtable_res)
-> ~[ValueRef] { -> Vec<ValueRef> {
let ccx = bcx.ccx(); let ccx = bcx.ccx();
let tcx = ccx.tcx; let tcx = ccx.tcx;
@ -589,7 +599,7 @@ pub fn trans_trait_cast<'a>(bcx: &'a Block<'a>,
*vtable_map.get().get(&id) *vtable_map.get().get(&id)
}; };
let res = resolve_vtables_in_fn_ctxt(bcx.fcx, res); let res = resolve_vtables_in_fn_ctxt(bcx.fcx, res);
res[0] *res.get(0)
}; };
let vtable = get_vtable(bcx, v_ty, origins); let vtable = get_vtable(bcx, v_ty, origins);
let llvtabledest = GEPi(bcx, lldest, [0u, abi::trt_field_vtable]); let llvtabledest = GEPi(bcx, lldest, [0u, abi::trt_field_vtable]);


@ -23,6 +23,7 @@ use middle::ty;
use middle::typeck; use middle::typeck;
use util::ppaux::Repr; use util::ppaux::Repr;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
use syntax::ast_map; use syntax::ast_map;
use syntax::ast_util::local_def; use syntax::ast_util::local_def;
@ -51,7 +52,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,
let mut must_cast = false; let mut must_cast = false;
let psubsts = @param_substs { let psubsts = @param_substs {
tys: real_substs.tps.to_owned(), tys: real_substs.tps.clone(),
vtables: vtables, vtables: vtables,
self_ty: real_substs.self_ty.clone(), self_ty: real_substs.self_ty.clone(),
self_vtables: self_vtables self_vtables: self_vtables
@ -124,7 +125,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,
debug!("monomorphic_fn about to subst into {}", llitem_ty.repr(ccx.tcx)); debug!("monomorphic_fn about to subst into {}", llitem_ty.repr(ccx.tcx));
let mono_ty = match is_static_provided { let mono_ty = match is_static_provided {
None => ty::subst_tps(ccx.tcx, psubsts.tys, None => ty::subst_tps(ccx.tcx, psubsts.tys.as_slice(),
psubsts.self_ty, llitem_ty), psubsts.self_ty, llitem_ty),
Some(num_method_ty_params) => { Some(num_method_ty_params) => {
// Static default methods are a little unfortunate, in // Static default methods are a little unfortunate, in
@ -186,7 +187,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,
let mk_lldecl = || { let mk_lldecl = || {
let lldecl = decl_internal_rust_fn(ccx, false, let lldecl = decl_internal_rust_fn(ccx, false,
f.sig.inputs, f.sig.inputs.as_slice(),
f.sig.output, s); f.sig.output, s);
let mut monomorphized = ccx.monomorphized.borrow_mut(); let mut monomorphized = ccx.monomorphized.borrow_mut();
monomorphized.get().insert(hash_id, lldecl); monomorphized.get().insert(hash_id, lldecl);
@ -299,7 +300,7 @@ pub fn make_mono_id(ccx: @CrateContext,
// FIXME (possibly #5801): Need a lot of type hints to get // FIXME (possibly #5801): Need a lot of type hints to get
// .collect() to work. // .collect() to work.
let substs_iter = substs.self_ty.iter().chain(substs.tys.iter()); let substs_iter = substs.self_ty.iter().chain(substs.tys.iter());
let precise_param_ids: ~[(ty::t, Option<@~[mono_id]>)] = match substs.vtables { let precise_param_ids: Vec<(ty::t, Option<@Vec<mono_id> >)> = match substs.vtables {
Some(vts) => { Some(vts) => {
debug!("make_mono_id vtables={} substs={}", debug!("make_mono_id vtables={} substs={}",
vts.repr(ccx.tcx), substs.tys.repr(ccx.tcx)); vts.repr(ccx.tcx), substs.tys.repr(ccx.tcx));
@ -309,7 +310,7 @@ pub fn make_mono_id(ccx: @CrateContext,
(*subst, if !v.is_empty() { Some(@v) } else { None }) (*subst, if !v.is_empty() { Some(@v) } else { None })
}).collect() }).collect()
} }
None => substs_iter.map(|subst| (*subst, None::<@~[mono_id]>)).collect() None => substs_iter.map(|subst| (*subst, None::<@Vec<mono_id> >)).collect()
}; };
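Concatenation changes shape in the same way: vec::append(lhs.to_owned(), rhs) from the old library becomes vec_ng::append(Vec::from_slice(lhs), rhs), as the meth.rs hunk above shows. A hypothetical sketch:

    use std::vec_ng;
    use std::vec_ng::Vec;

    fn demo(rcvr: &[int], extra: &[int]) -> Vec<int> {
        // was: vec::append(rcvr.to_owned(), extra)
        vec_ng::append(Vec::from_slice(rcvr), extra)
    }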


@ -28,7 +28,8 @@ use util::ppaux::ty_to_str;
use arena::TypedArena; use arena::TypedArena;
use std::libc::c_uint; use std::libc::c_uint;
use std::option::{Some,None}; use std::option::{Some,None};
use std::vec; use std::vec_ng::Vec;
use std::vec_ng;
use syntax::ast::DefId; use syntax::ast::DefId;
use syntax::ast; use syntax::ast;
use syntax::ast_map; use syntax::ast_map;
@ -37,7 +38,7 @@ use syntax::parse::token;
pub struct Reflector<'a> { pub struct Reflector<'a> {
visitor_val: ValueRef, visitor_val: ValueRef,
visitor_methods: @~[@ty::Method], visitor_methods: @Vec<@ty::Method> ,
final_bcx: &'a Block<'a>, final_bcx: &'a Block<'a>,
tydesc_ty: Type, tydesc_ty: Type,
bcx: &'a Block<'a> bcx: &'a Block<'a>
@ -70,12 +71,12 @@ impl<'a> Reflector<'a> {
scratch.val scratch.val
} }
pub fn c_size_and_align(&mut self, t: ty::t) -> ~[ValueRef] { pub fn c_size_and_align(&mut self, t: ty::t) -> Vec<ValueRef> {
let tr = type_of(self.bcx.ccx(), t); let tr = type_of(self.bcx.ccx(), t);
let s = machine::llsize_of_real(self.bcx.ccx(), tr); let s = machine::llsize_of_real(self.bcx.ccx(), tr);
let a = machine::llalign_of_min(self.bcx.ccx(), tr); let a = machine::llalign_of_min(self.bcx.ccx(), tr);
return ~[self.c_uint(s as uint), return vec!(self.c_uint(s as uint),
self.c_uint(a as uint)]; self.c_uint(a as uint));
} }
pub fn c_tydesc(&mut self, t: ty::t) -> ValueRef { pub fn c_tydesc(&mut self, t: ty::t) -> ValueRef {
@ -85,20 +86,20 @@ impl<'a> Reflector<'a> {
PointerCast(bcx, static_ti.tydesc, self.tydesc_ty.ptr_to()) PointerCast(bcx, static_ti.tydesc, self.tydesc_ty.ptr_to())
} }
pub fn c_mt(&mut self, mt: &ty::mt) -> ~[ValueRef] { pub fn c_mt(&mut self, mt: &ty::mt) -> Vec<ValueRef> {
~[self.c_uint(mt.mutbl as uint), vec!(self.c_uint(mt.mutbl as uint),
self.c_tydesc(mt.ty)] self.c_tydesc(mt.ty))
} }
pub fn visit(&mut self, ty_name: &str, args: &[ValueRef]) { pub fn visit(&mut self, ty_name: &str, args: &[ValueRef]) {
let fcx = self.bcx.fcx; let fcx = self.bcx.fcx;
let tcx = self.bcx.tcx(); let tcx = self.bcx.tcx();
let mth_idx = ty::method_idx( let mth_idx = ty::method_idx(token::str_to_ident(~"visit_" + ty_name),
token::str_to_ident(~"visit_" + ty_name), self.visitor_methods.as_slice()).expect(
*self.visitor_methods).expect(format!("couldn't find visit method \ format!("couldn't find visit method for {}", ty_name));
for {}", ty_name));
let mth_ty = let mth_ty =
ty::mk_bare_fn(tcx, self.visitor_methods[mth_idx].fty.clone()); ty::mk_bare_fn(tcx,
self.visitor_methods.get(mth_idx).fty.clone());
let v = self.visitor_val; let v = self.visitor_val;
debug!("passing {} args:", args.len()); debug!("passing {} args:", args.len());
let mut bcx = self.bcx; let mut bcx = self.bcx;
@ -130,15 +131,16 @@ impl<'a> Reflector<'a> {
pub fn vstore_name_and_extra(&mut self, pub fn vstore_name_and_extra(&mut self,
t: ty::t, t: ty::t,
vstore: ty::vstore) vstore: ty::vstore)
-> (~str, ~[ValueRef]) { -> (~str, Vec<ValueRef> ) {
match vstore { match vstore {
ty::vstore_fixed(n) => { ty::vstore_fixed(n) => {
let extra = vec::append(~[self.c_uint(n)], let extra = vec_ng::append(vec!(self.c_uint(n)),
self.c_size_and_align(t)); self.c_size_and_align(t)
.as_slice());
(~"fixed", extra) (~"fixed", extra)
} }
ty::vstore_slice(_) => (~"slice", ~[]), ty::vstore_slice(_) => (~"slice", Vec::new()),
ty::vstore_uniq => (~"uniq", ~[]), ty::vstore_uniq => (~"uniq", Vec::new()),
} }
} }
@ -172,18 +174,18 @@ impl<'a> Reflector<'a> {
ty::ty_unboxed_vec(ref mt) => { ty::ty_unboxed_vec(ref mt) => {
let values = self.c_mt(mt); let values = self.c_mt(mt);
self.visit("vec", values) self.visit("vec", values.as_slice())
} }
// Should rename to str_*/vec_*. // Should rename to str_*/vec_*.
ty::ty_str(vst) => { ty::ty_str(vst) => {
let (name, extra) = self.vstore_name_and_extra(t, vst); let (name, extra) = self.vstore_name_and_extra(t, vst);
self.visit(~"estr_" + name, extra) self.visit(~"estr_" + name, extra.as_slice())
} }
ty::ty_vec(ref mt, vst) => { ty::ty_vec(ref mt, vst) => {
let (name, extra) = self.vstore_name_and_extra(t, vst); let (name, extra) = self.vstore_name_and_extra(t, vst);
let extra = extra + self.c_mt(mt); let extra = vec_ng::append(extra, self.c_mt(mt).as_slice());
self.visit(~"evec_" + name, extra) self.visit(~"evec_" + name, extra.as_slice())
} }
// Should remove mt from box and uniq. // Should remove mt from box and uniq.
ty::ty_box(typ) => { ty::ty_box(typ) => {
@ -191,31 +193,31 @@ impl<'a> Reflector<'a> {
ty: typ, ty: typ,
mutbl: ast::MutImmutable, mutbl: ast::MutImmutable,
}); });
self.visit("box", extra) self.visit("box", extra.as_slice())
} }
ty::ty_uniq(typ) => { ty::ty_uniq(typ) => {
let extra = self.c_mt(&ty::mt { let extra = self.c_mt(&ty::mt {
ty: typ, ty: typ,
mutbl: ast::MutImmutable, mutbl: ast::MutImmutable,
}); });
self.visit("uniq", extra) self.visit("uniq", extra.as_slice())
} }
ty::ty_ptr(ref mt) => { ty::ty_ptr(ref mt) => {
let extra = self.c_mt(mt); let extra = self.c_mt(mt);
self.visit("ptr", extra) self.visit("ptr", extra.as_slice())
} }
ty::ty_rptr(_, ref mt) => { ty::ty_rptr(_, ref mt) => {
let extra = self.c_mt(mt); let extra = self.c_mt(mt);
self.visit("rptr", extra) self.visit("rptr", extra.as_slice())
} }
ty::ty_tup(ref tys) => { ty::ty_tup(ref tys) => {
let extra = ~[self.c_uint(tys.len())] let extra = vec_ng::append(vec!(self.c_uint(tys.len())),
+ self.c_size_and_align(t); self.c_size_and_align(t).as_slice());
self.bracketed("tup", extra, |this| { self.bracketed("tup", extra.as_slice(), |this| {
for (i, t) in tys.iter().enumerate() { for (i, t) in tys.iter().enumerate() {
let extra = ~[this.c_uint(i), this.c_tydesc(*t)]; let extra = vec!(this.c_uint(i), this.c_tydesc(*t));
this.visit("tup_field", extra); this.visit("tup_field", extra.as_slice());
} }
}) })
} }
@ -226,13 +228,13 @@ impl<'a> Reflector<'a> {
let pureval = ast_purity_constant(fty.purity); let pureval = ast_purity_constant(fty.purity);
let sigilval = ast_sigil_constant(fty.sigil); let sigilval = ast_sigil_constant(fty.sigil);
let retval = if ty::type_is_bot(fty.sig.output) {0u} else {1u}; let retval = if ty::type_is_bot(fty.sig.output) {0u} else {1u};
let extra = ~[self.c_uint(pureval), let extra = vec!(self.c_uint(pureval),
self.c_uint(sigilval), self.c_uint(sigilval),
self.c_uint(fty.sig.inputs.len()), self.c_uint(fty.sig.inputs.len()),
self.c_uint(retval)]; self.c_uint(retval));
self.visit("enter_fn", extra); self.visit("enter_fn", extra.as_slice());
self.visit_sig(retval, &fty.sig); self.visit_sig(retval, &fty.sig);
self.visit("leave_fn", extra); self.visit("leave_fn", extra.as_slice());
} }
// FIXME (#2594): fetch constants out of intrinsic:: for the // FIXME (#2594): fetch constants out of intrinsic:: for the
@ -241,37 +243,37 @@ impl<'a> Reflector<'a> {
let pureval = ast_purity_constant(fty.purity); let pureval = ast_purity_constant(fty.purity);
let sigilval = 0u; let sigilval = 0u;
let retval = if ty::type_is_bot(fty.sig.output) {0u} else {1u}; let retval = if ty::type_is_bot(fty.sig.output) {0u} else {1u};
let extra = ~[self.c_uint(pureval), let extra = vec!(self.c_uint(pureval),
self.c_uint(sigilval), self.c_uint(sigilval),
self.c_uint(fty.sig.inputs.len()), self.c_uint(fty.sig.inputs.len()),
self.c_uint(retval)]; self.c_uint(retval));
self.visit("enter_fn", extra); self.visit("enter_fn", extra.as_slice());
self.visit_sig(retval, &fty.sig); self.visit_sig(retval, &fty.sig);
self.visit("leave_fn", extra); self.visit("leave_fn", extra.as_slice());
} }
ty::ty_struct(did, ref substs) => { ty::ty_struct(did, ref substs) => {
let fields = ty::struct_fields(tcx, did, substs); let fields = ty::struct_fields(tcx, did, substs);
let mut named_fields = false; let mut named_fields = false;
if !fields.is_empty() { if !fields.is_empty() {
named_fields = named_fields = fields.get(0).ident.name !=
fields[0].ident.name != special_idents::unnamed_field.name; special_idents::unnamed_field.name;
} }
let extra = ~[ let extra = vec_ng::append(vec!(
self.c_slice(token::intern_and_get_ident(ty_to_str(tcx, self.c_slice(token::intern_and_get_ident(ty_to_str(tcx,
t))), t))),
self.c_bool(named_fields), self.c_bool(named_fields),
self.c_uint(fields.len()) self.c_uint(fields.len())
] + self.c_size_and_align(t); ), self.c_size_and_align(t).as_slice());
self.bracketed("class", extra, |this| { self.bracketed("class", extra.as_slice(), |this| {
for (i, field) in fields.iter().enumerate() { for (i, field) in fields.iter().enumerate() {
let extra = ~[ let extra = vec_ng::append(vec!(
this.c_uint(i), this.c_uint(i),
this.c_slice(token::get_ident(field.ident)), this.c_slice(token::get_ident(field.ident)),
this.c_bool(named_fields) this.c_bool(named_fields)
] + this.c_mt(&field.mt); ), this.c_mt(&field.mt).as_slice());
this.visit("class_field", extra); this.visit("class_field", extra.as_slice());
} }
}) })
} }
@ -319,25 +321,30 @@ impl<'a> Reflector<'a> {
llfdecl llfdecl
}; };
let enum_args = ~[self.c_uint(variants.len()), make_get_disr()] let enum_args = vec_ng::append(vec!(self.c_uint(variants.len()),
+ self.c_size_and_align(t); make_get_disr()),
self.bracketed("enum", enum_args, |this| { self.c_size_and_align(t)
.as_slice());
self.bracketed("enum", enum_args.as_slice(), |this| {
for (i, v) in variants.iter().enumerate() { for (i, v) in variants.iter().enumerate() {
let name = token::get_ident(v.name); let name = token::get_ident(v.name);
let variant_args = ~[this.c_uint(i), let variant_args = vec!(this.c_uint(i),
C_u64(v.disr_val), C_u64(v.disr_val),
this.c_uint(v.args.len()), this.c_uint(v.args.len()),
this.c_slice(name)]; this.c_slice(name));
this.bracketed("enum_variant", variant_args, |this| { this.bracketed("enum_variant",
variant_args.as_slice(),
|this| {
for (j, a) in v.args.iter().enumerate() { for (j, a) in v.args.iter().enumerate() {
let bcx = this.bcx; let bcx = this.bcx;
let null = C_null(llptrty); let null = C_null(llptrty);
let ptr = adt::trans_field_ptr(bcx, repr, null, v.disr_val, j); let ptr = adt::trans_field_ptr(bcx, repr, null, v.disr_val, j);
let offset = p2i(ccx, ptr); let offset = p2i(ccx, ptr);
let field_args = ~[this.c_uint(j), let field_args = vec!(this.c_uint(j),
offset, offset,
this.c_tydesc(*a)]; this.c_tydesc(*a));
this.visit("enum_variant_field", field_args); this.visit("enum_variant_field",
field_args.as_slice());
} }
}) })
} }
@ -355,8 +362,8 @@ impl<'a> Reflector<'a> {
ty::ty_infer(_) => self.leaf("infer"), ty::ty_infer(_) => self.leaf("infer"),
ty::ty_err => self.leaf("err"), ty::ty_err => self.leaf("err"),
ty::ty_param(ref p) => { ty::ty_param(ref p) => {
let extra = ~[self.c_uint(p.idx)]; let extra = vec!(self.c_uint(p.idx));
self.visit("param", extra) self.visit("param", extra.as_slice())
} }
ty::ty_self(..) => self.leaf("self") ty::ty_self(..) => self.leaf("self")
} }
@ -365,15 +372,15 @@ impl<'a> Reflector<'a> {
pub fn visit_sig(&mut self, retval: uint, sig: &ty::FnSig) { pub fn visit_sig(&mut self, retval: uint, sig: &ty::FnSig) {
for (i, arg) in sig.inputs.iter().enumerate() { for (i, arg) in sig.inputs.iter().enumerate() {
let modeval = 5u; // "by copy" let modeval = 5u; // "by copy"
let extra = ~[self.c_uint(i), let extra = vec!(self.c_uint(i),
self.c_uint(modeval), self.c_uint(modeval),
self.c_tydesc(*arg)]; self.c_tydesc(*arg));
self.visit("fn_input", extra); self.visit("fn_input", extra.as_slice());
} }
let extra = ~[self.c_uint(retval), let extra = vec!(self.c_uint(retval),
self.c_bool(sig.variadic), self.c_bool(sig.variadic),
self.c_tydesc(sig.output)]; self.c_tydesc(sig.output));
self.visit("fn_output", extra); self.visit("fn_output", extra.as_slice());
} }
} }
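The Reflector changes above follow two recurring moves: vector concatenation `~[head] + tail` becomes `vec_ng::append(vec!(head), tail.as_slice())`, and owned Vecs are handed to the slice-taking visit()/bracketed() helpers via `.as_slice()`. A small sketch of both, assuming the pre-1.0 std::vec_ng API; the names and uint values are illustrative only:

    use std::vec_ng;
    use std::vec_ng::Vec;

    fn takes_slice(args: &[uint]) -> uint {
        args.len()
    }

    fn build_extra(size_and_align: &[uint]) -> uint {
        // Old: ~[1u] + size_and_align; new: append an owned Vec and a slice.
        let extra = vec_ng::append(vec!(1u), size_and_align);
        // APIs that still take &[..] receive the Vec as a slice.
        takes_slice(extra.as_slice())
    }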

View File

@ -20,8 +20,9 @@ use syntax::ast;
use syntax::abi::{Architecture, X86, X86_64, Arm, Mips}; use syntax::abi::{Architecture, X86, X86_64, Arm, Mips};
use std::c_str::ToCStr; use std::c_str::ToCStr;
use std::vec;
use std::cast; use std::cast;
use std::vec;
use std::vec_ng::Vec;
use std::libc::{c_uint}; use std::libc::{c_uint};
@ -295,14 +296,14 @@ impl Type {
} }
} }
pub fn field_types(&self) -> ~[Type] { pub fn field_types(&self) -> Vec<Type> {
unsafe { unsafe {
let n_elts = llvm::LLVMCountStructElementTypes(self.to_ref()) as uint; let n_elts = llvm::LLVMCountStructElementTypes(self.to_ref()) as uint;
if n_elts == 0 { if n_elts == 0 {
return ~[]; return Vec::new();
} }
let mut elts = vec::from_elem(n_elts, 0 as TypeRef); let mut elts = Vec::from_elem(n_elts, 0 as TypeRef);
llvm::LLVMGetStructElementTypes(self.to_ref(), &mut elts[0]); llvm::LLVMGetStructElementTypes(self.to_ref(), elts.get_mut(0));
cast::transmute(elts) cast::transmute(elts)
} }
} }
@ -311,10 +312,10 @@ impl Type {
ty!(llvm::LLVMGetReturnType(self.to_ref())) ty!(llvm::LLVMGetReturnType(self.to_ref()))
} }
pub fn func_params(&self) -> ~[Type] { pub fn func_params(&self) -> Vec<Type> {
unsafe { unsafe {
let n_args = llvm::LLVMCountParamTypes(self.to_ref()) as uint; let n_args = llvm::LLVMCountParamTypes(self.to_ref()) as uint;
let args = vec::from_elem(n_args, 0 as TypeRef); let args = Vec::from_elem(n_args, 0 as TypeRef);
llvm::LLVMGetParamTypes(self.to_ref(), args.as_ptr()); llvm::LLVMGetParamTypes(self.to_ref(), args.as_ptr());
cast::transmute(args) cast::transmute(args)
} }
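field_types() and func_params() above swap `vec::from_elem` for `Vec::from_elem` and replace the `&mut elts[0]` out-pointer with `elts.get_mut(0)`. A sketch of the same buffer-filling pattern (no FFI call here, just a placeholder write); again this assumes the std::vec_ng API of this era:

    use std::vec_ng::Vec;

    fn filled_buffer(n: uint) -> Vec<uint> {
        if n == 0 {
            return Vec::new();       // same early-out as field_types()
        }
        let mut buf = Vec::from_elem(n, 0u);
        // get_mut(0) yields &mut to the first element, standing in for
        // &mut buf[0] where an FFI call would write through the pointer.
        *buf.get_mut(0) = 1u;
        buf
    }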

View File

@ -19,6 +19,7 @@ use util::ppaux::Repr;
use middle::trans::type_::Type; use middle::trans::type_::Type;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
use syntax::opt_vec; use syntax::opt_vec;
@ -41,7 +42,7 @@ pub fn type_of_explicit_arg(ccx: &CrateContext, arg_ty: ty::t) -> Type {
pub fn type_of_rust_fn(cx: &CrateContext, has_env: bool, pub fn type_of_rust_fn(cx: &CrateContext, has_env: bool,
inputs: &[ty::t], output: ty::t) -> Type { inputs: &[ty::t], output: ty::t) -> Type {
let mut atys: ~[Type] = ~[]; let mut atys: Vec<Type> = Vec::new();
// Arg 0: Output pointer. // Arg 0: Output pointer.
// (if the output type is non-immediate) // (if the output type is non-immediate)
@ -62,9 +63,9 @@ pub fn type_of_rust_fn(cx: &CrateContext, has_env: bool,
// Use the output as the actual return value if it's immediate. // Use the output as the actual return value if it's immediate.
if use_out_pointer || return_type_is_void(cx, output) { if use_out_pointer || return_type_is_void(cx, output) {
Type::func(atys, &Type::void()) Type::func(atys.as_slice(), &Type::void())
} else { } else {
Type::func(atys, &lloutputtype) Type::func(atys.as_slice(), &lloutputtype)
} }
} }
@ -72,11 +73,14 @@ pub fn type_of_rust_fn(cx: &CrateContext, has_env: bool,
pub fn type_of_fn_from_ty(cx: &CrateContext, fty: ty::t) -> Type { pub fn type_of_fn_from_ty(cx: &CrateContext, fty: ty::t) -> Type {
match ty::get(fty).sty { match ty::get(fty).sty {
ty::ty_closure(ref f) => { ty::ty_closure(ref f) => {
type_of_rust_fn(cx, true, f.sig.inputs, f.sig.output) type_of_rust_fn(cx, true, f.sig.inputs.as_slice(), f.sig.output)
} }
ty::ty_bare_fn(ref f) => { ty::ty_bare_fn(ref f) => {
if f.abis.is_rust() || f.abis.is_intrinsic() { if f.abis.is_rust() || f.abis.is_intrinsic() {
type_of_rust_fn(cx, false, f.sig.inputs, f.sig.output) type_of_rust_fn(cx,
false,
f.sig.inputs.as_slice(),
f.sig.output)
} else { } else {
foreign::lltype_for_foreign_fn(cx, fty) foreign::lltype_for_foreign_fn(cx, fty)
} }
@ -216,7 +220,7 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type {
// avoids creating more than one copy of the enum when one // avoids creating more than one copy of the enum when one
// of the enum's variants refers to the enum itself. // of the enum's variants refers to the enum itself.
let repr = adt::represent_type(cx, t); let repr = adt::represent_type(cx, t);
let name = llvm_type_name(cx, an_enum, did, substs.tps); let name = llvm_type_name(cx, an_enum, did, substs.tps.as_slice());
adt::incomplete_type_of(cx, repr, name) adt::incomplete_type_of(cx, repr, name)
} }
ty::ty_box(typ) => { ty::ty_box(typ) => {
@ -277,7 +281,10 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type {
// in *after* placing it into the type cache. This prevents // in *after* placing it into the type cache. This prevents
// infinite recursion with recursive struct types. // infinite recursion with recursive struct types.
let repr = adt::represent_type(cx, t); let repr = adt::represent_type(cx, t);
let name = llvm_type_name(cx, a_struct, did, substs.tps); let name = llvm_type_name(cx,
a_struct,
did,
substs.tps.as_slice());
adt::incomplete_type_of(cx, repr, name) adt::incomplete_type_of(cx, repr, name)
} }
} }
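type_of_rust_fn above keeps its accumulation loop but switches the accumulator to `Vec<Type>` and passes `atys.as_slice()` to Type::func. The general shape, sketched with placeholder types and a stand-in for the constructor:

    use std::vec_ng::Vec;

    fn make_func(arg_tys: &[uint]) -> uint {
        arg_tys.len()   // stands in for Type::func(..)
    }

    fn assemble(inputs: &[uint]) -> uint {
        let mut atys: Vec<uint> = Vec::new();
        atys.push(0u);              // the out-pointer / env slots
        for t in inputs.iter() {
            atys.push(*t);
        }
        make_func(atys.as_slice())  // constructor still takes a slice
    }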

View File

@ -40,7 +40,8 @@ use std::fmt;
use std::hash::{Hash, sip}; use std::hash::{Hash, sip};
use std::ops; use std::ops;
use std::rc::Rc; use std::rc::Rc;
use std::vec; use std::vec_ng::Vec;
use std::vec_ng;
use collections::{HashMap, HashSet}; use collections::{HashMap, HashSet};
use syntax::ast::*; use syntax::ast::*;
use syntax::ast_util::{is_local, lit_is_str}; use syntax::ast_util::{is_local, lit_is_str};
@ -122,8 +123,7 @@ impl Method {
pub struct Impl { pub struct Impl {
did: DefId, did: DefId,
ident: Ident, ident: Ident,
methods: ~[@Method] methods: Vec<@Method> }
}
#[deriving(Clone, Eq, Hash)] #[deriving(Clone, Eq, Hash)]
pub struct mt { pub struct mt {
@ -280,16 +280,16 @@ pub struct ctxt_ {
// of this node. This only applies to nodes that refer to entities // of this node. This only applies to nodes that refer to entities
// parameterized by type parameters, such as generic fns, types, or // parameterized by type parameters, such as generic fns, types, or
// other items. // other items.
node_type_substs: RefCell<NodeMap<~[t]>>, node_type_substs: RefCell<NodeMap<Vec<t>>>,
// Maps from a method to the method "descriptor" // Maps from a method to the method "descriptor"
methods: RefCell<DefIdMap<@Method>>, methods: RefCell<DefIdMap<@Method>>,
// Maps from a trait def-id to a list of the def-ids of its methods // Maps from a trait def-id to a list of the def-ids of its methods
trait_method_def_ids: RefCell<DefIdMap<@~[DefId]>>, trait_method_def_ids: RefCell<DefIdMap<@Vec<DefId> >>,
// A cache for the trait_methods() routine // A cache for the trait_methods() routine
trait_methods_cache: RefCell<DefIdMap<@~[@Method]>>, trait_methods_cache: RefCell<DefIdMap<@Vec<@Method> >>,
impl_trait_cache: RefCell<DefIdMap<Option<@ty::TraitRef>>>, impl_trait_cache: RefCell<DefIdMap<Option<@ty::TraitRef>>>,
@ -305,14 +305,14 @@ pub struct ctxt_ {
needs_unwind_cleanup_cache: RefCell<HashMap<t, bool>>, needs_unwind_cleanup_cache: RefCell<HashMap<t, bool>>,
tc_cache: RefCell<HashMap<uint, TypeContents>>, tc_cache: RefCell<HashMap<uint, TypeContents>>,
ast_ty_to_ty_cache: RefCell<NodeMap<ast_ty_to_ty_cache_entry>>, ast_ty_to_ty_cache: RefCell<NodeMap<ast_ty_to_ty_cache_entry>>,
enum_var_cache: RefCell<DefIdMap<@~[@VariantInfo]>>, enum_var_cache: RefCell<DefIdMap<@Vec<@VariantInfo> >>,
ty_param_defs: RefCell<NodeMap<TypeParameterDef>>, ty_param_defs: RefCell<NodeMap<TypeParameterDef>>,
adjustments: RefCell<NodeMap<@AutoAdjustment>>, adjustments: RefCell<NodeMap<@AutoAdjustment>>,
normalized_cache: RefCell<HashMap<t, t>>, normalized_cache: RefCell<HashMap<t, t>>,
lang_items: @middle::lang_items::LanguageItems, lang_items: @middle::lang_items::LanguageItems,
// A mapping of fake provided method def_ids to the default implementation // A mapping of fake provided method def_ids to the default implementation
provided_method_sources: RefCell<DefIdMap<ast::DefId>>, provided_method_sources: RefCell<DefIdMap<ast::DefId>>,
supertraits: RefCell<DefIdMap<@~[@TraitRef]>>, supertraits: RefCell<DefIdMap<@Vec<@TraitRef> >>,
// Maps from def-id of a type or region parameter to its // Maps from def-id of a type or region parameter to its
// (inferred) variance. // (inferred) variance.
@ -328,12 +328,12 @@ pub struct ctxt_ {
destructors: RefCell<DefIdSet>, destructors: RefCell<DefIdSet>,
// Maps a trait onto a list of impls of that trait. // Maps a trait onto a list of impls of that trait.
trait_impls: RefCell<DefIdMap<@RefCell<~[@Impl]>>>, trait_impls: RefCell<DefIdMap<@RefCell<Vec<@Impl> >>>,
// Maps a def_id of a type to a list of its inherent impls. // Maps a def_id of a type to a list of its inherent impls.
// Contains implementations of methods that are inherent to a type. // Contains implementations of methods that are inherent to a type.
// Methods in these implementations don't need to be exported. // Methods in these implementations don't need to be exported.
inherent_impls: RefCell<DefIdMap<@RefCell<~[@Impl]>>>, inherent_impls: RefCell<DefIdMap<@RefCell<Vec<@Impl> >>>,
// Maps a def_id of an impl to an Impl structure. // Maps a def_id of an impl to an Impl structure.
// Note that this contains all of the impls that we know about, // Note that this contains all of the impls that we know about,
@ -461,7 +461,7 @@ pub struct ClosureTy {
#[deriving(Clone, Eq, Hash)] #[deriving(Clone, Eq, Hash)]
pub struct FnSig { pub struct FnSig {
binder_id: ast::NodeId, binder_id: ast::NodeId,
inputs: ~[t], inputs: Vec<t>,
output: t, output: t,
variadic: bool variadic: bool
} }
@ -684,7 +684,7 @@ pub enum RegionSubsts {
#[deriving(Clone, Eq, Hash)] #[deriving(Clone, Eq, Hash)]
pub struct substs { pub struct substs {
self_ty: Option<ty::t>, self_ty: Option<ty::t>,
tps: ~[t], tps: Vec<t>,
regions: RegionSubsts, regions: RegionSubsts,
} }
@ -756,7 +756,7 @@ pub enum sty {
ty_closure(ClosureTy), ty_closure(ClosureTy),
ty_trait(DefId, substs, TraitStore, ast::Mutability, BuiltinBounds), ty_trait(DefId, substs, TraitStore, ast::Mutability, BuiltinBounds),
ty_struct(DefId, substs), ty_struct(DefId, substs),
ty_tup(~[t]), ty_tup(Vec<t>),
ty_param(param_ty), // type parameter ty_param(param_ty), // type parameter
ty_self(DefId), /* special, implicit `self` type parameter; ty_self(DefId), /* special, implicit `self` type parameter;
@ -836,8 +836,7 @@ pub enum type_err {
#[deriving(Eq, Hash)] #[deriving(Eq, Hash)]
pub struct ParamBounds { pub struct ParamBounds {
builtin_bounds: BuiltinBounds, builtin_bounds: BuiltinBounds,
trait_bounds: ~[@TraitRef] trait_bounds: Vec<@TraitRef> }
}
pub type BuiltinBounds = EnumSet<BuiltinBound>; pub type BuiltinBounds = EnumSet<BuiltinBound>;
@ -1006,10 +1005,10 @@ pub struct RegionParameterDef {
#[deriving(Clone)] #[deriving(Clone)]
pub struct Generics { pub struct Generics {
/// List of type parameters declared on the item. /// List of type parameters declared on the item.
type_param_defs: Rc<~[TypeParameterDef]>, type_param_defs: Rc<Vec<TypeParameterDef> >,
/// List of region parameters declared on the item. /// List of region parameters declared on the item.
region_param_defs: Rc<~[RegionParameterDef]>, region_param_defs: Rc<Vec<RegionParameterDef> >,
} }
impl Generics { impl Generics {
@ -1048,7 +1047,7 @@ pub struct ParameterEnvironment {
self_param_bound: Option<@TraitRef>, self_param_bound: Option<@TraitRef>,
/// Bounds on each numbered type parameter /// Bounds on each numbered type parameter
type_param_bounds: ~[ParamBounds], type_param_bounds: Vec<ParamBounds> ,
} }
/// A polytype. /// A polytype.
@ -1412,7 +1411,7 @@ pub fn mk_mut_unboxed_vec(cx: ctxt, ty: t) -> t {
mk_t(cx, ty_unboxed_vec(mt {ty: ty, mutbl: ast::MutImmutable})) mk_t(cx, ty_unboxed_vec(mt {ty: ty, mutbl: ast::MutImmutable}))
} }
pub fn mk_tup(cx: ctxt, ts: ~[t]) -> t { mk_t(cx, ty_tup(ts)) } pub fn mk_tup(cx: ctxt, ts: Vec<t>) -> t { mk_t(cx, ty_tup(ts)) }
pub fn mk_closure(cx: ctxt, fty: ClosureTy) -> t { pub fn mk_closure(cx: ctxt, fty: ClosureTy) -> t {
mk_t(cx, ty_closure(fty)) mk_t(cx, ty_closure(fty))
@ -1433,7 +1432,7 @@ pub fn mk_ctor_fn(cx: ctxt,
abis: AbiSet::Rust(), abis: AbiSet::Rust(),
sig: FnSig { sig: FnSig {
binder_id: binder_id, binder_id: binder_id,
inputs: input_args, inputs: Vec::from_slice(input_args),
output: output, output: output,
variadic: false variadic: false
} }
@ -1667,7 +1666,7 @@ pub fn simd_type(cx: ctxt, ty: t) -> t {
match get(ty).sty { match get(ty).sty {
ty_struct(did, ref substs) => { ty_struct(did, ref substs) => {
let fields = lookup_struct_fields(cx, did); let fields = lookup_struct_fields(cx, did);
lookup_field_type(cx, did, fields[0].id, substs) lookup_field_type(cx, did, fields.get(0).id, substs)
} }
_ => fail!("simd_type called on invalid type") _ => fail!("simd_type called on invalid type")
} }
@ -1685,7 +1684,7 @@ pub fn simd_size(cx: ctxt, ty: t) -> uint {
pub fn get_element_type(ty: t, i: uint) -> t { pub fn get_element_type(ty: t, i: uint) -> t {
match get(ty).sty { match get(ty).sty {
ty_tup(ref ts) => return ts[i], ty_tup(ref ts) => return *ts.get(i),
_ => fail!("get_element_type called on invalid type") _ => fail!("get_element_type called on invalid type")
} }
} }
@ -2198,7 +2197,8 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
ty_struct(did, ref substs) => { ty_struct(did, ref substs) => {
let flds = struct_fields(cx, did, substs); let flds = struct_fields(cx, did, substs);
let mut res = let mut res =
TypeContents::union(flds, |f| tc_mt(cx, f.mt, cache)); TypeContents::union(flds.as_slice(),
|f| tc_mt(cx, f.mt, cache));
if ty::has_dtor(cx, did) { if ty::has_dtor(cx, did) {
res = res | TC::OwnsDtor; res = res | TC::OwnsDtor;
} }
@ -2206,14 +2206,16 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
} }
ty_tup(ref tys) => { ty_tup(ref tys) => {
TypeContents::union(*tys, |ty| tc_ty(cx, *ty, cache)) TypeContents::union(tys.as_slice(),
|ty| tc_ty(cx, *ty, cache))
} }
ty_enum(did, ref substs) => { ty_enum(did, ref substs) => {
let variants = substd_enum_variants(cx, did, substs); let variants = substd_enum_variants(cx, did, substs);
let res = let res =
TypeContents::union(variants, |variant| { TypeContents::union(variants.as_slice(), |variant| {
TypeContents::union(variant.args, |arg_ty| { TypeContents::union(variant.args.as_slice(),
|arg_ty| {
tc_ty(cx, *arg_ty, cache) tc_ty(cx, *arg_ty, cache)
}) })
}); });
@ -2233,7 +2235,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
let tp_def = ty_param_defs.get().get(&p.def_id.node); let tp_def = ty_param_defs.get().get(&p.def_id.node);
kind_bounds_to_contents(cx, kind_bounds_to_contents(cx,
tp_def.bounds.builtin_bounds, tp_def.bounds.builtin_bounds,
tp_def.bounds.trait_bounds) tp_def.bounds.trait_bounds.as_slice())
} }
ty_self(def_id) => { ty_self(def_id) => {
@ -2391,7 +2393,7 @@ pub fn type_moves_by_default(cx: ctxt, ty: t) -> bool {
// True if instantiating an instance of `r_ty` requires an instance of `r_ty`. // True if instantiating an instance of `r_ty` requires an instance of `r_ty`.
pub fn is_instantiable(cx: ctxt, r_ty: t) -> bool { pub fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
fn type_requires(cx: ctxt, seen: &mut ~[DefId], fn type_requires(cx: ctxt, seen: &mut Vec<DefId> ,
r_ty: t, ty: t) -> bool { r_ty: t, ty: t) -> bool {
debug!("type_requires({}, {})?", debug!("type_requires({}, {})?",
::util::ppaux::ty_to_str(cx, r_ty), ::util::ppaux::ty_to_str(cx, r_ty),
@ -2409,7 +2411,7 @@ pub fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
return r; return r;
} }
fn subtypes_require(cx: ctxt, seen: &mut ~[DefId], fn subtypes_require(cx: ctxt, seen: &mut Vec<DefId> ,
r_ty: t, ty: t) -> bool { r_ty: t, ty: t) -> bool {
debug!("subtypes_require({}, {})?", debug!("subtypes_require({}, {})?",
::util::ppaux::ty_to_str(cx, r_ty), ::util::ppaux::ty_to_str(cx, r_ty),
@ -2497,7 +2499,7 @@ pub fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
return r; return r;
} }
let mut seen = ~[]; let mut seen = Vec::new();
!subtypes_require(cx, &mut seen, r_ty, r_ty) !subtypes_require(cx, &mut seen, r_ty, r_ty)
} }
@ -2518,7 +2520,7 @@ pub enum Representability {
pub fn is_type_representable(cx: ctxt, ty: t) -> Representability { pub fn is_type_representable(cx: ctxt, ty: t) -> Representability {
// Iterate until something non-representable is found // Iterate until something non-representable is found
fn find_nonrepresentable<It: Iterator<t>>(cx: ctxt, seen: &mut ~[DefId], fn find_nonrepresentable<It: Iterator<t>>(cx: ctxt, seen: &mut Vec<DefId> ,
mut iter: It) -> Representability { mut iter: It) -> Representability {
for ty in iter { for ty in iter {
let r = type_structurally_recursive(cx, seen, ty); let r = type_structurally_recursive(cx, seen, ty);
@ -2531,7 +2533,7 @@ pub fn is_type_representable(cx: ctxt, ty: t) -> Representability {
// Does the type `ty` directly (without indirection through a pointer) // Does the type `ty` directly (without indirection through a pointer)
// contain any types on stack `seen`? // contain any types on stack `seen`?
fn type_structurally_recursive(cx: ctxt, seen: &mut ~[DefId], fn type_structurally_recursive(cx: ctxt, seen: &mut Vec<DefId> ,
ty: t) -> Representability { ty: t) -> Representability {
debug!("type_structurally_recursive: {}", debug!("type_structurally_recursive: {}",
::util::ppaux::ty_to_str(cx, ty)); ::util::ppaux::ty_to_str(cx, ty));
@ -2597,7 +2599,7 @@ pub fn is_type_representable(cx: ctxt, ty: t) -> Representability {
// To avoid a stack overflow when checking an enum variant or struct that // To avoid a stack overflow when checking an enum variant or struct that
// contains a different, structurally recursive type, maintain a stack // contains a different, structurally recursive type, maintain a stack
// of seen types and check recursion for each of them (issues #3008, #3779). // of seen types and check recursion for each of them (issues #3008, #3779).
let mut seen: ~[DefId] = ~[]; let mut seen: Vec<DefId> = Vec::new();
type_structurally_recursive(cx, &mut seen, ty) type_structurally_recursive(cx, &mut seen, ty)
} }
@ -2788,10 +2790,10 @@ pub fn node_id_to_type_opt(cx: ctxt, id: ast::NodeId) -> Option<t> {
} }
// FIXME(pcwalton): Makes a copy, bleh. Probably better to not do that. // FIXME(pcwalton): Makes a copy, bleh. Probably better to not do that.
pub fn node_id_to_type_params(cx: ctxt, id: ast::NodeId) -> ~[t] { pub fn node_id_to_type_params(cx: ctxt, id: ast::NodeId) -> Vec<t> {
let node_type_substs = cx.node_type_substs.borrow(); let node_type_substs = cx.node_type_substs.borrow();
match node_type_substs.get().find(&id) { match node_type_substs.get().find(&id) {
None => return ~[], None => return Vec::new(),
Some(ts) => return (*ts).clone(), Some(ts) => return (*ts).clone(),
} }
} }
@ -2822,7 +2824,7 @@ pub fn ty_fn_sig(fty: t) -> FnSig {
} }
// Type accessors for substructures of types // Type accessors for substructures of types
pub fn ty_fn_args(fty: t) -> ~[t] { pub fn ty_fn_args(fty: t) -> Vec<t> {
match get(fty).sty { match get(fty).sty {
ty_bare_fn(ref f) => f.sig.inputs.clone(), ty_bare_fn(ref f) => f.sig.inputs.clone(),
ty_closure(ref f) => f.sig.inputs.clone(), ty_closure(ref f) => f.sig.inputs.clone(),
@ -2925,8 +2927,8 @@ pub fn replace_closure_return_type(tcx: ctxt, fn_type: t, ret_type: t) -> t {
} }
// Returns a vec of all the input and output types of fty. // Returns a vec of all the input and output types of fty.
pub fn tys_in_fn_sig(sig: &FnSig) -> ~[t] { pub fn tys_in_fn_sig(sig: &FnSig) -> Vec<t> {
vec::append_one(sig.inputs.map(|a| *a), sig.output) vec_ng::append_one(sig.inputs.map(|a| *a), sig.output)
} }
// Type accessors for AST nodes // Type accessors for AST nodes
@ -3213,7 +3215,7 @@ impl AutoRef {
} }
pub struct ParamsTy { pub struct ParamsTy {
params: ~[t], params: Vec<t>,
ty: t ty: t
} }
@ -3231,7 +3233,7 @@ pub fn expr_has_ty_params(cx: ctxt, expr: &ast::Expr) -> bool {
} }
pub fn method_call_type_param_defs(tcx: ctxt, origin: typeck::MethodOrigin) pub fn method_call_type_param_defs(tcx: ctxt, origin: typeck::MethodOrigin)
-> Rc<~[TypeParameterDef]> { -> Rc<Vec<TypeParameterDef> > {
match origin { match origin {
typeck::MethodStatic(did) => { typeck::MethodStatic(did) => {
// n.b.: When we encode impl methods, the bounds // n.b.: When we encode impl methods, the bounds
@ -3250,8 +3252,8 @@ pub fn method_call_type_param_defs(tcx: ctxt, origin: typeck::MethodOrigin)
// trait itself. This ought to be harmonized. // trait itself. This ought to be harmonized.
let trait_type_param_defs = let trait_type_param_defs =
lookup_trait_def(tcx, trt_id).generics.type_param_defs(); lookup_trait_def(tcx, trt_id).generics.type_param_defs();
Rc::new(vec::append( Rc::new(vec_ng::append(
trait_type_param_defs.to_owned(), Vec::from_slice(trait_type_param_defs),
ty::trait_method(tcx, ty::trait_method(tcx,
trt_id, trt_id,
n_mth).generics.type_param_defs())) n_mth).generics.type_param_defs()))
@ -3432,9 +3434,11 @@ pub fn expr_kind(tcx: ctxt,
None => fail!("no def for place"), None => fail!("no def for place"),
}; };
let def_id = ast_util::def_id_of_def(definition); let def_id = ast_util::def_id_of_def(definition);
match tcx.lang_items.items[ExchangeHeapLangItem as uint] { match tcx.lang_items.items.get(ExchangeHeapLangItem as uint) {
Some(item_def_id) if def_id == item_def_id => RvalueDatumExpr, &Some(item_def_id) if def_id == item_def_id => {
Some(_) | None => RvalueDpsExpr, RvalueDatumExpr
}
&Some(_) | &None => RvalueDpsExpr,
} }
} }
@ -3480,8 +3484,8 @@ pub fn method_idx(id: ast::Ident, meths: &[@Method]) -> Option<uint> {
/// Returns a vector containing the indices of all type parameters that appear /// Returns a vector containing the indices of all type parameters that appear
/// in `ty`. The vector may contain duplicates. Probably should be converted /// in `ty`. The vector may contain duplicates. Probably should be converted
/// to a bitset or some other representation. /// to a bitset or some other representation.
pub fn param_tys_in_type(ty: t) -> ~[param_ty] { pub fn param_tys_in_type(ty: t) -> Vec<param_ty> {
let mut rslt = ~[]; let mut rslt = Vec::new();
walk_ty(ty, |ty| { walk_ty(ty, |ty| {
match get(ty).sty { match get(ty).sty {
ty_param(p) => { ty_param(p) => {
@ -3496,8 +3500,8 @@ pub fn param_tys_in_type(ty: t) -> ~[param_ty] {
pub fn occurs_check(tcx: ctxt, sp: Span, vid: TyVid, rt: t) { pub fn occurs_check(tcx: ctxt, sp: Span, vid: TyVid, rt: t) {
// Returns a vec of all the type variables occurring in `ty`. It may // Returns a vec of all the type variables occurring in `ty`. It may
// contain duplicates. (Integral type vars aren't counted.) // contain duplicates. (Integral type vars aren't counted.)
fn vars_in_type(ty: t) -> ~[TyVid] { fn vars_in_type(ty: t) -> Vec<TyVid> {
let mut rslt = ~[]; let mut rslt = Vec::new();
walk_ty(ty, |ty| { walk_ty(ty, |ty| {
match get(ty).sty { match get(ty).sty {
ty_infer(TyVar(v)) => rslt.push(v), ty_infer(TyVar(v)) => rslt.push(v),
@ -3742,7 +3746,7 @@ pub fn provided_source(cx: ctxt, id: ast::DefId) -> Option<ast::DefId> {
provided_method_sources.get().find(&id).map(|x| *x) provided_method_sources.get().find(&id).map(|x| *x)
} }
pub fn provided_trait_methods(cx: ctxt, id: ast::DefId) -> ~[@Method] { pub fn provided_trait_methods(cx: ctxt, id: ast::DefId) -> Vec<@Method> {
if is_local(id) { if is_local(id) {
{ {
match cx.map.find(id.node) { match cx.map.find(id.node) {
@ -3774,7 +3778,7 @@ pub fn provided_trait_methods(cx: ctxt, id: ast::DefId) -> ~[@Method] {
} }
} }
pub fn trait_supertraits(cx: ctxt, id: ast::DefId) -> @~[@TraitRef] { pub fn trait_supertraits(cx: ctxt, id: ast::DefId) -> @Vec<@TraitRef> {
// Check the cache. // Check the cache.
{ {
let supertraits = cx.supertraits.borrow(); let supertraits = cx.supertraits.borrow();
@ -3796,7 +3800,7 @@ pub fn trait_supertraits(cx: ctxt, id: ast::DefId) -> @~[@TraitRef] {
return result; return result;
} }
pub fn trait_ref_supertraits(cx: ctxt, trait_ref: &ty::TraitRef) -> ~[@TraitRef] { pub fn trait_ref_supertraits(cx: ctxt, trait_ref: &ty::TraitRef) -> Vec<@TraitRef> {
let supertrait_refs = trait_supertraits(cx, trait_ref.def_id); let supertrait_refs = trait_supertraits(cx, trait_ref.def_id);
supertrait_refs.map( supertrait_refs.map(
|supertrait_ref| supertrait_ref.subst(cx, &trait_ref.substs)) |supertrait_ref| supertrait_ref.subst(cx, &trait_ref.substs))
@ -3831,12 +3835,12 @@ fn lookup_locally_or_in_crate_store<V:Clone>(
} }
pub fn trait_method(cx: ctxt, trait_did: ast::DefId, idx: uint) -> @Method { pub fn trait_method(cx: ctxt, trait_did: ast::DefId, idx: uint) -> @Method {
let method_def_id = ty::trait_method_def_ids(cx, trait_did)[idx]; let method_def_id = *ty::trait_method_def_ids(cx, trait_did).get(idx);
ty::method(cx, method_def_id) ty::method(cx, method_def_id)
} }
pub fn trait_methods(cx: ctxt, trait_did: ast::DefId) -> @~[@Method] { pub fn trait_methods(cx: ctxt, trait_did: ast::DefId) -> @Vec<@Method> {
let mut trait_methods_cache = cx.trait_methods_cache.borrow_mut(); let mut trait_methods_cache = cx.trait_methods_cache.borrow_mut();
match trait_methods_cache.get().find(&trait_did) { match trait_methods_cache.get().find(&trait_did) {
Some(&methods) => methods, Some(&methods) => methods,
@ -3856,7 +3860,7 @@ pub fn method(cx: ctxt, id: ast::DefId) -> @Method {
}) })
} }
pub fn trait_method_def_ids(cx: ctxt, id: ast::DefId) -> @~[DefId] { pub fn trait_method_def_ids(cx: ctxt, id: ast::DefId) -> @Vec<DefId> {
let mut trait_method_def_ids = cx.trait_method_def_ids.borrow_mut(); let mut trait_method_def_ids = cx.trait_method_def_ids.borrow_mut();
lookup_locally_or_in_crate_store("trait_method_def_ids", lookup_locally_or_in_crate_store("trait_method_def_ids",
id, id,
@ -3934,8 +3938,8 @@ pub fn ty_to_def_id(ty: t) -> Option<ast::DefId> {
// Enum information // Enum information
#[deriving(Clone)] #[deriving(Clone)]
pub struct VariantInfo { pub struct VariantInfo {
args: ~[t], args: Vec<t>,
arg_names: Option<~[ast::Ident]>, arg_names: Option<Vec<ast::Ident> >,
ctor_ty: t, ctor_ty: t,
name: ast::Ident, name: ast::Ident,
id: ast::DefId, id: ast::DefId,
@ -3955,7 +3959,11 @@ impl VariantInfo {
match ast_variant.node.kind { match ast_variant.node.kind {
ast::TupleVariantKind(ref args) => { ast::TupleVariantKind(ref args) => {
let arg_tys = if args.len() > 0 { ty_fn_args(ctor_ty).map(|a| *a) } else { ~[] }; let arg_tys = if args.len() > 0 {
ty_fn_args(ctor_ty).map(|a| *a)
} else {
Vec::new()
};
return VariantInfo { return VariantInfo {
args: arg_tys, args: arg_tys,
@ -3974,13 +3982,13 @@ impl VariantInfo {
assert!(fields.len() > 0); assert!(fields.len() > 0);
let arg_tys = ty_fn_args(ctor_ty).map(|a| *a); let arg_tys = ty_fn_args(ctor_ty).map(|a| *a);
let arg_names = fields.map(|field| { let arg_names = fields.iter().map(|field| {
match field.node.kind { match field.node.kind {
NamedField(ident, _) => ident, NamedField(ident, _) => ident,
UnnamedField => cx.sess.bug( UnnamedField => cx.sess.bug(
"enum_variants: all fields in struct must have a name") "enum_variants: all fields in struct must have a name")
} }
}); }).collect();
return VariantInfo { return VariantInfo {
args: arg_tys, args: arg_tys,
@ -3999,7 +4007,7 @@ impl VariantInfo {
pub fn substd_enum_variants(cx: ctxt, pub fn substd_enum_variants(cx: ctxt,
id: ast::DefId, id: ast::DefId,
substs: &substs) substs: &substs)
-> ~[@VariantInfo] { -> Vec<@VariantInfo> {
enum_variants(cx, id).iter().map(|variant_info| { enum_variants(cx, id).iter().map(|variant_info| {
let substd_args = variant_info.args.iter() let substd_args = variant_info.args.iter()
.map(|aty| subst(cx, substs, *aty)).collect(); .map(|aty| subst(cx, substs, *aty)).collect();
@ -4080,7 +4088,7 @@ pub fn type_is_empty(cx: ctxt, t: t) -> bool {
} }
} }
pub fn enum_variants(cx: ctxt, id: ast::DefId) -> @~[@VariantInfo] { pub fn enum_variants(cx: ctxt, id: ast::DefId) -> @Vec<@VariantInfo> {
{ {
let enum_var_cache = cx.enum_var_cache.borrow(); let enum_var_cache = cx.enum_var_cache.borrow();
match enum_var_cache.get().find(&id) { match enum_var_cache.get().find(&id) {
@ -4170,8 +4178,10 @@ pub fn enum_variant_with_id(cx: ctxt,
let variants = enum_variants(cx, enum_id); let variants = enum_variants(cx, enum_id);
let mut i = 0; let mut i = 0;
while i < variants.len() { while i < variants.len() {
let variant = variants[i]; let variant = *variants.get(i);
if variant.id == variant_id { return variant; } if variant.id == variant_id {
return variant
}
i += 1; i += 1;
} }
cx.sess.bug("enum_variant_with_id(): no variant exists with that ID"); cx.sess.bug("enum_variant_with_id(): no variant exists with that ID");
@ -4295,7 +4305,7 @@ pub fn lookup_field_type(tcx: ctxt,
// Look up the list of field names and IDs for a given struct // Look up the list of field names and IDs for a given struct
// Fails if the id is not bound to a struct. // Fails if the id is not bound to a struct.
pub fn lookup_struct_fields(cx: ctxt, did: ast::DefId) -> ~[field_ty] { pub fn lookup_struct_fields(cx: ctxt, did: ast::DefId) -> Vec<field_ty> {
if did.krate == ast::LOCAL_CRATE { if did.krate == ast::LOCAL_CRATE {
{ {
match cx.map.find(did.node) { match cx.map.find(did.node) {
@ -4342,8 +4352,8 @@ pub fn lookup_struct_field(cx: ctxt,
} }
} }
fn struct_field_tys(fields: &[StructField]) -> ~[field_ty] { fn struct_field_tys(fields: &[StructField]) -> Vec<field_ty> {
fields.map(|field| { fields.iter().map(|field| {
match field.node.kind { match field.node.kind {
NamedField(ident, visibility) => { NamedField(ident, visibility) => {
field_ty { field_ty {
@ -4360,13 +4370,13 @@ fn struct_field_tys(fields: &[StructField]) -> ~[field_ty] {
} }
} }
} }
}) }).collect()
} }
// Returns a list of fields corresponding to the struct's items. trans uses // Returns a list of fields corresponding to the struct's items. trans uses
// this. Takes a list of substs with which to instantiate field types. // this. Takes a list of substs with which to instantiate field types.
pub fn struct_fields(cx: ctxt, did: ast::DefId, substs: &substs) pub fn struct_fields(cx: ctxt, did: ast::DefId, substs: &substs)
-> ~[field] { -> Vec<field> {
lookup_struct_fields(cx, did).map(|f| { lookup_struct_fields(cx, did).map(|f| {
field { field {
// FIXME #6993: change type of field to Name and get rid of new() // FIXME #6993: change type of field to Name and get rid of new()
@ -4451,8 +4461,8 @@ pub fn is_binopable(cx: ctxt, ty: t, op: ast::BinOp) -> bool {
return tbl[tycat(cx, ty)][opcat(op)]; return tbl[tycat(cx, ty)][opcat(op)];
} }
pub fn ty_params_to_tys(tcx: ty::ctxt, generics: &ast::Generics) -> ~[t] { pub fn ty_params_to_tys(tcx: ty::ctxt, generics: &ast::Generics) -> Vec<t> {
vec::from_fn(generics.ty_params.len(), |i| { Vec::from_fn(generics.ty_params.len(), |i| {
let id = generics.ty_params.get(i).id; let id = generics.ty_params.get(i).id;
ty::mk_param(tcx, i, ast_util::local_def(id)) ty::mk_param(tcx, i, ast_util::local_def(id))
}) })
@ -4504,7 +4514,7 @@ pub fn normalize_ty(cx: ctxt, t: t) -> t {
-> substs { -> substs {
substs { regions: ErasedRegions, substs { regions: ErasedRegions,
self_ty: ty_fold::fold_opt_ty(self, substs.self_ty), self_ty: ty_fold::fold_opt_ty(self, substs.self_ty),
tps: ty_fold::fold_ty_vec(self, substs.tps) } tps: ty_fold::fold_ty_vec(self, substs.tps.as_slice()) }
} }
fn fold_sig(&mut self, fn fold_sig(&mut self,
@ -4512,10 +4522,12 @@ pub fn normalize_ty(cx: ctxt, t: t) -> t {
-> ty::FnSig { -> ty::FnSig {
// The binder-id is only relevant to bound regions, which // The binder-id is only relevant to bound regions, which
// are erased at trans time. // are erased at trans time.
ty::FnSig { binder_id: ast::DUMMY_NODE_ID, ty::FnSig {
inputs: ty_fold::fold_ty_vec(self, sig.inputs), binder_id: ast::DUMMY_NODE_ID,
output: self.fold_ty(sig.output), inputs: ty_fold::fold_ty_vec(self, sig.inputs.as_slice()),
variadic: sig.variadic } output: self.fold_ty(sig.output),
variadic: sig.variadic,
}
} }
} }
} }
@ -4607,7 +4619,7 @@ pub fn each_bound_trait_and_supertraits(tcx: ctxt,
-> bool { -> bool {
for &bound_trait_ref in bounds.iter() { for &bound_trait_ref in bounds.iter() {
let mut supertrait_set = HashMap::new(); let mut supertrait_set = HashMap::new();
let mut trait_refs = ~[]; let mut trait_refs = Vec::new();
let mut i = 0; let mut i = 0;
// Seed the worklist with the trait from the bound // Seed the worklist with the trait from the bound
@ -4617,14 +4629,15 @@ pub fn each_bound_trait_and_supertraits(tcx: ctxt,
// Add the given trait ty to the hash map // Add the given trait ty to the hash map
while i < trait_refs.len() { while i < trait_refs.len() {
debug!("each_bound_trait_and_supertraits(i={:?}, trait_ref={})", debug!("each_bound_trait_and_supertraits(i={:?}, trait_ref={})",
i, trait_refs[i].repr(tcx)); i, trait_refs.get(i).repr(tcx));
if !f(trait_refs[i]) { if !f(*trait_refs.get(i)) {
return false; return false;
} }
// Add supertraits to supertrait_set // Add supertraits to supertrait_set
let supertrait_refs = trait_ref_supertraits(tcx, trait_refs[i]); let supertrait_refs = trait_ref_supertraits(tcx,
*trait_refs.get(i));
for &supertrait_ref in supertrait_refs.iter() { for &supertrait_ref in supertrait_refs.iter() {
debug!("each_bound_trait_and_supertraits(supertrait_ref={})", debug!("each_bound_trait_and_supertraits(supertrait_ref={})",
supertrait_ref.repr(tcx)); supertrait_ref.repr(tcx));
@ -4648,7 +4661,7 @@ pub fn count_traits_and_supertraits(tcx: ctxt,
let mut total = 0; let mut total = 0;
for type_param_def in type_param_defs.iter() { for type_param_def in type_param_defs.iter() {
each_bound_trait_and_supertraits( each_bound_trait_and_supertraits(
tcx, type_param_def.bounds.trait_bounds, |_| { tcx, type_param_def.bounds.trait_bounds.as_slice(), |_| {
total += 1; total += 1;
true true
}); });
@ -4681,7 +4694,7 @@ pub fn visitor_object_ty(tcx: ctxt,
let substs = substs { let substs = substs {
regions: ty::NonerasedRegions(opt_vec::Empty), regions: ty::NonerasedRegions(opt_vec::Empty),
self_ty: None, self_ty: None,
tps: ~[] tps: Vec::new()
}; };
let trait_ref = @TraitRef { def_id: trait_lang_item, substs: substs }; let trait_ref = @TraitRef { def_id: trait_lang_item, substs: substs };
Ok((trait_ref, Ok((trait_ref,
@ -4708,7 +4721,7 @@ fn record_trait_implementation(tcx: ctxt,
let mut trait_impls = tcx.trait_impls.borrow_mut(); let mut trait_impls = tcx.trait_impls.borrow_mut();
match trait_impls.get().find(&trait_def_id) { match trait_impls.get().find(&trait_def_id) {
None => { None => {
implementation_list = @RefCell::new(~[]); implementation_list = @RefCell::new(Vec::new());
trait_impls.get().insert(trait_def_id, implementation_list); trait_impls.get().insert(trait_def_id, implementation_list);
} }
Some(&existing_implementation_list) => { Some(&existing_implementation_list) => {
@ -4763,7 +4776,7 @@ pub fn populate_implementations_for_type_if_necessary(tcx: ctxt,
let mut inherent_impls = tcx.inherent_impls.borrow_mut(); let mut inherent_impls = tcx.inherent_impls.borrow_mut();
match inherent_impls.get().find(&type_id) { match inherent_impls.get().find(&type_id) {
None => { None => {
implementation_list = @RefCell::new(~[]); implementation_list = @RefCell::new(Vec::new());
inherent_impls.get().insert(type_id, implementation_list); inherent_impls.get().insert(type_id, implementation_list);
} }
Some(&existing_implementation_list) => { Some(&existing_implementation_list) => {
@ -5080,7 +5093,7 @@ pub fn construct_parameter_environment(
let num_item_type_params = item_type_params.len(); let num_item_type_params = item_type_params.len();
let num_method_type_params = method_type_params.len(); let num_method_type_params = method_type_params.len();
let num_type_params = num_item_type_params + num_method_type_params; let num_type_params = num_item_type_params + num_method_type_params;
let type_params = vec::from_fn(num_type_params, |i| { let type_params = Vec::from_fn(num_type_params, |i| {
let def_id = if i < num_item_type_params { let def_id = if i < num_item_type_params {
item_type_params[i].def_id item_type_params[i].def_id
} else { } else {
@ -5108,7 +5121,7 @@ pub fn construct_parameter_environment(
// //
let self_bound_substd = self_bound.map(|b| b.subst(tcx, &free_substs)); let self_bound_substd = self_bound.map(|b| b.subst(tcx, &free_substs));
let type_param_bounds_substd = vec::from_fn(num_type_params, |i| { let type_param_bounds_substd = Vec::from_fn(num_type_params, |i| {
if i < num_item_type_params { if i < num_item_type_params {
(*item_type_params[i].bounds).subst(tcx, &free_substs) (*item_type_params[i].bounds).subst(tcx, &free_substs)
} else { } else {
@ -5128,7 +5141,7 @@ impl substs {
pub fn empty() -> substs { pub fn empty() -> substs {
substs { substs {
self_ty: None, self_ty: None,
tps: ~[], tps: Vec::new(),
regions: NonerasedRegions(opt_vec::Empty) regions: NonerasedRegions(opt_vec::Empty)
} }
} }
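Two idioms dominate the ty.rs changes above: `.map()` on an owned `~[T]` becomes `.iter().map(..).collect()` (struct_field_tys, VariantInfo::from_ast_variant), and direct indexing `v[i]` becomes `*v.get(i)`, since this era's Vec::get hands back a reference. Sketched with placeholder element types:

    use std::vec_ng::Vec;

    fn doubled(xs: &[uint]) -> Vec<uint> {
        // ~[T].map(..) -> xs.iter().map(..).collect()
        xs.iter().map(|x| *x * 2).collect()
    }

    fn first(xs: &Vec<uint>) -> uint {
        // xs[0] -> *xs.get(0); the explicit deref copies the element out.
        *xs.get(0)
    }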

View File

@ -13,6 +13,8 @@
use middle::ty; use middle::ty;
use util::ppaux::Repr; use util::ppaux::Repr;
use std::vec_ng::Vec;
pub trait TypeFolder { pub trait TypeFolder {
fn tcx(&self) -> ty::ctxt; fn tcx(&self) -> ty::ctxt;
@ -84,10 +86,8 @@ pub fn fold_opt_ty<T:TypeFolder>(this: &mut T,
t.map(|t| this.fold_ty(t)) t.map(|t| this.fold_ty(t))
} }
pub fn fold_ty_vec<T:TypeFolder>(this: &mut T, pub fn fold_ty_vec<T:TypeFolder>(this: &mut T, tys: &[ty::t]) -> Vec<ty::t> {
tys: &[ty::t]) tys.iter().map(|t| this.fold_ty(*t)).collect()
-> ~[ty::t] {
tys.map(|t| this.fold_ty(*t))
} }
pub fn super_fold_ty<T:TypeFolder>(this: &mut T, pub fn super_fold_ty<T:TypeFolder>(this: &mut T,
@ -110,14 +110,14 @@ pub fn super_fold_substs<T:TypeFolder>(this: &mut T,
ty::substs { regions: regions, ty::substs { regions: regions,
self_ty: fold_opt_ty(this, substs.self_ty), self_ty: fold_opt_ty(this, substs.self_ty),
tps: fold_ty_vec(this, substs.tps), } tps: fold_ty_vec(this, substs.tps.as_slice()), }
} }
pub fn super_fold_sig<T:TypeFolder>(this: &mut T, pub fn super_fold_sig<T:TypeFolder>(this: &mut T,
sig: &ty::FnSig) sig: &ty::FnSig)
-> ty::FnSig { -> ty::FnSig {
ty::FnSig { binder_id: sig.binder_id, ty::FnSig { binder_id: sig.binder_id,
inputs: fold_ty_vec(this, sig.inputs), inputs: fold_ty_vec(this, sig.inputs.as_slice()),
output: this.fold_ty(sig.output), output: this.fold_ty(sig.output),
variadic: sig.variadic } variadic: sig.variadic }
} }
@ -166,7 +166,7 @@ pub fn super_fold_sty<T:TypeFolder>(this: &mut T,
bounds) bounds)
} }
ty::ty_tup(ref ts) => { ty::ty_tup(ref ts) => {
ty::ty_tup(fold_ty_vec(this, *ts)) ty::ty_tup(fold_ty_vec(this, ts.as_slice()))
} }
ty::ty_bare_fn(ref f) => { ty::ty_bare_fn(ref f) => {
ty::ty_bare_fn(this.fold_bare_fn_ty(f)) ty::ty_bare_fn(this.fold_bare_fn_ty(f))

View File

@ -136,7 +136,7 @@ fn opt_ast_region_to_region<AC:AstConv,RS:RegionScope>(
} }
Ok(rs) => { Ok(rs) => {
rs[0] *rs.get(0)
} }
} }
} }
@ -791,7 +791,11 @@ pub fn ty_of_closure<AC:AstConv,RS:RegionScope>(
let expected_arg_ty = expected_sig.as_ref().and_then(|e| { let expected_arg_ty = expected_sig.as_ref().and_then(|e| {
// no guarantee that the correct number of expected args // no guarantee that the correct number of expected args
// were supplied // were supplied
if i < e.inputs.len() {Some(e.inputs[i])} else {None} if i < e.inputs.len() {
Some(*e.inputs.get(i))
} else {
None
}
}); });
ty_of_arg(this, &rb, a, expected_arg_ty) ty_of_arg(this, &rb, a, expected_arg_ty)
}).collect(); }).collect();

View File

@ -115,7 +115,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
let fcx = pcx.fcx; let fcx = pcx.fcx;
let tcx = pcx.fcx.ccx.tcx; let tcx = pcx.fcx.ccx.tcx;
let arg_types: ~[ty::t]; let arg_types: Vec<ty::t> ;
let kind_name; let kind_name;
// structure_of requires type variables to be resolved. // structure_of requires type variables to be resolved.
@ -295,7 +295,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
span: Span, span: Span,
path: &ast::Path, path: &ast::Path,
fields: &[ast::FieldPat], fields: &[ast::FieldPat],
class_fields: ~[ty::field_ty], class_fields: Vec<ty::field_ty> ,
class_id: ast::DefId, class_id: ast::DefId,
substitutions: &ty::substs, substitutions: &ty::substs,
etc: bool) { etc: bool) {
@ -319,7 +319,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
} }
Some(&(index, ref mut used)) => { Some(&(index, ref mut used)) => {
*used = true; *used = true;
let class_field = class_fields[index]; let class_field = *class_fields.get(index);
let field_type = ty::lookup_field_type(tcx, let field_type = ty::lookup_field_type(tcx,
class_id, class_id,
class_field.id, class_field.id,
@ -562,7 +562,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
supplied_def_id, supplied_def_id,
&ty::substs { &ty::substs {
self_ty: None, self_ty: None,
tps: ~[], tps: Vec::new(),
regions: ty::ErasedRegions, regions: ty::ErasedRegions,
}); });
} }
@ -585,7 +585,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
match *s { match *s {
ty::ty_tup(ref ex_elts) if e_count == ex_elts.len() => { ty::ty_tup(ref ex_elts) if e_count == ex_elts.len() => {
for (i, elt) in elts.iter().enumerate() { for (i, elt) in elts.iter().enumerate() {
check_pat(pcx, *elt, ex_elts[i]); check_pat(pcx, *elt, *ex_elts.get(i));
} }
fcx.write_ty(pat.id, expected); fcx.write_ty(pat.id, expected);
} }

View File

@ -100,7 +100,8 @@ use util::ppaux::Repr;
use std::cell::RefCell; use std::cell::RefCell;
use collections::HashSet; use collections::HashSet;
use std::result; use std::result;
use std::vec; use std::vec_ng::Vec;
use std::vec_ng;
use syntax::ast::{DefId, SelfValue, SelfRegion}; use syntax::ast::{DefId, SelfValue, SelfRegion};
use syntax::ast::{SelfUniq, SelfStatic}; use syntax::ast::{SelfUniq, SelfStatic};
use syntax::ast::{MutMutable, MutImmutable}; use syntax::ast::{MutMutable, MutImmutable};
@ -139,8 +140,8 @@ pub fn lookup(
m_name: m_name, m_name: m_name,
supplied_tps: supplied_tps, supplied_tps: supplied_tps,
impl_dups: @RefCell::new(HashSet::new()), impl_dups: @RefCell::new(HashSet::new()),
inherent_candidates: @RefCell::new(~[]), inherent_candidates: @RefCell::new(Vec::new()),
extension_candidates: @RefCell::new(~[]), extension_candidates: @RefCell::new(Vec::new()),
deref_args: deref_args, deref_args: deref_args,
check_traits: check_traits, check_traits: check_traits,
autoderef_receiver: autoderef_receiver, autoderef_receiver: autoderef_receiver,
@ -184,8 +185,8 @@ pub fn lookup_in_trait(
m_name: m_name, m_name: m_name,
supplied_tps: supplied_tps, supplied_tps: supplied_tps,
impl_dups: @RefCell::new(HashSet::new()), impl_dups: @RefCell::new(HashSet::new()),
inherent_candidates: @RefCell::new(~[]), inherent_candidates: @RefCell::new(Vec::new()),
extension_candidates: @RefCell::new(~[]), extension_candidates: @RefCell::new(Vec::new()),
deref_args: check::DoDerefArgs, deref_args: check::DoDerefArgs,
check_traits: CheckTraitsOnly, check_traits: CheckTraitsOnly,
autoderef_receiver: autoderef_receiver, autoderef_receiver: autoderef_receiver,
@ -208,8 +209,8 @@ pub struct LookupContext<'a> {
m_name: ast::Name, m_name: ast::Name,
supplied_tps: &'a [ty::t], supplied_tps: &'a [ty::t],
impl_dups: @RefCell<HashSet<DefId>>, impl_dups: @RefCell<HashSet<DefId>>,
inherent_candidates: @RefCell<~[Candidate]>, inherent_candidates: @RefCell<Vec<Candidate> >,
extension_candidates: @RefCell<~[Candidate]>, extension_candidates: @RefCell<Vec<Candidate> >,
deref_args: check::DerefArgs, deref_args: check::DerefArgs,
check_traits: CheckTraitsFlag, check_traits: CheckTraitsFlag,
autoderef_receiver: AutoderefReceiverFlag, autoderef_receiver: AutoderefReceiverFlag,
@ -311,8 +312,8 @@ impl<'a> LookupContext<'a> {
// Candidate collection (see comment at start of file) // Candidate collection (see comment at start of file)
fn reset_candidates(&self) { fn reset_candidates(&self) {
self.inherent_candidates.set(~[]); self.inherent_candidates.set(Vec::new());
self.extension_candidates.set(~[]); self.extension_candidates.set(Vec::new());
} }
fn push_inherent_candidates(&self, self_ty: ty::t) { fn push_inherent_candidates(&self, self_ty: ty::t) {
@ -450,7 +451,7 @@ impl<'a> LookupContext<'a> {
self.get_method_index(new_trait_ref, trait_ref, method_num); self.get_method_index(new_trait_ref, trait_ref, method_num);
let mut m = (*m).clone(); let mut m = (*m).clone();
// We need to fix up the transformed self type. // We need to fix up the transformed self type.
m.fty.sig.inputs[0] = *m.fty.sig.inputs.get_mut(0) =
self.construct_transformed_self_ty_for_object( self.construct_transformed_self_ty_for_object(
did, &rcvr_substs, &m); did, &rcvr_substs, &m);
@ -476,7 +477,13 @@ impl<'a> LookupContext<'a> {
param_ty); param_ty);
self.push_inherent_candidates_from_bounds( self.push_inherent_candidates_from_bounds(
rcvr_ty, rcvr_ty,
self.fcx.inh.param_env.type_param_bounds[param_ty.idx].trait_bounds, self.fcx
.inh
.param_env
.type_param_bounds
.get(param_ty.idx)
.trait_bounds
.as_slice(),
restrict_to, restrict_to,
param_numbered(param_ty.idx)); param_numbered(param_ty.idx));
} }
@ -541,10 +548,9 @@ impl<'a> LookupContext<'a> {
let trait_methods = ty::trait_methods(tcx, bound_trait_ref.def_id); let trait_methods = ty::trait_methods(tcx, bound_trait_ref.def_id);
match trait_methods.iter().position(|m| { match trait_methods.iter().position(|m| {
m.explicit_self != ast::SelfStatic && m.explicit_self != ast::SelfStatic &&
m.ident.name == self.m_name }) m.ident.name == self.m_name }) {
{
Some(pos) => { Some(pos) => {
let method = trait_methods[pos]; let method = *trait_methods.get(pos);
match mk_cand(bound_trait_ref, method, pos, this_bound_idx) { match mk_cand(bound_trait_ref, method, pos, this_bound_idx) {
Some(cand) => { Some(cand) => {
@ -584,7 +590,7 @@ impl<'a> LookupContext<'a> {
} }
fn push_candidates_from_impl(&self, fn push_candidates_from_impl(&self,
candidates: &mut ~[Candidate], candidates: &mut Vec<Candidate> ,
impl_info: &ty::Impl) { impl_info: &ty::Impl) {
{ {
let mut impl_dups = self.impl_dups.borrow_mut(); let mut impl_dups = self.impl_dups.borrow_mut();
@ -599,13 +605,16 @@ impl<'a> LookupContext<'a> {
impl_info.methods.map(|m| m.ident).repr(self.tcx())); impl_info.methods.map(|m| m.ident).repr(self.tcx()));
let idx = { let idx = {
match impl_info.methods.iter().position(|m| m.ident.name == self.m_name) { match impl_info.methods
.iter()
.position(|m| m.ident.name == self.m_name) {
Some(idx) => idx, Some(idx) => idx,
None => { return; } // No method with the right name. None => { return; } // No method with the right name.
} }
}; };
let method = ty::method(self.tcx(), impl_info.methods[idx].def_id); let method = ty::method(self.tcx(),
impl_info.methods.get(idx).def_id);
// determine the `self` of the impl with fresh // determine the `self` of the impl with fresh
// variables for each parameter: // variables for each parameter:
@ -892,14 +901,15 @@ impl<'a> LookupContext<'a> {
fn consider_candidates(&self, fn consider_candidates(&self,
rcvr_ty: ty::t, rcvr_ty: ty::t,
candidates: &mut ~[Candidate]) candidates: &mut Vec<Candidate> )
-> Option<MethodCallee> { -> Option<MethodCallee> {
// FIXME(pcwalton): Do we need to clone here? // FIXME(pcwalton): Do we need to clone here?
let relevant_candidates: ~[Candidate] = let relevant_candidates: Vec<Candidate> =
candidates.iter().map(|c| (*c).clone()). candidates.iter().map(|c| (*c).clone()).
filter(|c| self.is_relevant(rcvr_ty, c)).collect(); filter(|c| self.is_relevant(rcvr_ty, c)).collect();
let relevant_candidates = self.merge_candidates(relevant_candidates); let relevant_candidates =
self.merge_candidates(relevant_candidates.as_slice());
if relevant_candidates.len() == 0 { if relevant_candidates.len() == 0 {
return None; return None;
@ -914,11 +924,11 @@ impl<'a> LookupContext<'a> {
} }
} }
Some(self.confirm_candidate(rcvr_ty, &relevant_candidates[0])) Some(self.confirm_candidate(rcvr_ty, relevant_candidates.get(0)))
} }
fn merge_candidates(&self, candidates: &[Candidate]) -> ~[Candidate] { fn merge_candidates(&self, candidates: &[Candidate]) -> Vec<Candidate> {
let mut merged = ~[]; let mut merged = Vec::new();
let mut i = 0; let mut i = 0;
while i < candidates.len() { while i < candidates.len() {
let candidate_a = &candidates[i]; let candidate_a = &candidates[i];
@ -1004,14 +1014,15 @@ impl<'a> LookupContext<'a> {
parameters given for this method"); parameters given for this method");
self.fcx.infcx().next_ty_vars(num_method_tps) self.fcx.infcx().next_ty_vars(num_method_tps)
} else { } else {
self.supplied_tps.to_owned() Vec::from_slice(self.supplied_tps)
} }
}; };
// Construct the full set of type parameters for the method, // Construct the full set of type parameters for the method,
// which is equal to the class tps + the method tps. // which is equal to the class tps + the method tps.
let all_substs = substs { let all_substs = substs {
tps: vec::append(candidate.rcvr_substs.tps.clone(), m_substs), tps: vec_ng::append(candidate.rcvr_substs.tps.clone(),
m_substs.as_slice()),
regions: candidate.rcvr_substs.regions.clone(), regions: candidate.rcvr_substs.regions.clone(),
self_ty: candidate.rcvr_substs.self_ty, self_ty: candidate.rcvr_substs.self_ty,
}; };
@ -1031,7 +1042,7 @@ impl<'a> LookupContext<'a> {
let args = fn_sig.inputs.slice_from(1).iter().map(|t| { let args = fn_sig.inputs.slice_from(1).iter().map(|t| {
t.subst(tcx, &all_substs) t.subst(tcx, &all_substs)
}); });
Some(fn_sig.inputs[0]).move_iter().chain(args).collect() Some(*fn_sig.inputs.get(0)).move_iter().chain(args).collect()
} }
_ => fn_sig.inputs.subst(tcx, &all_substs) _ => fn_sig.inputs.subst(tcx, &all_substs)
}; };
@ -1050,7 +1061,7 @@ impl<'a> LookupContext<'a> {
self.fcx.infcx().next_region_var( self.fcx.infcx().next_region_var(
infer::BoundRegionInFnCall(self.expr.span, br)) infer::BoundRegionInFnCall(self.expr.span, br))
}); });
let transformed_self_ty = fn_sig.inputs[0]; let transformed_self_ty = *fn_sig.inputs.get(0);
let fty = ty::mk_bare_fn(tcx, ty::BareFnTy { let fty = ty::mk_bare_fn(tcx, ty::BareFnTy {
sig: fn_sig, sig: fn_sig,
purity: bare_fn_ty.purity, purity: bare_fn_ty.purity,
@ -1118,7 +1129,7 @@ impl<'a> LookupContext<'a> {
ty::mk_err() // error reported in `enforce_object_limitations()` ty::mk_err() // error reported in `enforce_object_limitations()`
} }
ast::SelfRegion(..) | ast::SelfUniq => { ast::SelfRegion(..) | ast::SelfUniq => {
let transformed_self_ty = method_ty.fty.sig.inputs[0]; let transformed_self_ty = *method_ty.fty.sig.inputs.get(0);
match ty::get(transformed_self_ty).sty { match ty::get(transformed_self_ty).sty {
ty::ty_rptr(r, mt) => { // must be SelfRegion ty::ty_rptr(r, mt) => { // must be SelfRegion
ty::mk_trait(self.tcx(), trait_def_id, ty::mk_trait(self.tcx(), trait_def_id,
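Not part of the patch: a minimal sketch of the recurring indexing rewrite in the file above, in the pre-1.0 dialect this tree uses. `Vec<T>` has no `Index` impl at this point, so `v[i]` reads become `*v.get(i)` and in-place writes go through `get_mut`.

```rust
// Illustrative only: the ~[T] -> Vec<T> indexing pattern shown above.
use std::vec_ng::Vec;

fn main() {
    let mut inputs: Vec<int> = vec!(10, 20, 30);
    *inputs.get_mut(0) = 11;          // was: inputs[0] = 11
    let first = *inputs.get(0);       // was: let first = inputs[0]
    println!("first = {}", first);
}
```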
View File
@ -118,6 +118,8 @@ use collections::HashMap;
use std::mem::replace; use std::mem::replace;
use std::result; use std::result;
use std::vec; use std::vec;
use std::vec_ng::Vec;
use std::vec_ng;
use syntax::abi::AbiSet; use syntax::abi::AbiSet;
use syntax::ast::{Provided, Required}; use syntax::ast::{Provided, Required};
use syntax::ast; use syntax::ast;
@ -280,7 +282,7 @@ pub fn blank_fn_ctxt(ccx: @CrateCtxt,
// and statement context, but we might as well do write the code only once // and statement context, but we might as well do write the code only once
let param_env = ty::ParameterEnvironment { free_substs: substs::empty(), let param_env = ty::ParameterEnvironment { free_substs: substs::empty(),
self_param_bound: None, self_param_bound: None,
type_param_bounds: ~[] }; type_param_bounds: Vec::new() };
@FnCtxt { @FnCtxt {
err_count_on_creation: ccx.tcx.sess.err_count(), err_count_on_creation: ccx.tcx.sess.err_count(),
ret_ty: rty, ret_ty: rty,
@ -510,7 +512,7 @@ fn check_fn(ccx: @CrateCtxt,
} }
pub fn check_no_duplicate_fields(tcx: ty::ctxt, pub fn check_no_duplicate_fields(tcx: ty::ctxt,
fields: ~[(ast::Ident, Span)]) { fields: Vec<(ast::Ident, Span)> ) {
let mut field_names = HashMap::new(); let mut field_names = HashMap::new();
for p in fields.iter() { for p in fields.iter() {
@ -730,7 +732,7 @@ fn check_impl_methods_against_trait(ccx: @CrateCtxt,
// Check for missing methods from trait // Check for missing methods from trait
let provided_methods = ty::provided_trait_methods(tcx, let provided_methods = ty::provided_trait_methods(tcx,
impl_trait_ref.def_id); impl_trait_ref.def_id);
let mut missing_methods = ~[]; let mut missing_methods = Vec::new();
for trait_method in trait_methods.iter() { for trait_method in trait_methods.iter() {
let is_implemented = let is_implemented =
impl_methods.iter().any( impl_methods.iter().any(
@ -887,11 +889,11 @@ fn compare_impl_method(tcx: ty::ctxt,
// in the self type with free regions. So, for example, if the // in the self type with free regions. So, for example, if the
// impl type is "&'a str", then this would replace the self // impl type is "&'a str", then this would replace the self
// type with a free region `self`. // type with a free region `self`.
let dummy_impl_tps: ~[ty::t] = let dummy_impl_tps: Vec<ty::t> =
impl_generics.type_param_defs().iter().enumerate(). impl_generics.type_param_defs().iter().enumerate().
map(|(i,t)| ty::mk_param(tcx, i, t.def_id)). map(|(i,t)| ty::mk_param(tcx, i, t.def_id)).
collect(); collect();
let dummy_method_tps: ~[ty::t] = let dummy_method_tps: Vec<ty::t> =
impl_m.generics.type_param_defs().iter().enumerate(). impl_m.generics.type_param_defs().iter().enumerate().
map(|(i,t)| ty::mk_param(tcx, i + impl_tps, t.def_id)). map(|(i,t)| ty::mk_param(tcx, i + impl_tps, t.def_id)).
collect(); collect();
@ -902,7 +904,7 @@ fn compare_impl_method(tcx: ty::ctxt,
bound_region: ty::BrNamed(l.def_id, l.ident)})). bound_region: ty::BrNamed(l.def_id, l.ident)})).
collect(); collect();
let dummy_substs = ty::substs { let dummy_substs = ty::substs {
tps: vec::append(dummy_impl_tps, dummy_method_tps), tps: vec_ng::append(dummy_impl_tps, dummy_method_tps.as_slice()),
regions: ty::NonerasedRegions(dummy_impl_regions), regions: ty::NonerasedRegions(dummy_impl_regions),
self_ty: None }; self_ty: None };
@ -929,7 +931,7 @@ fn compare_impl_method(tcx: ty::ctxt,
self_ty: self_ty } = trait_substs.subst(tcx, &dummy_substs); self_ty: self_ty } = trait_substs.subst(tcx, &dummy_substs);
let substs = substs { let substs = substs {
regions: trait_regions, regions: trait_regions,
tps: vec::append(trait_tps, dummy_method_tps), tps: vec_ng::append(trait_tps, dummy_method_tps.as_slice()),
self_ty: self_ty, self_ty: self_ty,
}; };
debug!("trait_fty (pre-subst): {} substs={}", debug!("trait_fty (pre-subst): {} substs={}",
@ -987,8 +989,8 @@ impl FnCtxt {
impl RegionScope for infer::InferCtxt { impl RegionScope for infer::InferCtxt {
fn anon_regions(&self, span: Span, count: uint) fn anon_regions(&self, span: Span, count: uint)
-> Result<~[ty::Region], ()> { -> Result<Vec<ty::Region> , ()> {
Ok(vec::from_fn(count, |_| { Ok(Vec::from_fn(count, |_| {
self.next_region_var(infer::MiscVariable(span)) self.next_region_var(infer::MiscVariable(span))
})) }))
} }
@ -1259,7 +1261,7 @@ pub fn do_autoderef(fcx: @FnCtxt, sp: Span, t: ty::t) -> (ty::t, uint) {
* so that trans/borrowck/etc know about this autoderef. */ * so that trans/borrowck/etc know about this autoderef. */
let mut t1 = t; let mut t1 = t;
let mut enum_dids = ~[]; let mut enum_dids = Vec::new();
let mut autoderefs = 0; let mut autoderefs = 0;
loop { loop {
let sty = structure_of(fcx, sp, t1); let sty = structure_of(fcx, sp, t1);
@ -1672,7 +1674,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
let args = args.slice_from(1); let args = args.slice_from(1);
if ty::type_is_error(method_fn_ty) { if ty::type_is_error(method_fn_ty) {
let err_inputs = err_args(args.len()); let err_inputs = err_args(args.len());
check_argument_types(fcx, sp, err_inputs, callee_expr, check_argument_types(fcx, sp, err_inputs.as_slice(), callee_expr,
args, deref_args, false); args, deref_args, false);
method_fn_ty method_fn_ty
} else { } else {
@ -1713,10 +1715,10 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
let supplied_arg_count = args.len(); let supplied_arg_count = args.len();
let expected_arg_count = fn_inputs.len(); let expected_arg_count = fn_inputs.len();
let formal_tys = if expected_arg_count == supplied_arg_count { let formal_tys = if expected_arg_count == supplied_arg_count {
fn_inputs.map(|a| *a) fn_inputs.iter().map(|a| *a).collect()
} else if variadic { } else if variadic {
if supplied_arg_count >= expected_arg_count { if supplied_arg_count >= expected_arg_count {
fn_inputs.map(|a| *a) fn_inputs.iter().map(|a| *a).collect()
} else { } else {
let msg = format!( let msg = format!(
"this function takes at least {nexpected, plural, =1{# parameter} \ "this function takes at least {nexpected, plural, =1{# parameter} \
@ -1782,7 +1784,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
if is_block == check_blocks { if is_block == check_blocks {
debug!("checking the argument"); debug!("checking the argument");
let mut formal_ty = formal_tys[i]; let mut formal_ty = *formal_tys.get(i);
match deref_args { match deref_args {
DoDerefArgs => { DoDerefArgs => {
@ -1840,8 +1842,8 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
} }
} }
fn err_args(len: uint) -> ~[ty::t] { fn err_args(len: uint) -> Vec<ty::t> {
vec::from_fn(len, |_| ty::mk_err()) Vec::from_fn(len, |_| ty::mk_err())
} }
fn write_call(fcx: @FnCtxt, call_expr: &ast::Expr, output: ty::t) { fn write_call(fcx: @FnCtxt, call_expr: &ast::Expr, output: ty::t) {
@ -1892,7 +1894,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
}); });
// Call the generic checker. // Call the generic checker.
check_argument_types(fcx, call_expr.span, fn_sig.inputs, f, check_argument_types(fcx, call_expr.span, fn_sig.inputs.as_slice(), f,
args, DontDerefArgs, fn_sig.variadic); args, DontDerefArgs, fn_sig.variadic);
write_call(fcx, call_expr, fn_sig.output); write_call(fcx, call_expr, fn_sig.output);
@ -2310,7 +2312,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
// field // field
debug!("class named {}", ppaux::ty_to_str(tcx, base_t)); debug!("class named {}", ppaux::ty_to_str(tcx, base_t));
let cls_items = ty::lookup_struct_fields(tcx, base_id); let cls_items = ty::lookup_struct_fields(tcx, base_id);
match lookup_field_ty(tcx, base_id, cls_items, match lookup_field_ty(tcx, base_id, cls_items.as_slice(),
field, &(*substs)) { field, &(*substs)) {
Some(field_ty) => { Some(field_ty) => {
// (2) look up what field's type is, and return it // (2) look up what field's type is, and return it
@ -2324,13 +2326,13 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
_ => () _ => ()
} }
let tps: ~[ty::t] = tys.iter().map(|&ty| fcx.to_ty(ty)).collect(); let tps: Vec<ty::t> = tys.iter().map(|&ty| fcx.to_ty(ty)).collect();
match method::lookup(fcx, match method::lookup(fcx,
expr, expr,
base, base,
field, field,
expr_t, expr_t,
tps, tps.as_slice(),
DontDerefArgs, DontDerefArgs,
CheckTraitsAndInherentMethods, CheckTraitsAndInherentMethods,
AutoderefReceiver) { AutoderefReceiver) {
@ -2426,7 +2428,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
// Make sure the programmer specified all the fields. // Make sure the programmer specified all the fields.
assert!(fields_found <= field_types.len()); assert!(fields_found <= field_types.len());
if fields_found < field_types.len() { if fields_found < field_types.len() {
let mut missing_fields = ~[]; let mut missing_fields = Vec::new();
for class_field in field_types.iter() { for class_field in field_types.iter() {
let name = class_field.name; let name = class_field.name;
let (_, seen) = *class_field_map.get(&name); let (_, seen) = *class_field_map.get(&name);
@ -2484,7 +2486,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
class_id, class_id,
id, id,
substitutions, substitutions,
class_fields, class_fields.as_slice(),
fields, fields,
base_expr.is_none()); base_expr.is_none());
if ty::type_is_error(fcx.node_ty(id)) { if ty::type_is_error(fcx.node_ty(id)) {
@ -2542,7 +2544,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
variant_id, variant_id,
id, id,
substitutions, substitutions,
variant_fields, variant_fields.as_slice(),
fields, fields,
true); true);
fcx.write_ty(id, enum_type); fcx.write_ty(id, enum_type);
@ -2621,18 +2623,21 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
// places: the exchange heap and the managed heap. // places: the exchange heap and the managed heap.
let definition = lookup_def(fcx, path.span, place.id); let definition = lookup_def(fcx, path.span, place.id);
let def_id = ast_util::def_id_of_def(definition); let def_id = ast_util::def_id_of_def(definition);
match tcx.lang_items.items[ExchangeHeapLangItem as uint] { match tcx.lang_items
Some(item_def_id) if def_id == item_def_id => { .items
.get(ExchangeHeapLangItem as uint) {
&Some(item_def_id) if def_id == item_def_id => {
fcx.write_ty(id, ty::mk_uniq(tcx, fcx.write_ty(id, ty::mk_uniq(tcx,
fcx.expr_ty(subexpr))); fcx.expr_ty(subexpr)));
checked = true checked = true
} }
Some(_) | None => {} &Some(_) | &None => {}
} }
if !checked { if !checked {
match tcx.lang_items match tcx.lang_items
.items[ManagedHeapLangItem as uint] { .items
Some(item_def_id) if def_id == item_def_id => { .get(ManagedHeapLangItem as uint) {
&Some(item_def_id) if def_id == item_def_id => {
// Assign the magic `Gc<T>` struct. // Assign the magic `Gc<T>` struct.
let gc_struct_id = let gc_struct_id =
match tcx.lang_items match tcx.lang_items
@ -2652,16 +2657,16 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
gc_struct_id, gc_struct_id,
substs { substs {
self_ty: None, self_ty: None,
tps: ~[ tps: vec!(
fcx.expr_ty( fcx.expr_ty(
subexpr) subexpr)
], ),
regions: regions, regions: regions,
}); });
fcx.write_ty(id, sty); fcx.write_ty(id, sty);
checked = true checked = true
} }
Some(_) | None => {} &Some(_) | &None => {}
} }
} }
} }
@ -2750,7 +2755,8 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
ty::ty_struct(did, ref substs) => { ty::ty_struct(did, ref substs) => {
let fields = ty::struct_fields(fcx.tcx(), did, substs); let fields = ty::struct_fields(fcx.tcx(), did, substs);
fields.len() == 1 fields.len() == 1
&& fields[0].ident == token::special_idents::unnamed_field && fields.get(0).ident ==
token::special_idents::unnamed_field
} }
_ => false _ => false
}; };
@ -3129,7 +3135,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
let elt_ts = elts.iter().enumerate().map(|(i, e)| { let elt_ts = elts.iter().enumerate().map(|(i, e)| {
let opt_hint = match flds { let opt_hint = match flds {
Some(ref fs) if i < fs.len() => Some(fs[i]), Some(ref fs) if i < fs.len() => Some(*fs.get(i)),
_ => None _ => None
}; };
check_expr_with_opt_hint(fcx, *e, opt_hint); check_expr_with_opt_hint(fcx, *e, opt_hint);
@ -3492,7 +3498,7 @@ pub fn check_simd(tcx: ty::ctxt, sp: Span, id: ast::NodeId) {
tcx.sess.span_err(sp, "SIMD vector cannot be empty"); tcx.sess.span_err(sp, "SIMD vector cannot be empty");
return; return;
} }
let e = ty::lookup_field_type(tcx, did, fields[0].id, substs); let e = ty::lookup_field_type(tcx, did, fields.get(0).id, substs);
if !fields.iter().all( if !fields.iter().all(
|f| ty::lookup_field_type(tcx, did, f.id, substs) == e) { |f| ty::lookup_field_type(tcx, did, f.id, substs) == e) {
tcx.sess.span_err(sp, "SIMD vector should be homogeneous"); tcx.sess.span_err(sp, "SIMD vector should be homogeneous");
@ -3544,11 +3550,11 @@ pub fn check_enum_variants(ccx: @CrateCtxt,
vs: &[ast::P<ast::Variant>], vs: &[ast::P<ast::Variant>],
id: ast::NodeId, id: ast::NodeId,
hint: attr::ReprAttr) hint: attr::ReprAttr)
-> ~[@ty::VariantInfo] { -> Vec<@ty::VariantInfo> {
let rty = ty::node_id_to_type(ccx.tcx, id); let rty = ty::node_id_to_type(ccx.tcx, id);
let mut variants: ~[@ty::VariantInfo] = ~[]; let mut variants: Vec<@ty::VariantInfo> = Vec::new();
let mut disr_vals: ~[ty::Disr] = ~[]; let mut disr_vals: Vec<ty::Disr> = Vec::new();
let mut prev_disr_val: Option<ty::Disr> = None; let mut prev_disr_val: Option<ty::Disr> = None;
for &v in vs.iter() { for &v in vs.iter() {
@ -3797,7 +3803,7 @@ pub fn instantiate_path(fcx: @FnCtxt,
// Build up the list of type parameters, inserting the self parameter // Build up the list of type parameters, inserting the self parameter
// at the appropriate position. // at the appropriate position.
let mut tps = ~[]; let mut tps = Vec::new();
let mut pushed = false; let mut pushed = false;
for (i, ty) in pth.segments.iter() for (i, ty) in pth.segments.iter()
.flat_map(|segment| segment.types.iter()) .flat_map(|segment| segment.types.iter())
@ -3805,7 +3811,7 @@ pub fn instantiate_path(fcx: @FnCtxt,
.enumerate() { .enumerate() {
match self_parameter_index { match self_parameter_index {
Some(index) if index == i => { Some(index) if index == i => {
tps.push(fcx.infcx().next_ty_vars(1)[0]); tps.push(*fcx.infcx().next_ty_vars(1).get(0));
pushed = true; pushed = true;
} }
_ => {} _ => {}
@ -3829,7 +3835,7 @@ pub fn instantiate_path(fcx: @FnCtxt,
for (i, default) in defaults.skip(ty_substs_len).enumerate() { for (i, default) in defaults.skip(ty_substs_len).enumerate() {
match self_parameter_index { match self_parameter_index {
Some(index) if index == i + ty_substs_len => { Some(index) if index == i + ty_substs_len => {
substs.tps.push(fcx.infcx().next_ty_vars(1)[0]); substs.tps.push(*fcx.infcx().next_ty_vars(1).get(0));
pushed = true; pushed = true;
} }
_ => {} _ => {}
@ -3848,7 +3854,7 @@ pub fn instantiate_path(fcx: @FnCtxt,
// If the self parameter goes at the end, insert it there. // If the self parameter goes at the end, insert it there.
if !pushed && self_parameter_index.is_some() { if !pushed && self_parameter_index.is_some() {
substs.tps.push(fcx.infcx().next_ty_vars(1)[0]) substs.tps.push(*fcx.infcx().next_ty_vars(1).get(0))
} }
assert_eq!(substs.tps.len(), ty_param_count) assert_eq!(substs.tps.len(), ty_param_count)
@ -4024,40 +4030,39 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: &ast::ForeignItem) {
let tcx = ccx.tcx; let tcx = ccx.tcx;
let name = token::get_ident(it.ident); let name = token::get_ident(it.ident);
let (n_tps, inputs, output) = if name.get().starts_with("atomic_") { let (n_tps, inputs, output) = if name.get().starts_with("atomic_") {
let split : ~[&str] = name.get().split('_').collect(); let split : Vec<&str> = name.get().split('_').collect();
assert!(split.len() >= 2, "Atomic intrinsic not correct format"); assert!(split.len() >= 2, "Atomic intrinsic not correct format");
//We only care about the operation here //We only care about the operation here
match split[1] { match *split.get(1) {
"cxchg" => (1, ~[ty::mk_mut_rptr(tcx, "cxchg" => (1, vec!(ty::mk_mut_rptr(tcx,
ty::ReLateBound(it.id, ty::BrAnon(0)), ty::ReLateBound(it.id, ty::BrAnon(0)),
param(ccx, 0)), param(ccx, 0)),
param(ccx, 0), param(ccx, 0),
param(ccx, 0), param(ccx, 0)), param(ccx, 0)),
], param(ccx, 0)),
"load" => (1, "load" => (1,
~[ vec!(
ty::mk_imm_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)), ty::mk_imm_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)),
param(ccx, 0)) param(ccx, 0))
], ),
param(ccx, 0)), param(ccx, 0)),
"store" => (1, "store" => (1,
~[ vec!(
ty::mk_mut_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)), ty::mk_mut_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)),
param(ccx, 0)), param(ccx, 0)),
param(ccx, 0) param(ccx, 0)
], ),
ty::mk_nil()), ty::mk_nil()),
"xchg" | "xadd" | "xsub" | "and" | "nand" | "or" | "xor" | "max" | "xchg" | "xadd" | "xsub" | "and" | "nand" | "or" | "xor" | "max" |
"min" | "umax" | "umin" => { "min" | "umax" | "umin" => {
(1, ~[ty::mk_mut_rptr(tcx, (1, vec!(ty::mk_mut_rptr(tcx,
ty::ReLateBound(it.id, ty::BrAnon(0)), ty::ReLateBound(it.id, ty::BrAnon(0)),
param(ccx, 0)), param(ccx, 0) ], param(ccx, 0)), param(ccx, 0) ),
param(ccx, 0)) param(ccx, 0))
} }
"fence" => { "fence" => {
(0, ~[], ty::mk_nil()) (0, Vec::new(), ty::mk_nil())
} }
op => { op => {
tcx.sess.span_err(it.span, tcx.sess.span_err(it.span,
@ -4069,24 +4074,24 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: &ast::ForeignItem) {
} else { } else {
match name.get() { match name.get() {
"abort" => (0, ~[], ty::mk_bot()), "abort" => (0, Vec::new(), ty::mk_bot()),
"breakpoint" => (0, ~[], ty::mk_nil()), "breakpoint" => (0, Vec::new(), ty::mk_nil()),
"size_of" | "size_of" |
"pref_align_of" | "min_align_of" => (1u, ~[], ty::mk_uint()), "pref_align_of" | "min_align_of" => (1u, Vec::new(), ty::mk_uint()),
"init" => (1u, ~[], param(ccx, 0u)), "init" => (1u, Vec::new(), param(ccx, 0u)),
"uninit" => (1u, ~[], param(ccx, 0u)), "uninit" => (1u, Vec::new(), param(ccx, 0u)),
"forget" => (1u, ~[ param(ccx, 0) ], ty::mk_nil()), "forget" => (1u, vec!( param(ccx, 0) ), ty::mk_nil()),
"transmute" => (2, ~[ param(ccx, 0) ], param(ccx, 1)), "transmute" => (2, vec!( param(ccx, 0) ), param(ccx, 1)),
"move_val_init" => { "move_val_init" => {
(1u, (1u,
~[ vec!(
ty::mk_mut_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)), param(ccx, 0)), ty::mk_mut_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)), param(ccx, 0)),
param(ccx, 0u) param(ccx, 0u)
], ),
ty::mk_nil()) ty::mk_nil())
} }
"needs_drop" => (1u, ~[], ty::mk_bool()), "needs_drop" => (1u, Vec::new(), ty::mk_bool()),
"owns_managed" => (1u, ~[], ty::mk_bool()), "owns_managed" => (1u, Vec::new(), ty::mk_bool()),
"get_tydesc" => { "get_tydesc" => {
let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) { let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) {
@ -4097,14 +4102,14 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: &ast::ForeignItem) {
ty: tydesc_ty, ty: tydesc_ty,
mutbl: ast::MutImmutable mutbl: ast::MutImmutable
}); });
(1u, ~[], td_ptr) (1u, Vec::new(), td_ptr)
} }
"type_id" => { "type_id" => {
let langid = ccx.tcx.lang_items.require(TypeIdLangItem); let langid = ccx.tcx.lang_items.require(TypeIdLangItem);
match langid { match langid {
Ok(did) => (1u, ~[], ty::mk_struct(ccx.tcx, did, substs { Ok(did) => (1u, Vec::new(), ty::mk_struct(ccx.tcx, did, substs {
self_ty: None, self_ty: None,
tps: ~[], tps: Vec::new(),
regions: ty::NonerasedRegions(opt_vec::Empty) regions: ty::NonerasedRegions(opt_vec::Empty)
}) ), }) ),
Err(msg) => { tcx.sess.span_fatal(it.span, msg); } Err(msg) => { tcx.sess.span_fatal(it.span, msg); }
@ -4125,17 +4130,17 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: &ast::ForeignItem) {
ty: tydesc_ty, ty: tydesc_ty,
mutbl: ast::MutImmutable mutbl: ast::MutImmutable
}); });
(0, ~[ td_ptr, visitor_object_ty ], ty::mk_nil()) (0, vec!( td_ptr, visitor_object_ty ), ty::mk_nil())
} }
"offset" => { "offset" => {
(1, (1,
~[ vec!(
ty::mk_ptr(tcx, ty::mt { ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0), ty: param(ccx, 0),
mutbl: ast::MutImmutable mutbl: ast::MutImmutable
}), }),
ty::mk_int() ty::mk_int()
], ),
ty::mk_ptr(tcx, ty::mt { ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0), ty: param(ccx, 0),
mutbl: ast::MutImmutable mutbl: ast::MutImmutable
@ -4143,7 +4148,7 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: &ast::ForeignItem) {
} }
"copy_nonoverlapping_memory" => { "copy_nonoverlapping_memory" => {
(1, (1,
~[ vec!(
ty::mk_ptr(tcx, ty::mt { ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0), ty: param(ccx, 0),
mutbl: ast::MutMutable mutbl: ast::MutMutable
@ -4153,12 +4158,12 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: &ast::ForeignItem) {
mutbl: ast::MutImmutable mutbl: ast::MutImmutable
}), }),
ty::mk_uint() ty::mk_uint()
], ),
ty::mk_nil()) ty::mk_nil())
} }
"copy_memory" => { "copy_memory" => {
(1, (1,
~[ vec!(
ty::mk_ptr(tcx, ty::mt { ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0), ty: param(ccx, 0),
mutbl: ast::MutMutable mutbl: ast::MutMutable
@ -4168,135 +4173,135 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: &ast::ForeignItem) {
mutbl: ast::MutImmutable mutbl: ast::MutImmutable
}), }),
ty::mk_uint() ty::mk_uint()
], ),
ty::mk_nil()) ty::mk_nil())
} }
"set_memory" => { "set_memory" => {
(1, (1,
~[ vec!(
ty::mk_ptr(tcx, ty::mt { ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0), ty: param(ccx, 0),
mutbl: ast::MutMutable mutbl: ast::MutMutable
}), }),
ty::mk_u8(), ty::mk_u8(),
ty::mk_uint() ty::mk_uint()
], ),
ty::mk_nil()) ty::mk_nil())
} }
"sqrtf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "sqrtf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
"sqrtf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "sqrtf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"powif32" => { "powif32" => {
(0, (0,
~[ ty::mk_f32(), ty::mk_i32() ], vec!( ty::mk_f32(), ty::mk_i32() ),
ty::mk_f32()) ty::mk_f32())
} }
"powif64" => { "powif64" => {
(0, (0,
~[ ty::mk_f64(), ty::mk_i32() ], vec!( ty::mk_f64(), ty::mk_i32() ),
ty::mk_f64()) ty::mk_f64())
} }
"sinf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "sinf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
"sinf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "sinf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"cosf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "cosf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
"cosf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "cosf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"powf32" => { "powf32" => {
(0, (0,
~[ ty::mk_f32(), ty::mk_f32() ], vec!( ty::mk_f32(), ty::mk_f32() ),
ty::mk_f32()) ty::mk_f32())
} }
"powf64" => { "powf64" => {
(0, (0,
~[ ty::mk_f64(), ty::mk_f64() ], vec!( ty::mk_f64(), ty::mk_f64() ),
ty::mk_f64()) ty::mk_f64())
} }
"expf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "expf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
"expf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "expf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"exp2f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "exp2f32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
"exp2f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "exp2f64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"logf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "logf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
"logf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "logf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"log10f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "log10f32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
"log10f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "log10f64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"log2f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "log2f32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
"log2f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "log2f64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"fmaf32" => { "fmaf32" => {
(0, (0,
~[ ty::mk_f32(), ty::mk_f32(), ty::mk_f32() ], vec!( ty::mk_f32(), ty::mk_f32(), ty::mk_f32() ),
ty::mk_f32()) ty::mk_f32())
} }
"fmaf64" => { "fmaf64" => {
(0, (0,
~[ ty::mk_f64(), ty::mk_f64(), ty::mk_f64() ], vec!( ty::mk_f64(), ty::mk_f64(), ty::mk_f64() ),
ty::mk_f64()) ty::mk_f64())
} }
"fabsf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "fabsf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
"fabsf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "fabsf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"copysignf32" => (0, ~[ ty::mk_f32(), ty::mk_f32() ], ty::mk_f32()), "copysignf32" => (0, vec!( ty::mk_f32(), ty::mk_f32() ), ty::mk_f32()),
"copysignf64" => (0, ~[ ty::mk_f64(), ty::mk_f64() ], ty::mk_f64()), "copysignf64" => (0, vec!( ty::mk_f64(), ty::mk_f64() ), ty::mk_f64()),
"floorf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "floorf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
"floorf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "floorf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"ceilf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "ceilf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
"ceilf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "ceilf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"truncf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "truncf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
"truncf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "truncf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"rintf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "rintf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
"rintf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "rintf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"nearbyintf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "nearbyintf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
"nearbyintf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "nearbyintf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"roundf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "roundf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
"roundf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "roundf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"ctpop8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()), "ctpop8" => (0, vec!( ty::mk_i8() ), ty::mk_i8()),
"ctpop16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()), "ctpop16" => (0, vec!( ty::mk_i16() ), ty::mk_i16()),
"ctpop32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()), "ctpop32" => (0, vec!( ty::mk_i32() ), ty::mk_i32()),
"ctpop64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()), "ctpop64" => (0, vec!( ty::mk_i64() ), ty::mk_i64()),
"ctlz8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()), "ctlz8" => (0, vec!( ty::mk_i8() ), ty::mk_i8()),
"ctlz16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()), "ctlz16" => (0, vec!( ty::mk_i16() ), ty::mk_i16()),
"ctlz32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()), "ctlz32" => (0, vec!( ty::mk_i32() ), ty::mk_i32()),
"ctlz64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()), "ctlz64" => (0, vec!( ty::mk_i64() ), ty::mk_i64()),
"cttz8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()), "cttz8" => (0, vec!( ty::mk_i8() ), ty::mk_i8()),
"cttz16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()), "cttz16" => (0, vec!( ty::mk_i16() ), ty::mk_i16()),
"cttz32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()), "cttz32" => (0, vec!( ty::mk_i32() ), ty::mk_i32()),
"cttz64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()), "cttz64" => (0, vec!( ty::mk_i64() ), ty::mk_i64()),
"bswap16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()), "bswap16" => (0, vec!( ty::mk_i16() ), ty::mk_i16()),
"bswap32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()), "bswap32" => (0, vec!( ty::mk_i32() ), ty::mk_i32()),
"bswap64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()), "bswap64" => (0, vec!( ty::mk_i64() ), ty::mk_i64()),
"volatile_load" => "volatile_load" =>
(1, ~[ ty::mk_imm_ptr(tcx, param(ccx, 0)) ], param(ccx, 0)), (1, vec!( ty::mk_imm_ptr(tcx, param(ccx, 0)) ), param(ccx, 0)),
"volatile_store" => "volatile_store" =>
(1, ~[ ty::mk_mut_ptr(tcx, param(ccx, 0)), param(ccx, 0) ], ty::mk_nil()), (1, vec!( ty::mk_mut_ptr(tcx, param(ccx, 0)), param(ccx, 0) ), ty::mk_nil()),
"i8_add_with_overflow" | "i8_sub_with_overflow" | "i8_mul_with_overflow" => "i8_add_with_overflow" | "i8_sub_with_overflow" | "i8_mul_with_overflow" =>
(0, ~[ty::mk_i8(), ty::mk_i8()], (0, vec!(ty::mk_i8(), ty::mk_i8()),
ty::mk_tup(tcx, ~[ty::mk_i8(), ty::mk_bool()])), ty::mk_tup(tcx, vec!(ty::mk_i8(), ty::mk_bool()))),
"i16_add_with_overflow" | "i16_sub_with_overflow" | "i16_mul_with_overflow" => "i16_add_with_overflow" | "i16_sub_with_overflow" | "i16_mul_with_overflow" =>
(0, ~[ty::mk_i16(), ty::mk_i16()], (0, vec!(ty::mk_i16(), ty::mk_i16()),
ty::mk_tup(tcx, ~[ty::mk_i16(), ty::mk_bool()])), ty::mk_tup(tcx, vec!(ty::mk_i16(), ty::mk_bool()))),
"i32_add_with_overflow" | "i32_sub_with_overflow" | "i32_mul_with_overflow" => "i32_add_with_overflow" | "i32_sub_with_overflow" | "i32_mul_with_overflow" =>
(0, ~[ty::mk_i32(), ty::mk_i32()], (0, vec!(ty::mk_i32(), ty::mk_i32()),
ty::mk_tup(tcx, ~[ty::mk_i32(), ty::mk_bool()])), ty::mk_tup(tcx, vec!(ty::mk_i32(), ty::mk_bool()))),
"i64_add_with_overflow" | "i64_sub_with_overflow" | "i64_mul_with_overflow" => "i64_add_with_overflow" | "i64_sub_with_overflow" | "i64_mul_with_overflow" =>
(0, ~[ty::mk_i64(), ty::mk_i64()], (0, vec!(ty::mk_i64(), ty::mk_i64()),
ty::mk_tup(tcx, ~[ty::mk_i64(), ty::mk_bool()])), ty::mk_tup(tcx, vec!(ty::mk_i64(), ty::mk_bool()))),
"u8_add_with_overflow" | "u8_sub_with_overflow" | "u8_mul_with_overflow" => "u8_add_with_overflow" | "u8_sub_with_overflow" | "u8_mul_with_overflow" =>
(0, ~[ty::mk_u8(), ty::mk_u8()], (0, vec!(ty::mk_u8(), ty::mk_u8()),
ty::mk_tup(tcx, ~[ty::mk_u8(), ty::mk_bool()])), ty::mk_tup(tcx, vec!(ty::mk_u8(), ty::mk_bool()))),
"u16_add_with_overflow" | "u16_sub_with_overflow" | "u16_mul_with_overflow" => "u16_add_with_overflow" | "u16_sub_with_overflow" | "u16_mul_with_overflow" =>
(0, ~[ty::mk_u16(), ty::mk_u16()], (0, vec!(ty::mk_u16(), ty::mk_u16()),
ty::mk_tup(tcx, ~[ty::mk_u16(), ty::mk_bool()])), ty::mk_tup(tcx, vec!(ty::mk_u16(), ty::mk_bool()))),
"u32_add_with_overflow" | "u32_sub_with_overflow" | "u32_mul_with_overflow"=> "u32_add_with_overflow" | "u32_sub_with_overflow" | "u32_mul_with_overflow"=>
(0, ~[ty::mk_u32(), ty::mk_u32()], (0, vec!(ty::mk_u32(), ty::mk_u32()),
ty::mk_tup(tcx, ~[ty::mk_u32(), ty::mk_bool()])), ty::mk_tup(tcx, vec!(ty::mk_u32(), ty::mk_bool()))),
"u64_add_with_overflow" | "u64_sub_with_overflow" | "u64_mul_with_overflow" => "u64_add_with_overflow" | "u64_sub_with_overflow" | "u64_mul_with_overflow" =>
(0, ~[ty::mk_u64(), ty::mk_u64()], (0, vec!(ty::mk_u64(), ty::mk_u64()),
ty::mk_tup(tcx, ~[ty::mk_u64(), ty::mk_bool()])), ty::mk_tup(tcx, vec!(ty::mk_u64(), ty::mk_bool()))),
ref other => { ref other => {
tcx.sess.span_err(it.span, tcx.sess.span_err(it.span,
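Not part of the patch: an illustrative reduction of the intrinsic signature-table rewrite above; the operation names and return strings here are invented for the example. Owned-vector literals `~[a, b]` become `vec!(a, b)`, and the empty literal `~[]` becomes `Vec::new()`.

```rust
// Illustrative only: literal conversion pattern from the table above.
use std::vec_ng::Vec;

fn arg_names(op: &str) -> (uint, Vec<&'static str>) {
    match op {
        "fence" => (0, Vec::new()),              // was: (0, ~[])
        "load"  => (1, vec!("ptr")),             // was: (1, ~["ptr"])
        "store" => (1, vec!("ptr", "val")),      // was: (1, ~["ptr", "val"])
        _       => (0, Vec::new()),
    }
}

fn main() {
    let (n_tps, args) = arg_names("store");
    println!("{} type params, {} args", n_tps, args.len());
}
```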
View File
@ -13,7 +13,9 @@
use middle::ty; use middle::ty;
use middle::ty_fold; use middle::ty_fold;
use middle::ty_fold::TypeFolder; use middle::ty_fold::TypeFolder;
use collections::HashMap; use collections::HashMap;
use std::vec_ng::Vec;
use util::ppaux::Repr; use util::ppaux::Repr;
use util::ppaux; use util::ppaux;
@ -74,7 +76,7 @@ pub fn relate_nested_regions(tcx: ty::ctxt,
*/ */
let mut rr = RegionRelator { tcx: tcx, let mut rr = RegionRelator { tcx: tcx,
stack: ~[], stack: Vec::new(),
relate_op: relate_op }; relate_op: relate_op };
match opt_region { match opt_region {
Some(o_r) => { rr.stack.push(o_r); } Some(o_r) => { rr.stack.push(o_r); }
@ -84,7 +86,7 @@ pub fn relate_nested_regions(tcx: ty::ctxt,
struct RegionRelator<'a> { struct RegionRelator<'a> {
tcx: ty::ctxt, tcx: ty::ctxt,
stack: ~[ty::Region], stack: Vec<ty::Region> ,
relate_op: 'a |ty::Region, ty::Region|, relate_op: 'a |ty::Region, ty::Region|,
} }
@ -147,7 +149,7 @@ pub fn relate_free_regions(tcx: ty::ctxt, fn_sig: &ty::FnSig) {
debug!("relate_free_regions >>"); debug!("relate_free_regions >>");
let mut all_tys = ~[]; let mut all_tys = Vec::new();
for arg in fn_sig.inputs.iter() { for arg in fn_sig.inputs.iter() {
all_tys.push(*arg); all_tys.push(*arg);
} }
View File
@ -26,9 +26,10 @@ use util::common::indenter;
use util::ppaux; use util::ppaux;
use util::ppaux::Repr; use util::ppaux::Repr;
use std::cell::RefCell;
use collections::HashSet; use collections::HashSet;
use std::cell::RefCell;
use std::result; use std::result;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
use syntax::ast_util; use syntax::ast_util;
use syntax::codemap::Span; use syntax::codemap::Span;
@ -100,13 +101,13 @@ fn lookup_vtables(vcx: &VtableContext,
// We do this backwards for reasons discussed above. // We do this backwards for reasons discussed above.
assert_eq!(substs.tps.len(), type_param_defs.len()); assert_eq!(substs.tps.len(), type_param_defs.len());
let mut result = let mut result: Vec<vtable_param_res> =
substs.tps.rev_iter() substs.tps.rev_iter()
.zip(type_param_defs.rev_iter()) .zip(type_param_defs.rev_iter())
.map(|(ty, def)| .map(|(ty, def)|
lookup_vtables_for_param(vcx, location_info, Some(substs), lookup_vtables_for_param(vcx, location_info, Some(substs),
&*def.bounds, *ty, is_early)) &*def.bounds, *ty, is_early))
.to_owned_vec(); .collect();
result.reverse(); result.reverse();
assert_eq!(substs.tps.len(), result.len()); assert_eq!(substs.tps.len(), result.len());
@ -132,9 +133,12 @@ fn lookup_vtables_for_param(vcx: &VtableContext,
let tcx = vcx.tcx(); let tcx = vcx.tcx();
// ty is the value supplied for the type parameter A... // ty is the value supplied for the type parameter A...
let mut param_result = ~[]; let mut param_result = Vec::new();
ty::each_bound_trait_and_supertraits(tcx, type_param_bounds.trait_bounds, |trait_ref| { ty::each_bound_trait_and_supertraits(tcx,
type_param_bounds.trait_bounds
.as_slice(),
|trait_ref| {
// ...and here trait_ref is each bound that was declared on A, // ...and here trait_ref is each bound that was declared on A,
// expressed in terms of the type parameters. // expressed in terms of the type parameters.
@ -252,7 +256,11 @@ fn lookup_vtable(vcx: &VtableContext,
let vtable_opt = match ty::get(ty).sty { let vtable_opt = match ty::get(ty).sty {
ty::ty_param(param_ty {idx: n, ..}) => { ty::ty_param(param_ty {idx: n, ..}) => {
let type_param_bounds: &[@ty::TraitRef] = let type_param_bounds: &[@ty::TraitRef] =
vcx.param_env.type_param_bounds[n].trait_bounds; vcx.param_env
.type_param_bounds
.get(n)
.trait_bounds
.as_slice();
lookup_vtable_from_bounds(vcx, lookup_vtable_from_bounds(vcx,
location_info, location_info,
type_param_bounds, type_param_bounds,
@ -323,7 +331,7 @@ fn search_for_vtable(vcx: &VtableContext,
-> Option<vtable_origin> { -> Option<vtable_origin> {
let tcx = vcx.tcx(); let tcx = vcx.tcx();
let mut found = ~[]; let mut found = Vec::new();
let mut impls_seen = HashSet::new(); let mut impls_seen = HashSet::new();
// Load the implementations from external metadata if necessary. // Load the implementations from external metadata if necessary.
@ -336,7 +344,7 @@ fn search_for_vtable(vcx: &VtableContext,
let trait_impls = tcx.trait_impls.borrow(); let trait_impls = tcx.trait_impls.borrow();
trait_impls.get() trait_impls.get()
.find(&trait_ref.def_id) .find(&trait_ref.def_id)
.map_or(@RefCell::new(~[]), |x| *x) .map_or(@RefCell::new(Vec::new()), |x| *x)
}; };
// impls is the list of all impls in scope for trait_ref. // impls is the list of all impls in scope for trait_ref.
let impls = impls.borrow(); let impls = impls.borrow();
@ -392,7 +400,7 @@ fn search_for_vtable(vcx: &VtableContext,
// the type self_ty, and substs is bound to [T]. // the type self_ty, and substs is bound to [T].
debug!("The self ty is {} and its substs are {}", debug!("The self ty is {} and its substs are {}",
vcx.infcx.ty_to_str(for_ty), vcx.infcx.ty_to_str(for_ty),
vcx.infcx.tys_to_str(substs.tps)); vcx.infcx.tys_to_str(substs.tps.as_slice()));
// Next, we unify trait_ref -- the type that we want to cast // Next, we unify trait_ref -- the type that we want to cast
// to -- with of_trait_ref -- the trait that im implements. At // to -- with of_trait_ref -- the trait that im implements. At
@ -445,7 +453,7 @@ fn search_for_vtable(vcx: &VtableContext,
debug!("The fixed-up substs are {} - \ debug!("The fixed-up substs are {} - \
they will be unified with the bounds for \ they will be unified with the bounds for \
the target ty, {}", the target ty, {}",
vcx.infcx.tys_to_str(substs_f.tps), vcx.infcx.tys_to_str(substs_f.tps.as_slice()),
vcx.infcx.trait_ref_to_str(trait_ref)); vcx.infcx.trait_ref_to_str(trait_ref));
// Next, we unify the fixed-up substitutions for the impl self // Next, we unify the fixed-up substitutions for the impl self
@ -465,14 +473,14 @@ fn search_for_vtable(vcx: &VtableContext,
match found.len() { match found.len() {
0 => { return None } 0 => { return None }
1 => return Some(found[0].clone()), 1 => return Some(found.get(0).clone()),
_ => { _ => {
if !is_early { if !is_early {
vcx.tcx().sess.span_err( vcx.tcx().sess.span_err(
location_info.span, location_info.span,
"multiple applicable methods in scope"); "multiple applicable methods in scope");
} }
return Some(found[0].clone()); return Some(found.get(0).clone());
} }
} }
} }
@ -614,7 +622,7 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
let param_bounds = ty::ParamBounds { let param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(), builtin_bounds: ty::EmptyBuiltinBounds(),
trait_bounds: ~[target_trait_ref] trait_bounds: vec!(target_trait_ref)
}; };
let vtables = let vtables =
lookup_vtables_for_param(&vcx, lookup_vtables_for_param(&vcx,
@ -625,7 +633,7 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
is_early); is_early);
if !is_early { if !is_early {
insert_vtables(fcx, ex.id, @~[vtables]); insert_vtables(fcx, ex.id, @vec!(vtables));
} }
// Now, if this is &trait, we need to link the // Now, if this is &trait, we need to link the
@ -701,11 +709,15 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
debug!("vtable resolution on parameter bounds for method call {}", debug!("vtable resolution on parameter bounds for method call {}",
ex.repr(fcx.tcx())); ex.repr(fcx.tcx()));
let type_param_defs = ty::method_call_type_param_defs(cx.tcx, method.origin); let type_param_defs = ty::method_call_type_param_defs(cx.tcx, method.origin);
if has_trait_bounds(*type_param_defs.borrow()) { if has_trait_bounds(type_param_defs.borrow().as_slice()) {
let substs = fcx.method_ty_substs(ex.id); let substs = fcx.method_ty_substs(ex.id);
let vcx = fcx.vtable_context(); let vcx = fcx.vtable_context();
let vtbls = lookup_vtables(&vcx, &location_info_for_expr(ex), let vtbls = lookup_vtables(&vcx,
*type_param_defs.borrow(), &substs, is_early); &location_info_for_expr(ex),
type_param_defs.borrow()
.as_slice(),
&substs,
is_early);
if !is_early { if !is_early {
insert_vtables(fcx, ex.id, vtbls); insert_vtables(fcx, ex.id, vtbls);
} }
@ -787,7 +799,7 @@ pub fn resolve_impl(tcx: ty::ctxt,
// but that falls out of doing this. // but that falls out of doing this.
let param_bounds = ty::ParamBounds { let param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(), builtin_bounds: ty::EmptyBuiltinBounds(),
trait_bounds: ~[impl_trait_ref] trait_bounds: vec!(impl_trait_ref)
}; };
let t = ty::node_id_to_type(tcx, impl_item.id); let t = ty::node_id_to_type(tcx, impl_item.id);
let t = t.subst(tcx, &param_env.free_substs); let t = t.subst(tcx, &param_env.free_substs);
@ -817,7 +829,7 @@ pub fn trans_resolve_method(tcx: ty::ctxt, id: ast::NodeId,
substs: &ty::substs) -> Option<vtable_res> { substs: &ty::substs) -> Option<vtable_res> {
let generics = ty::lookup_item_type(tcx, ast_util::local_def(id)).generics; let generics = ty::lookup_item_type(tcx, ast_util::local_def(id)).generics;
let type_param_defs = generics.type_param_defs.borrow(); let type_param_defs = generics.type_param_defs.borrow();
if has_trait_bounds(*type_param_defs) { if has_trait_bounds(type_param_defs.as_slice()) {
let vcx = VtableContext { let vcx = VtableContext {
infcx: &infer::new_infer_ctxt(tcx), infcx: &infer::new_infer_ctxt(tcx),
param_env: &ty::construct_parameter_environment(tcx, None, [], [], [], id) param_env: &ty::construct_parameter_environment(tcx, None, [], [], [], id)
@ -827,7 +839,11 @@ pub fn trans_resolve_method(tcx: ty::ctxt, id: ast::NodeId,
span: tcx.map.span(id) span: tcx.map.span(id)
}; };
Some(lookup_vtables(&vcx, &loc_info, *type_param_defs, substs, false)) Some(lookup_vtables(&vcx,
&loc_info,
type_param_defs.as_slice(),
substs,
false))
} else { } else {
None None
} }
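Not part of the patch: a sketch of the call-boundary pattern used throughout the vtable changes above. Callees that still take `&[T]` are fed from a `Vec<T>` via `.as_slice()`, and `.to_owned_vec()` on an iterator becomes `.collect()` into a `Vec`.

```rust
// Illustrative only: as_slice() at &[T] boundaries, collect() instead of to_owned_vec().
use std::vec_ng::Vec;

fn describe(tps: &[int]) -> ~str {
    format!("{} type params", tps.len())
}

fn main() {
    let tps: Vec<int> = range(0, 3).map(|i| i * 2).collect(); // was: .to_owned_vec()
    println!("{}", describe(tps.as_slice()));                 // was: describe(tps)
}
```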
View File
@ -28,6 +28,7 @@ use middle::typeck::write_ty_to_tcx;
use util::ppaux; use util::ppaux;
use util::ppaux::Repr; use util::ppaux::Repr;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
use syntax::codemap::Span; use syntax::codemap::Span;
use syntax::print::pprust::pat_to_str; use syntax::print::pprust::pat_to_str;
@ -53,13 +54,13 @@ fn resolve_type_vars_in_type(fcx: @FnCtxt, sp: Span, typ: ty::t)
} }
fn resolve_type_vars_in_types(fcx: @FnCtxt, sp: Span, tys: &[ty::t]) fn resolve_type_vars_in_types(fcx: @FnCtxt, sp: Span, tys: &[ty::t])
-> ~[ty::t] { -> Vec<ty::t> {
tys.map(|t| { tys.iter().map(|t| {
match resolve_type_vars_in_type(fcx, sp, *t) { match resolve_type_vars_in_type(fcx, sp, *t) {
Some(t1) => t1, Some(t1) => t1,
None => ty::mk_err() None => ty::mk_err()
} }
}) }).collect()
} }
fn resolve_method_map_entry(wbcx: &mut WbCtxt, sp: Span, id: ast::NodeId) { fn resolve_method_map_entry(wbcx: &mut WbCtxt, sp: Span, id: ast::NodeId) {
@ -78,7 +79,7 @@ fn resolve_method_map_entry(wbcx: &mut WbCtxt, sp: Span, id: ast::NodeId) {
return; return;
} }
}; };
let mut new_tps = ~[]; let mut new_tps = Vec::new();
for &subst in method.substs.tps.iter() { for &subst in method.substs.tps.iter() {
match resolve_type_vars_in_type(fcx, sp, subst) { match resolve_type_vars_in_type(fcx, sp, subst) {
Some(t) => new_tps.push(t), Some(t) => new_tps.push(t),
@ -122,7 +123,9 @@ fn resolve_vtable_map_entry(fcx: @FnCtxt, sp: Span, id: ast::NodeId) {
origin: &vtable_origin) -> vtable_origin { origin: &vtable_origin) -> vtable_origin {
match origin { match origin {
&vtable_static(def_id, ref tys, origins) => { &vtable_static(def_id, ref tys, origins) => {
let r_tys = resolve_type_vars_in_types(fcx, sp, *tys); let r_tys = resolve_type_vars_in_types(fcx,
sp,
tys.as_slice());
let r_origins = resolve_origins(fcx, sp, origins); let r_origins = resolve_origins(fcx, sp, origins);
vtable_static(def_id, r_tys, r_origins) vtable_static(def_id, r_tys, r_origins)
} }
@ -242,7 +245,7 @@ fn resolve_type_vars_for_node(wbcx: &mut WbCtxt, sp: Span, id: ast::NodeId)
write_ty_to_tcx(tcx, id, t); write_ty_to_tcx(tcx, id, t);
let mut ret = Some(t); let mut ret = Some(t);
fcx.opt_node_ty_substs(id, |substs| { fcx.opt_node_ty_substs(id, |substs| {
let mut new_tps = ~[]; let mut new_tps = Vec::new();
for subst in substs.tps.iter() { for subst in substs.tps.iter() {
match resolve_type_vars_in_type(fcx, sp, *subst) { match resolve_type_vars_in_type(fcx, sp, *subst) {
Some(t) => new_tps.push(t), Some(t) => new_tps.push(t),
View File
@ -46,15 +46,16 @@ use syntax::opt_vec;
use syntax::parse::token; use syntax::parse::token;
use syntax::visit; use syntax::visit;
use std::cell::RefCell;
use collections::HashSet; use collections::HashSet;
use std::cell::RefCell;
use std::rc::Rc; use std::rc::Rc;
use std::vec; use std::vec_ng::Vec;
use std::vec_ng;
struct UniversalQuantificationResult { struct UniversalQuantificationResult {
monotype: t, monotype: t,
type_variables: ~[ty::t], type_variables: Vec<ty::t> ,
type_param_defs: Rc<~[ty::TypeParameterDef]> type_param_defs: Rc<Vec<ty::TypeParameterDef> >
} }
fn get_base_type(inference_context: &InferCtxt, fn get_base_type(inference_context: &InferCtxt,
@ -323,7 +324,7 @@ impl CoherenceChecker {
// `ProvidedMethodInfo` instance into the `provided_method_sources` map. // `ProvidedMethodInfo` instance into the `provided_method_sources` map.
fn instantiate_default_methods(&self, impl_id: ast::DefId, fn instantiate_default_methods(&self, impl_id: ast::DefId,
trait_ref: &ty::TraitRef, trait_ref: &ty::TraitRef,
all_methods: &mut ~[@Method]) { all_methods: &mut Vec<@Method> ) {
let tcx = self.crate_context.tcx; let tcx = self.crate_context.tcx;
debug!("instantiate_default_methods(impl_id={:?}, trait_ref={})", debug!("instantiate_default_methods(impl_id={:?}, trait_ref={})",
impl_id, trait_ref.repr(tcx)); impl_id, trait_ref.repr(tcx));
@ -354,8 +355,9 @@ impl CoherenceChecker {
// construct the polytype for the method based on the method_ty // construct the polytype for the method based on the method_ty
let new_generics = ty::Generics { let new_generics = ty::Generics {
type_param_defs: type_param_defs:
Rc::new(vec::append( Rc::new(vec_ng::append(
impl_poly_type.generics.type_param_defs().to_owned(), Vec::from_slice(impl_poly_type.generics
.type_param_defs()),
new_method_ty.generics.type_param_defs())), new_method_ty.generics.type_param_defs())),
region_param_defs: region_param_defs:
impl_poly_type.generics.region_param_defs.clone() impl_poly_type.generics.region_param_defs.clone()
@ -390,7 +392,7 @@ impl CoherenceChecker {
let mut inherent_impls = tcx.inherent_impls.borrow_mut(); let mut inherent_impls = tcx.inherent_impls.borrow_mut();
match inherent_impls.get().find(&base_def_id) { match inherent_impls.get().find(&base_def_id) {
None => { None => {
implementation_list = @RefCell::new(~[]); implementation_list = @RefCell::new(Vec::new());
inherent_impls.get().insert(base_def_id, implementation_list); inherent_impls.get().insert(base_def_id, implementation_list);
} }
Some(&existing_implementation_list) => { Some(&existing_implementation_list) => {
@ -409,7 +411,7 @@ impl CoherenceChecker {
let mut trait_impls = tcx.trait_impls.borrow_mut(); let mut trait_impls = tcx.trait_impls.borrow_mut();
match trait_impls.get().find(&base_def_id) { match trait_impls.get().find(&base_def_id) {
None => { None => {
implementation_list = @RefCell::new(~[]); implementation_list = @RefCell::new(Vec::new());
trait_impls.get().insert(base_def_id, implementation_list); trait_impls.get().insert(base_def_id, implementation_list);
} }
Some(&existing_implementation_list) => { Some(&existing_implementation_list) => {
@ -611,7 +613,7 @@ impl CoherenceChecker {
let tcx = self.crate_context.tcx; let tcx = self.crate_context.tcx;
match item.node { match item.node {
ItemImpl(_, ref trait_refs, _, ref ast_methods) => { ItemImpl(_, ref trait_refs, _, ref ast_methods) => {
let mut methods = ~[]; let mut methods = Vec::new();
for ast_method in ast_methods.iter() { for ast_method in ast_methods.iter() {
methods.push(ty::method(tcx, local_def(ast_method.id))); methods.push(ty::method(tcx, local_def(ast_method.id)));
} }
@ -722,7 +724,7 @@ impl CoherenceChecker {
// We'll error out later. For now, just don't ICE. // We'll error out later. For now, just don't ICE.
continue; continue;
} }
let method_def_id = impl_info.methods[0].def_id; let method_def_id = impl_info.methods.get(0).def_id;
let self_type = self.get_self_type_for_implementation(*impl_info); let self_type = self.get_self_type_for_implementation(*impl_info);
match ty::get(self_type.ty).sty { match ty::get(self_type.ty).sty {
@ -789,10 +791,10 @@ pub fn make_substs_for_receiver_types(tcx: ty::ctxt,
num_trait_type_parameters + method.generics.type_param_defs().len(); num_trait_type_parameters + method.generics.type_param_defs().len();
// the new method type will have the type parameters from the impl + method // the new method type will have the type parameters from the impl + method
let combined_tps = vec::from_fn(num_method_type_parameters, |i| { let combined_tps = Vec::from_fn(num_method_type_parameters, |i| {
if i < num_trait_type_parameters { if i < num_trait_type_parameters {
// replace type parameters that come from trait with new value // replace type parameters that come from trait with new value
trait_ref.substs.tps[i] *trait_ref.substs.tps.get(i)
} else { } else {
// replace type parameters that belong to method with another // replace type parameters that belong to method with another
// type parameter, this time with the index adjusted // type parameter, this time with the index adjusted
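Not part of the patch: the append pattern from the coherence changes above, in isolation. The old `vec::append` took and returned `~[T]`; its `vec_ng` counterpart takes a `Vec<T>` plus a borrowed slice for the tail.

```rust
// Illustrative only: vec::append(~[T], &[T]) -> vec_ng::append(Vec<T>, &[T]).
use std::vec_ng;
use std::vec_ng::Vec;

fn main() {
    let impl_tps: Vec<int> = vec!(1, 2);
    let method_tps: Vec<int> = vec!(3, 4);
    // was: vec::append(impl_tps, method_tps)
    let combined = vec_ng::append(impl_tps, method_tps.as_slice());
    assert_eq!(combined.len(), 4);
}
```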
View File
@ -46,6 +46,8 @@ use util::ppaux::Repr;
use std::rc::Rc; use std::rc::Rc;
use std::vec; use std::vec;
use std::vec_ng::Vec;
use std::vec_ng;
use syntax::abi::AbiSet; use syntax::abi::AbiSet;
use syntax::ast::{RegionTyParamBound, TraitTyParamBound}; use syntax::ast::{RegionTyParamBound, TraitTyParamBound};
use syntax::ast; use syntax::ast;
@ -324,7 +326,8 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
let substs = substs { let substs = substs {
regions: ty::NonerasedRegions(rps_from_trait), regions: ty::NonerasedRegions(rps_from_trait),
self_ty: Some(self_param), self_ty: Some(self_param),
tps: non_shifted_trait_tps + shifted_method_tps tps: vec_ng::append(Vec::from_slice(non_shifted_trait_tps),
shifted_method_tps)
}; };
// create the type of `foo`, applying the substitution above // create the type of `foo`, applying the substitution above
@ -336,10 +339,11 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
// the substitution to any traits that appear in their bounds. // the substitution to any traits that appear in their bounds.
// add in the type parameters from the trait // add in the type parameters from the trait
let mut new_type_param_defs = ~[]; let mut new_type_param_defs = Vec::new();
let substd_type_param_defs = let substd_type_param_defs =
trait_ty_generics.type_param_defs.subst(tcx, &substs); trait_ty_generics.type_param_defs.subst(tcx, &substs);
new_type_param_defs.push_all(*substd_type_param_defs.borrow()); new_type_param_defs.push_all(substd_type_param_defs.borrow()
.as_slice());
// add in the "self" type parameter // add in the "self" type parameter
let self_trait_def = get_trait_def(ccx, local_def(trait_id)); let self_trait_def = get_trait_def(ccx, local_def(trait_id));
@ -349,14 +353,15 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
def_id: dummy_defid, def_id: dummy_defid,
bounds: @ty::ParamBounds { bounds: @ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(), builtin_bounds: ty::EmptyBuiltinBounds(),
trait_bounds: ~[self_trait_ref] trait_bounds: vec!(self_trait_ref)
}, },
default: None default: None
}); });
// add in the type parameters from the method // add in the type parameters from the method
let substd_type_param_defs = m.generics.type_param_defs.subst(tcx, &substs); let substd_type_param_defs = m.generics.type_param_defs.subst(tcx, &substs);
new_type_param_defs.push_all(*substd_type_param_defs.borrow()); new_type_param_defs.push_all(substd_type_param_defs.borrow()
.as_slice());
debug!("static method {} type_param_defs={} ty={}, substs={}", debug!("static method {} type_param_defs={} ty={}, substs={}",
m.def_id.repr(tcx), m.def_id.repr(tcx),
@ -420,7 +425,7 @@ pub fn ensure_supertraits(ccx: &CrateCtxt,
} }
let self_ty = ty::mk_self(ccx.tcx, local_def(id)); let self_ty = ty::mk_self(ccx.tcx, local_def(id));
let mut ty_trait_refs: ~[@ty::TraitRef] = ~[]; let mut ty_trait_refs: Vec<@ty::TraitRef> = Vec::new();
let mut bounds = ty::EmptyBuiltinBounds(); let mut bounds = ty::EmptyBuiltinBounds();
for ast_trait_ref in ast_trait_refs.iter() { for ast_trait_ref in ast_trait_refs.iter() {
let trait_def_id = ty::trait_ref_to_def_id(ccx.tcx, ast_trait_ref); let trait_def_id = ty::trait_ref_to_def_id(ccx.tcx, ast_trait_ref);
@ -494,8 +499,9 @@ fn convert_methods(ccx: &CrateCtxt,
// itself // itself
ty_param_bounds_and_ty { ty_param_bounds_and_ty {
generics: ty::Generics { generics: ty::Generics {
type_param_defs: Rc::new(vec::append( type_param_defs: Rc::new(vec_ng::append(
rcvr_ty_generics.type_param_defs().to_owned(), Vec::from_slice(
rcvr_ty_generics.type_param_defs()),
m_ty_generics.type_param_defs())), m_ty_generics.type_param_defs())),
region_param_defs: rcvr_ty_generics.region_param_defs.clone(), region_param_defs: rcvr_ty_generics.region_param_defs.clone(),
}, },
@ -860,7 +866,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item)
let tpt = ty_param_bounds_and_ty { let tpt = ty_param_bounds_and_ty {
generics: ty::Generics { generics: ty::Generics {
type_param_defs: ty_generics.type_param_defs.clone(), type_param_defs: ty_generics.type_param_defs.clone(),
region_param_defs: Rc::new(~[]), region_param_defs: Rc::new(Vec::new()),
}, },
ty: ty::mk_bare_fn(ccx.tcx, tofd) ty: ty::mk_bare_fn(ccx.tcx, tofd)
}; };
@ -946,8 +952,8 @@ pub fn ty_of_foreign_item(ccx: &CrateCtxt,
ast::ForeignItemStatic(t, _) => { ast::ForeignItemStatic(t, _) => {
ty::ty_param_bounds_and_ty { ty::ty_param_bounds_and_ty {
generics: ty::Generics { generics: ty::Generics {
type_param_defs: Rc::new(~[]), type_param_defs: Rc::new(Vec::new()),
region_param_defs: Rc::new(~[]), region_param_defs: Rc::new(Vec::new()),
}, },
ty: ast_ty_to_ty(ccx, &ExplicitRscope, t) ty: ast_ty_to_ty(ccx, &ExplicitRscope, t)
} }
@ -1008,7 +1014,7 @@ pub fn ty_generics(ccx: &CrateCtxt,
let mut param_bounds = ty::ParamBounds { let mut param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(), builtin_bounds: ty::EmptyBuiltinBounds(),
trait_bounds: ~[] trait_bounds: Vec::new()
}; };
for ast_bound in ast_bounds.iter() { for ast_bound in ast_bounds.iter() {
match *ast_bound { match *ast_bound {
@ -1083,7 +1089,7 @@ pub fn mk_item_substs(ccx: &CrateCtxt,
ty_generics: &ty::Generics, ty_generics: &ty::Generics,
self_ty: Option<ty::t>) -> ty::substs self_ty: Option<ty::t>) -> ty::substs
{ {
let params: ~[ty::t] = let params: Vec<ty::t> =
ty_generics.type_param_defs().iter().enumerate().map( ty_generics.type_param_defs().iter().enumerate().map(
|(i, t)| ty::mk_param(ccx.tcx, i, t.def_id)).collect(); |(i, t)| ty::mk_param(ccx.tcx, i, t.def_id)).collect();
View File
@ -63,6 +63,7 @@ use util::common::indent;
use util::ppaux::Repr; use util::ppaux::Repr;
use std::result; use std::result;
use std::vec_ng::Vec;
use syntax::ast::{Onceness, Purity}; use syntax::ast::{Onceness, Purity};
use syntax::ast; use syntax::ast;
use syntax::opt_vec; use syntax::opt_vec;
@ -82,7 +83,7 @@ pub trait Combine {
fn contratys(&self, a: ty::t, b: ty::t) -> cres<ty::t>; fn contratys(&self, a: ty::t, b: ty::t) -> cres<ty::t>;
fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t>; fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t>;
fn tps(&self, as_: &[ty::t], bs: &[ty::t]) -> cres<~[ty::t]> { fn tps(&self, as_: &[ty::t], bs: &[ty::t]) -> cres<Vec<ty::t> > {
// Note: type parameters are always treated as *invariant* // Note: type parameters are always treated as *invariant*
// (otherwise the type system would be unsound). In the // (otherwise the type system would be unsound). In the
@ -92,7 +93,7 @@ pub trait Combine {
if as_.len() == bs.len() { if as_.len() == bs.len() {
result::fold_(as_.iter().zip(bs.iter()) result::fold_(as_.iter().zip(bs.iter())
.map(|(a, b)| eq_tys(self, *a, *b))) .map(|(a, b)| eq_tys(self, *a, *b)))
.then(|| Ok(as_.to_owned())) .then(|| Ok(Vec::from_slice(as_)))
} else { } else {
Err(ty::terr_ty_param_size(expected_found(self, Err(ty::terr_ty_param_size(expected_found(self,
as_.len(), as_.len(),
@ -180,7 +181,7 @@ pub trait Combine {
} }
} }
let tps = if_ok!(self.tps(as_.tps, bs.tps)); let tps = if_ok!(self.tps(as_.tps.as_slice(), bs.tps.as_slice()));
let self_ty = if_ok!(self.self_tys(as_.self_ty, bs.self_ty)); let self_ty = if_ok!(self.self_tys(as_.self_ty, bs.self_ty));
let regions = if_ok!(relate_region_params(self, let regions = if_ok!(relate_region_params(self,
item_def_id, item_def_id,
@ -396,7 +397,7 @@ pub fn eq_opt_regions<C:Combine>(
pub fn super_fn_sigs<C:Combine>(this: &C, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> { pub fn super_fn_sigs<C:Combine>(this: &C, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
fn argvecs<C:Combine>(this: &C, a_args: &[ty::t], b_args: &[ty::t]) -> cres<~[ty::t]> { fn argvecs<C:Combine>(this: &C, a_args: &[ty::t], b_args: &[ty::t]) -> cres<Vec<ty::t> > {
if a_args.len() == b_args.len() { if a_args.len() == b_args.len() {
result::collect(a_args.iter().zip(b_args.iter()) result::collect(a_args.iter().zip(b_args.iter())
.map(|(a, b)| this.args(*a, *b))) .map(|(a, b)| this.args(*a, *b)))
@ -409,7 +410,9 @@ pub fn super_fn_sigs<C:Combine>(this: &C, a: &ty::FnSig, b: &ty::FnSig) -> cres<
return Err(ty::terr_variadic_mismatch(expected_found(this, a.variadic, b.variadic))); return Err(ty::terr_variadic_mismatch(expected_found(this, a.variadic, b.variadic)));
} }
let inputs = if_ok!(argvecs(this, a.inputs, b.inputs)); let inputs = if_ok!(argvecs(this,
a.inputs.as_slice(),
b.inputs.as_slice()));
let output = if_ok!(this.tys(a.output, b.output)); let output = if_ok!(this.tys(a.output, b.output));
Ok(FnSig {binder_id: a.binder_id, Ok(FnSig {binder_id: a.binder_id,
inputs: inputs, inputs: inputs,

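The Combine changes above keep `&[ty::t]` parameters but move the return types to `Vec<ty::t>`, so callers that now own a `Vec` borrow it with `.as_slice()` at the call site. A small sketch of that boundary under the same convention; `pairwise_max` and its `uint` element type are invented for illustration and are not part of the Combine trait.

    use std::vec_ng::Vec;

    // Borrow slices in, hand an owned Vec back: the shape that `tps`
    // and `argvecs` take after this patch.
    fn pairwise_max(as_: &[uint], bs: &[uint]) -> Result<Vec<uint>, ~str> {
        if as_.len() == bs.len() {
            Ok(as_.iter()
                  .zip(bs.iter())
                  .map(|(a, b)| if *a > *b { *a } else { *b })
                  .collect())
        } else {
            Err(~"length mismatch")
        }
    }

    fn main() {
        let a = vec!(1u, 5u, 2u);
        let b = vec!(3u, 4u, 6u);
        // Callers that own Vecs borrow them at the call site.
        let m = pairwise_max(a.as_slice(), b.as_slice()).unwrap();
        assert_eq!(m.len(), 3);
    }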

@ -155,10 +155,16 @@ impl<'f> Combine for Glb<'f> {
fold_regions_in_sig( fold_regions_in_sig(
self.get_ref().infcx.tcx, self.get_ref().infcx.tcx,
&sig0, &sig0,
|r| generalize_region(self, snapshot, |r| {
new_vars, sig0.binder_id, generalize_region(self,
&a_map, a_vars, b_vars, snapshot,
r)); new_vars.as_slice(),
sig0.binder_id,
&a_map,
a_vars.as_slice(),
b_vars.as_slice(),
r)
});
debug!("sig1 = {}", sig1.inf_str(self.get_ref().infcx)); debug!("sig1 = {}", sig1.inf_str(self.get_ref().infcx));
return Ok(sig1); return Ok(sig1);


@ -43,9 +43,11 @@ use middle::typeck::infer::lub::Lub;
use middle::typeck::infer::unify::*; use middle::typeck::infer::unify::*;
use middle::typeck::infer::sub::Sub; use middle::typeck::infer::sub::Sub;
use middle::typeck::infer::to_str::InferStr; use middle::typeck::infer::to_str::InferStr;
use collections::HashMap;
use util::common::indenter; use util::common::indenter;
use collections::HashMap;
use std::vec_ng::Vec;
pub trait LatticeValue { pub trait LatticeValue {
fn sub(cf: &CombineFields, a: &Self, b: &Self) -> ures; fn sub(cf: &CombineFields, a: &Self, b: &Self) -> ures;
fn lub(cf: &CombineFields, a: &Self, b: &Self) -> cres<Self>; fn lub(cf: &CombineFields, a: &Self, b: &Self) -> cres<Self>;
@ -522,7 +524,7 @@ pub fn lattice_var_and_t<L:LatticeDir + Combine,
pub fn var_ids<T:Combine>(this: &T, pub fn var_ids<T:Combine>(this: &T,
map: &HashMap<ty::BoundRegion, ty::Region>) map: &HashMap<ty::BoundRegion, ty::Region>)
-> ~[RegionVid] { -> Vec<RegionVid> {
map.iter().map(|(_, r)| match *r { map.iter().map(|(_, r)| match *r {
ty::ReInfer(ty::ReVar(r)) => { r } ty::ReInfer(ty::ReVar(r)) => { r }
r => { r => {


@ -143,7 +143,7 @@ impl<'f> Combine for Lub<'f> {
fold_regions_in_sig( fold_regions_in_sig(
self.get_ref().infcx.tcx, self.get_ref().infcx.tcx,
&sig0, &sig0,
|r| generalize_region(self, snapshot, new_vars, |r| generalize_region(self, snapshot, new_vars.as_slice(),
sig0.binder_id, &a_map, r)); sig0.binder_id, &a_map, r));
return Ok(sig1); return Ok(sig1);


@ -21,6 +21,7 @@ pub use middle::typeck::infer::resolve::{resolve_ivar, resolve_all};
pub use middle::typeck::infer::resolve::{resolve_nested_tvar}; pub use middle::typeck::infer::resolve::{resolve_nested_tvar};
pub use middle::typeck::infer::resolve::{resolve_rvar}; pub use middle::typeck::infer::resolve::{resolve_rvar};
use collections::HashMap;
use collections::SmallIntMap; use collections::SmallIntMap;
use middle::ty::{TyVid, IntVid, FloatVid, RegionVid, Vid}; use middle::ty::{TyVid, IntVid, FloatVid, RegionVid, Vid};
use middle::ty; use middle::ty;
@ -37,9 +38,8 @@ use middle::typeck::infer::to_str::InferStr;
use middle::typeck::infer::unify::{ValsAndBindings, Root}; use middle::typeck::infer::unify::{ValsAndBindings, Root};
use middle::typeck::infer::error_reporting::ErrorReporting; use middle::typeck::infer::error_reporting::ErrorReporting;
use std::cell::{Cell, RefCell}; use std::cell::{Cell, RefCell};
use collections::HashMap;
use std::result; use std::result;
use std::vec; use std::vec_ng::Vec;
use syntax::ast::{MutImmutable, MutMutable}; use syntax::ast::{MutImmutable, MutMutable};
use syntax::ast; use syntax::ast;
use syntax::codemap; use syntax::codemap;
@ -260,7 +260,7 @@ pub fn fixup_err_to_str(f: fixup_err) -> ~str {
fn new_ValsAndBindings<V:Clone,T:Clone>() -> ValsAndBindings<V, T> { fn new_ValsAndBindings<V:Clone,T:Clone>() -> ValsAndBindings<V, T> {
ValsAndBindings { ValsAndBindings {
vals: SmallIntMap::new(), vals: SmallIntMap::new(),
bindings: ~[] bindings: Vec::new()
} }
} }
@ -622,8 +622,8 @@ impl InferCtxt {
ty::mk_var(self.tcx, self.next_ty_var_id()) ty::mk_var(self.tcx, self.next_ty_var_id())
} }
pub fn next_ty_vars(&self, n: uint) -> ~[ty::t] { pub fn next_ty_vars(&self, n: uint) -> Vec<ty::t> {
vec::from_fn(n, |_i| self.next_ty_var()) Vec::from_fn(n, |_i| self.next_ty_var())
} }
pub fn next_int_var_id(&self) -> IntVid { pub fn next_int_var_id(&self) -> IntVid {
@ -659,8 +659,8 @@ impl InferCtxt {
pub fn next_region_vars(&self, pub fn next_region_vars(&self,
origin: RegionVariableOrigin, origin: RegionVariableOrigin,
count: uint) count: uint)
-> ~[ty::Region] { -> Vec<ty::Region> {
vec::from_fn(count, |_| self.next_region_var(origin)) Vec::from_fn(count, |_| self.next_region_var(origin))
} }
pub fn fresh_bound_region(&self, binder_id: ast::NodeId) -> ty::Region { pub fn fresh_bound_region(&self, binder_id: ast::NodeId) -> ty::Region {

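Here, and again in later hunks, `vec::from_fn(n, |i| ..)` becomes `Vec::from_fn(n, |i| ..)`: the same closure-driven construction used by next_ty_vars and next_region_vars, just producing a `Vec` instead of `~[T]`. A tiny sketch with plain numbers standing in for inference variables:

    use std::vec_ng::Vec;

    fn main() {
        // Build n elements by calling the closure with each index,
        // as next_ty_vars and next_region_vars do above.
        let squares = Vec::from_fn(5, |i| i * i);
        assert_eq!(squares.len(), 5);
        assert_eq!(*squares.get(4), 16);
    }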

@ -27,6 +27,7 @@ use util::ppaux::{Repr};
use std::cell::{Cell, RefCell}; use std::cell::{Cell, RefCell};
use std::uint; use std::uint;
use std::vec; use std::vec;
use std::vec_ng::Vec;
use collections::{HashMap, HashSet}; use collections::{HashMap, HashSet};
use syntax::ast; use syntax::ast;
use syntax::opt_vec; use syntax::opt_vec;
@ -88,7 +89,7 @@ pub type CombineMap = HashMap<TwoRegions, RegionVid>;
pub struct RegionVarBindings { pub struct RegionVarBindings {
tcx: ty::ctxt, tcx: ty::ctxt,
var_origins: RefCell<~[RegionVariableOrigin]>, var_origins: RefCell<Vec<RegionVariableOrigin> >,
constraints: RefCell<HashMap<Constraint, SubregionOrigin>>, constraints: RefCell<HashMap<Constraint, SubregionOrigin>>,
lubs: RefCell<CombineMap>, lubs: RefCell<CombineMap>,
glbs: RefCell<CombineMap>, glbs: RefCell<CombineMap>,
@ -103,24 +104,24 @@ pub struct RegionVarBindings {
// actively snapshotting. The reason for this is that otherwise // actively snapshotting. The reason for this is that otherwise
// we end up adding entries for things like the lower bound on // we end up adding entries for things like the lower bound on
// a variable and so forth, which can never be rolled back. // a variable and so forth, which can never be rolled back.
undo_log: RefCell<~[UndoLogEntry]>, undo_log: RefCell<Vec<UndoLogEntry> >,
// This contains the results of inference. It begins as an empty // This contains the results of inference. It begins as an empty
// option and only acquires a value after inference is complete. // option and only acquires a value after inference is complete.
values: RefCell<Option<~[VarValue]>>, values: RefCell<Option<Vec<VarValue> >>,
} }
pub fn RegionVarBindings(tcx: ty::ctxt) -> RegionVarBindings { pub fn RegionVarBindings(tcx: ty::ctxt) -> RegionVarBindings {
RegionVarBindings { RegionVarBindings {
tcx: tcx, tcx: tcx,
var_origins: RefCell::new(~[]), var_origins: RefCell::new(Vec::new()),
values: RefCell::new(None), values: RefCell::new(None),
constraints: RefCell::new(HashMap::new()), constraints: RefCell::new(HashMap::new()),
lubs: RefCell::new(HashMap::new()), lubs: RefCell::new(HashMap::new()),
glbs: RefCell::new(HashMap::new()), glbs: RefCell::new(HashMap::new()),
skolemization_count: Cell::new(0), skolemization_count: Cell::new(0),
bound_count: Cell::new(0), bound_count: Cell::new(0),
undo_log: RefCell::new(~[]) undo_log: RefCell::new(Vec::new())
} }
} }
@ -354,11 +355,11 @@ impl RegionVarBindings {
None => { None => {
let var_origins = self.var_origins.borrow(); let var_origins = self.var_origins.borrow();
self.tcx.sess.span_bug( self.tcx.sess.span_bug(
var_origins.get()[rid.to_uint()].span(), var_origins.get().get(rid.to_uint()).span(),
format!("attempt to resolve region variable before \ format!("attempt to resolve region variable before \
values have been computed!")) values have been computed!"))
} }
Some(ref values) => values[rid.to_uint()] Some(ref values) => *values.get(rid.to_uint())
}; };
debug!("RegionVarBindings: resolve_var({:?}={})={:?}", debug!("RegionVarBindings: resolve_var({:?}={})={:?}",
@ -423,7 +424,7 @@ impl RegionVarBindings {
} }
pub fn vars_created_since_snapshot(&self, snapshot: uint) pub fn vars_created_since_snapshot(&self, snapshot: uint)
-> ~[RegionVid] { -> Vec<RegionVid> {
let undo_log = self.undo_log.borrow(); let undo_log = self.undo_log.borrow();
undo_log.get().slice_from(snapshot).iter() undo_log.get().slice_from(snapshot).iter()
.filter_map(|&elt| match elt { .filter_map(|&elt| match elt {
@ -433,7 +434,7 @@ impl RegionVarBindings {
.collect() .collect()
} }
pub fn tainted(&self, snapshot: uint, r0: Region) -> ~[Region] { pub fn tainted(&self, snapshot: uint, r0: Region) -> Vec<Region> {
/*! /*!
* Computes all regions that have been related to `r0` in any * Computes all regions that have been related to `r0` in any
* way since the snapshot `snapshot` was taken---`r0` itself * way since the snapshot `snapshot` was taken---`r0` itself
@ -453,11 +454,11 @@ impl RegionVarBindings {
// `result_set` acts as a worklist: we explore all outgoing // `result_set` acts as a worklist: we explore all outgoing
// edges and add any new regions we find to result_set. This // edges and add any new regions we find to result_set. This
// is not a terribly efficient implementation. // is not a terribly efficient implementation.
let mut result_set = ~[r0]; let mut result_set = vec!(r0);
let mut result_index = 0; let mut result_index = 0;
while result_index < result_set.len() { while result_index < result_set.len() {
// nb: can't use uint::range() here because result_set grows // nb: can't use uint::range() here because result_set grows
let r = result_set[result_index]; let r = *result_set.get(result_index);
debug!("result_index={}, r={:?}", result_index, r); debug!("result_index={}, r={:?}", result_index, r);
@ -466,18 +467,18 @@ impl RegionVarBindings {
// nb: can't use uint::range() here as we move result_set // nb: can't use uint::range() here as we move result_set
let regs = { let regs = {
let undo_log = self.undo_log.borrow(); let undo_log = self.undo_log.borrow();
match undo_log.get()[undo_index] { match undo_log.get().get(undo_index) {
AddConstraint(ConstrainVarSubVar(ref a, ref b)) => { &AddConstraint(ConstrainVarSubVar(ref a, ref b)) => {
Some((ReInfer(ReVar(*a)), Some((ReInfer(ReVar(*a)),
ReInfer(ReVar(*b)))) ReInfer(ReVar(*b))))
} }
AddConstraint(ConstrainRegSubVar(ref a, ref b)) => { &AddConstraint(ConstrainRegSubVar(ref a, ref b)) => {
Some((*a, ReInfer(ReVar(*b)))) Some((*a, ReInfer(ReVar(*b))))
} }
AddConstraint(ConstrainVarSubReg(ref a, ref b)) => { &AddConstraint(ConstrainVarSubReg(ref a, ref b)) => {
Some((ReInfer(ReVar(*a)), *b)) Some((ReInfer(ReVar(*a)), *b))
} }
AddConstraint(ConstrainRegSubReg(a, b)) => { &AddConstraint(ConstrainRegSubReg(a, b)) => {
Some((a, b)) Some((a, b))
} }
_ => { _ => {
@ -504,11 +505,10 @@ impl RegionVarBindings {
return result_set; return result_set;
fn consider_adding_edge(result_set: ~[Region], fn consider_adding_edge(result_set: Vec<Region> ,
r: Region, r: Region,
r1: Region, r1: Region,
r2: Region) -> ~[Region] r2: Region) -> Vec<Region> {
{
let mut result_set = result_set; let mut result_set = result_set;
if r == r1 { // Clearly, this is potentially inefficient. if r == r1 { // Clearly, this is potentially inefficient.
if !result_set.iter().any(|x| *x == r2) { if !result_set.iter().any(|x| *x == r2) {
@ -564,7 +564,7 @@ impl RegionVarBindings {
(ReInfer(ReVar(v_id)), _) | (_, ReInfer(ReVar(v_id))) => { (ReInfer(ReVar(v_id)), _) | (_, ReInfer(ReVar(v_id))) => {
let var_origins = self.var_origins.borrow(); let var_origins = self.var_origins.borrow();
self.tcx.sess.span_bug( self.tcx.sess.span_bug(
var_origins.get()[v_id.to_uint()].span(), var_origins.get().get(v_id.to_uint()).span(),
format!("lub_concrete_regions invoked with \ format!("lub_concrete_regions invoked with \
non-concrete regions: {:?}, {:?}", a, b)); non-concrete regions: {:?}, {:?}", a, b));
} }
@ -669,7 +669,7 @@ impl RegionVarBindings {
(_, ReInfer(ReVar(v_id))) => { (_, ReInfer(ReVar(v_id))) => {
let var_origins = self.var_origins.borrow(); let var_origins = self.var_origins.borrow();
self.tcx.sess.span_bug( self.tcx.sess.span_bug(
var_origins.get()[v_id.to_uint()].span(), var_origins.get().get(v_id.to_uint()).span(),
format!("glb_concrete_regions invoked with \ format!("glb_concrete_regions invoked with \
non-concrete regions: {:?}, {:?}", a, b)); non-concrete regions: {:?}, {:?}", a, b));
} }
@ -781,16 +781,16 @@ type RegionGraph = graph::Graph<(), Constraint>;
impl RegionVarBindings { impl RegionVarBindings {
fn infer_variable_values(&self, fn infer_variable_values(&self,
errors: &mut OptVec<RegionResolutionError>) errors: &mut OptVec<RegionResolutionError>)
-> ~[VarValue] { -> Vec<VarValue> {
let mut var_data = self.construct_var_data(); let mut var_data = self.construct_var_data();
self.expansion(var_data); self.expansion(var_data.as_mut_slice());
self.contraction(var_data); self.contraction(var_data.as_mut_slice());
self.collect_concrete_region_errors(errors); self.collect_concrete_region_errors(&mut *errors);
self.extract_values_and_collect_conflicts(var_data, errors) self.extract_values_and_collect_conflicts(var_data.as_slice(), errors)
} }
fn construct_var_data(&self) -> ~[VarData] { fn construct_var_data(&self) -> Vec<VarData> {
vec::from_fn(self.num_vars(), |_| { Vec::from_fn(self.num_vars(), |_| {
VarData { VarData {
// All nodes are initially classified as contracting; during // All nodes are initially classified as contracting; during
// the expansion phase, we will shift the classification for // the expansion phase, we will shift the classification for
@ -999,8 +999,7 @@ impl RegionVarBindings {
&self, &self,
var_data: &[VarData], var_data: &[VarData],
errors: &mut OptVec<RegionResolutionError>) errors: &mut OptVec<RegionResolutionError>)
-> ~[VarValue] -> Vec<VarValue> {
{
debug!("extract_values_and_collect_conflicts()"); debug!("extract_values_and_collect_conflicts()");
// This is the best way that I have found to suppress // This is the best way that I have found to suppress
@ -1073,7 +1072,7 @@ impl RegionVarBindings {
} }
} }
vec::from_fn(self.num_vars(), |idx| var_data[idx].value) Vec::from_fn(self.num_vars(), |idx| var_data[idx].value)
} }
fn construct_graph(&self) -> RegionGraph { fn construct_graph(&self) -> RegionGraph {
@ -1145,7 +1144,7 @@ impl RegionVarBindings {
{ {
let var_origins = self.var_origins.borrow(); let var_origins = self.var_origins.borrow();
errors.push(SubSupConflict( errors.push(SubSupConflict(
var_origins.get()[node_idx.to_uint()], *var_origins.get().get(node_idx.to_uint()),
lower_bound.origin, lower_bound.origin,
lower_bound.region, lower_bound.region,
upper_bound.origin, upper_bound.origin,
@ -1158,7 +1157,7 @@ impl RegionVarBindings {
let var_origins = self.var_origins.borrow(); let var_origins = self.var_origins.borrow();
self.tcx.sess.span_bug( self.tcx.sess.span_bug(
var_origins.get()[node_idx.to_uint()].span(), var_origins.get().get(node_idx.to_uint()).span(),
format!("collect_error_for_expanding_node() could not find error \ format!("collect_error_for_expanding_node() could not find error \
for var {:?}, lower_bounds={}, upper_bounds={}", for var {:?}, lower_bounds={}, upper_bounds={}",
node_idx, node_idx,
@ -1192,7 +1191,7 @@ impl RegionVarBindings {
Err(_) => { Err(_) => {
let var_origins = self.var_origins.borrow(); let var_origins = self.var_origins.borrow();
errors.push(SupSupConflict( errors.push(SupSupConflict(
var_origins.get()[node_idx.to_uint()], *var_origins.get().get(node_idx.to_uint()),
upper_bound_1.origin, upper_bound_1.origin,
upper_bound_1.region, upper_bound_1.region,
upper_bound_2.origin, upper_bound_2.origin,
@ -1205,7 +1204,7 @@ impl RegionVarBindings {
let var_origins = self.var_origins.borrow(); let var_origins = self.var_origins.borrow();
self.tcx.sess.span_bug( self.tcx.sess.span_bug(
var_origins.get()[node_idx.to_uint()].span(), var_origins.get().get(node_idx.to_uint()).span(),
format!("collect_error_for_contracting_node() could not find error \ format!("collect_error_for_contracting_node() could not find error \
for var {:?}, upper_bounds={}", for var {:?}, upper_bounds={}",
node_idx, node_idx,
@ -1218,17 +1217,17 @@ impl RegionVarBindings {
orig_node_idx: RegionVid, orig_node_idx: RegionVid,
dir: Direction, dir: Direction,
dup_vec: &mut [uint]) dup_vec: &mut [uint])
-> (~[RegionAndOrigin], bool) { -> (Vec<RegionAndOrigin> , bool) {
struct WalkState { struct WalkState {
set: HashSet<RegionVid>, set: HashSet<RegionVid>,
stack: ~[RegionVid], stack: Vec<RegionVid> ,
result: ~[RegionAndOrigin], result: Vec<RegionAndOrigin> ,
dup_found: bool dup_found: bool
} }
let mut state = WalkState { let mut state = WalkState {
set: HashSet::new(), set: HashSet::new(),
stack: ~[orig_node_idx], stack: vec!(orig_node_idx),
result: ~[], result: Vec::new(),
dup_found: false dup_found: false
}; };
state.set.insert(orig_node_idx); state.set.insert(orig_node_idx);

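In the region-inference hunks above, direct indexing such as `var_origins.get()[rid.to_uint()]` becomes `var_origins.get().get(rid.to_uint())`; because `Vec::get` hands back a reference, reads are dereferenced (`*result_set.get(result_index)`) and the undo_log match switches to `&`-patterns. The worklist loop in `tainted` keeps its shape. A self-contained sketch of the same pattern over a made-up `Edge` enum rather than the real constraint types:

    use std::vec_ng::Vec;

    enum Edge { Link(uint, uint), Unrelated }

    // Worklist in the style of RegionVarBindings::tainted: scan result_set
    // by index while pushing newly reached nodes onto its end.
    fn reachable(start: uint, edges: &[Edge]) -> Vec<uint> {
        let mut result_set = vec!(start);
        let mut result_index = 0;
        while result_index < result_set.len() {
            // Vec::get returns a reference, so copy the element out.
            let r = *result_set.get(result_index);
            for edge in edges.iter() {
                match edge {
                    // Matching through a reference uses &-patterns,
                    // like the rewritten undo_log match above.
                    &Link(from, to) if from == r => {
                        if !result_set.iter().any(|x| *x == to) {
                            result_set.push(to);
                        }
                    }
                    _ => {}
                }
            }
            result_index += 1;
        }
        result_set
    }

    fn main() {
        let edges = vec!(Link(0, 1), Link(1, 2), Unrelated, Link(5, 6));
        assert_eq!(reachable(0, edges.as_slice()).len(), 3);
    }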

@ -58,6 +58,7 @@ use middle::typeck::infer::unify::{Root, UnifyInferCtxtMethods};
use util::common::{indent, indenter}; use util::common::{indent, indenter};
use util::ppaux::ty_to_str; use util::ppaux::ty_to_str;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
pub static resolve_nested_tvar: uint = 0b0000000001; pub static resolve_nested_tvar: uint = 0b0000000001;
@ -83,7 +84,7 @@ pub struct ResolveState<'a> {
infcx: &'a InferCtxt, infcx: &'a InferCtxt,
modes: uint, modes: uint,
err: Option<fixup_err>, err: Option<fixup_err>,
v_seen: ~[TyVid], v_seen: Vec<TyVid> ,
type_depth: uint type_depth: uint
} }
@ -92,7 +93,7 @@ pub fn resolver<'a>(infcx: &'a InferCtxt, modes: uint) -> ResolveState<'a> {
infcx: infcx, infcx: infcx,
modes: modes, modes: modes,
err: None, err: None,
v_seen: ~[], v_seen: Vec::new(),
type_depth: 0 type_depth: 0
} }
} }


@ -46,7 +46,7 @@ static EMPTY_SOURCE_STR: &str = "/* Hello, world! */";
fn setup_env(test_name: &str, source_string: &str) -> Env { fn setup_env(test_name: &str, source_string: &str) -> Env {
let messages = @DVec(); let messages = @DVec();
let matches = getopts(~[~"-Z", ~"verbose"], optgroups()).get(); let matches = getopts(vec!(~"-Z", ~"verbose"), optgroups()).get();
let diag = diagnostic::collect(messages); let diag = diagnostic::collect(messages);
let sessopts = build_session_options(~"rustc", &matches, diag); let sessopts = build_session_options(~"rustc", &matches, diag);
let sess = build_session(sessopts, None, diag); let sess = build_session(sessopts, None, diag);
@ -186,7 +186,7 @@ impl Env {
proto: ast::ProtoBare, proto: ast::ProtoBare,
onceness: ast::Many, onceness: ast::Many,
region: ty::ReStatic, region: ty::ReStatic,
bounds: @~[]}, bounds: @Vec::new()},
sig: FnSig { sig: FnSig {
inputs: inputs, inputs: inputs,
output: output_ty, output: output_ty,


@ -17,6 +17,7 @@ use middle::typeck::infer::{Bounds, uok, ures};
use middle::typeck::infer::InferCtxt; use middle::typeck::infer::InferCtxt;
use middle::typeck::infer::to_str::InferStr; use middle::typeck::infer::to_str::InferStr;
use std::cell::RefCell; use std::cell::RefCell;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
#[deriving(Clone)] #[deriving(Clone)]
@ -27,7 +28,7 @@ pub enum VarValue<V, T> {
pub struct ValsAndBindings<V, T> { pub struct ValsAndBindings<V, T> {
vals: SmallIntMap<VarValue<V, T>>, vals: SmallIntMap<VarValue<V, T>>,
bindings: ~[(V, VarValue<V, T>)], bindings: Vec<(V, VarValue<V, T>)> ,
} }
pub struct Node<V, T> { pub struct Node<V, T> {


@ -72,6 +72,7 @@ use util::nodemap::{DefIdMap, NodeMap};
use std::cell::RefCell; use std::cell::RefCell;
use std::rc::Rc; use std::rc::Rc;
use std::vec_ng::Vec;
use collections::List; use collections::List;
use syntax::codemap::Span; use syntax::codemap::Span;
use syntax::print::pprust::*; use syntax::print::pprust::*;
@ -152,9 +153,9 @@ pub struct MethodCallee {
// of the method to be invoked // of the method to be invoked
pub type MethodMap = @RefCell<NodeMap<MethodCallee>>; pub type MethodMap = @RefCell<NodeMap<MethodCallee>>;
pub type vtable_param_res = @~[vtable_origin]; pub type vtable_param_res = @Vec<vtable_origin> ;
// Resolutions for bounds of all parameters, left to right, for a given path. // Resolutions for bounds of all parameters, left to right, for a given path.
pub type vtable_res = @~[vtable_param_res]; pub type vtable_res = @Vec<vtable_param_res> ;
#[deriving(Clone)] #[deriving(Clone)]
pub enum vtable_origin { pub enum vtable_origin {
@ -163,7 +164,7 @@ pub enum vtable_origin {
from whence comes the vtable, and tys are the type substs. from whence comes the vtable, and tys are the type substs.
vtable_res is the vtable itself vtable_res is the vtable itself
*/ */
vtable_static(ast::DefId, ~[ty::t], vtable_res), vtable_static(ast::DefId, Vec<ty::t> , vtable_res),
/* /*
Dynamic vtable, comes from a parameter that has a bound on it: Dynamic vtable, comes from a parameter that has a bound on it:
@ -235,7 +236,7 @@ pub fn write_ty_to_tcx(tcx: ty::ctxt, node_id: ast::NodeId, ty: ty::t) {
} }
pub fn write_substs_to_tcx(tcx: ty::ctxt, pub fn write_substs_to_tcx(tcx: ty::ctxt,
node_id: ast::NodeId, node_id: ast::NodeId,
substs: ~[ty::t]) { substs: Vec<ty::t> ) {
if substs.len() > 0u { if substs.len() > 0u {
debug!("write_substs_to_tcx({}, {:?})", node_id, debug!("write_substs_to_tcx({}, {:?})", node_id,
substs.map(|t| ppaux::ty_to_str(tcx, *t))); substs.map(|t| ppaux::ty_to_str(tcx, *t)));
@ -271,8 +272,8 @@ pub fn lookup_def_ccx(ccx: &CrateCtxt, sp: Span, id: ast::NodeId)
pub fn no_params(t: ty::t) -> ty::ty_param_bounds_and_ty { pub fn no_params(t: ty::t) -> ty::ty_param_bounds_and_ty {
ty::ty_param_bounds_and_ty { ty::ty_param_bounds_and_ty {
generics: ty::Generics {type_param_defs: Rc::new(~[]), generics: ty::Generics {type_param_defs: Rc::new(Vec::new()),
region_param_defs: Rc::new(~[])}, region_param_defs: Rc::new(Vec::new())},
ty: t ty: t
} }
} }
@ -352,7 +353,7 @@ fn check_main_fn_ty(ccx: &CrateCtxt,
abis: abi::AbiSet::Rust(), abis: abi::AbiSet::Rust(),
sig: ty::FnSig { sig: ty::FnSig {
binder_id: main_id, binder_id: main_id,
inputs: ~[], inputs: Vec::new(),
output: ty::mk_nil(), output: ty::mk_nil(),
variadic: false variadic: false
} }
@ -398,10 +399,10 @@ fn check_start_fn_ty(ccx: &CrateCtxt,
abis: abi::AbiSet::Rust(), abis: abi::AbiSet::Rust(),
sig: ty::FnSig { sig: ty::FnSig {
binder_id: start_id, binder_id: start_id,
inputs: ~[ inputs: vec!(
ty::mk_int(), ty::mk_int(),
ty::mk_imm_ptr(tcx, ty::mk_imm_ptr(tcx, ty::mk_u8())) ty::mk_imm_ptr(tcx, ty::mk_imm_ptr(tcx, ty::mk_u8()))
], ),
output: ty::mk_int(), output: ty::mk_int(),
variadic: false variadic: false
} }


@ -12,7 +12,7 @@
use middle::ty; use middle::ty;
use std::cell::Cell; use std::cell::Cell;
use std::vec; use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
use syntax::codemap::Span; use syntax::codemap::Span;
use syntax::opt_vec::OptVec; use syntax::opt_vec::OptVec;
@ -31,7 +31,7 @@ pub trait RegionScope {
fn anon_regions(&self, fn anon_regions(&self,
span: Span, span: Span,
count: uint) count: uint)
-> Result<~[ty::Region], ()>; -> Result<Vec<ty::Region> , ()>;
} }
// A scope in which all regions must be explicitly named // A scope in which all regions must be explicitly named
@ -41,7 +41,7 @@ impl RegionScope for ExplicitRscope {
fn anon_regions(&self, fn anon_regions(&self,
_span: Span, _span: Span,
_count: uint) _count: uint)
-> Result<~[ty::Region], ()> { -> Result<Vec<ty::Region> , ()> {
Err(()) Err(())
} }
} }
@ -66,10 +66,10 @@ impl RegionScope for BindingRscope {
fn anon_regions(&self, fn anon_regions(&self,
_: Span, _: Span,
count: uint) count: uint)
-> Result<~[ty::Region], ()> { -> Result<Vec<ty::Region> , ()> {
let idx = self.anon_bindings.get(); let idx = self.anon_bindings.get();
self.anon_bindings.set(idx + count); self.anon_bindings.set(idx + count);
Ok(vec::from_fn(count, |i| ty::ReLateBound(self.binder_id, Ok(Vec::from_fn(count, |i| ty::ReLateBound(self.binder_id,
ty::BrAnon(idx + i)))) ty::BrAnon(idx + i))))
} }
} }


@ -196,8 +196,8 @@ use collections::HashMap;
use arena; use arena;
use arena::Arena; use arena::Arena;
use middle::ty; use middle::ty;
use std::vec;
use std::fmt; use std::fmt;
use std::vec_ng::Vec;
use syntax::ast; use syntax::ast;
use syntax::ast_util; use syntax::ast_util;
use syntax::opt_vec; use syntax::opt_vec;
@ -261,7 +261,7 @@ struct TermsContext<'a> {
inferred_map: HashMap<ast::NodeId, InferredIndex>, inferred_map: HashMap<ast::NodeId, InferredIndex>,
// Maps from an InferredIndex to the info for that variable. // Maps from an InferredIndex to the info for that variable.
inferred_infos: ~[InferredInfo<'a>], inferred_infos: Vec<InferredInfo<'a>> ,
} }
enum ParamKind { TypeParam, RegionParam, SelfParam } enum ParamKind { TypeParam, RegionParam, SelfParam }
@ -282,7 +282,7 @@ fn determine_parameters_to_be_inferred<'a>(tcx: ty::ctxt,
tcx: tcx, tcx: tcx,
arena: arena, arena: arena,
inferred_map: HashMap::new(), inferred_map: HashMap::new(),
inferred_infos: ~[], inferred_infos: Vec::new(),
// cache and share the variance struct used for items with // cache and share the variance struct used for items with
// no type/region parameters // no type/region parameters
@ -410,7 +410,7 @@ struct ConstraintContext<'a> {
invariant: VarianceTermPtr<'a>, invariant: VarianceTermPtr<'a>,
bivariant: VarianceTermPtr<'a>, bivariant: VarianceTermPtr<'a>,
constraints: ~[Constraint<'a>], constraints: Vec<Constraint<'a>> ,
} }
/// Declares that the variable `decl_id` appears in a location with /// Declares that the variable `decl_id` appears in a location with
@ -457,7 +457,7 @@ fn add_constraints_from_crate<'a>(terms_cx: TermsContext<'a>,
contravariant: contravariant, contravariant: contravariant,
invariant: invariant, invariant: invariant,
bivariant: bivariant, bivariant: bivariant,
constraints: ~[], constraints: Vec::new(),
}; };
visit::walk_crate(&mut constraint_cx, krate, ()); visit::walk_crate(&mut constraint_cx, krate, ());
constraint_cx constraint_cx
@ -561,7 +561,7 @@ impl<'a> ConstraintContext<'a> {
// variance not yet inferred, so return a symbolic // variance not yet inferred, so return a symbolic
// variance. // variance.
let InferredIndex(index) = self.inferred_index(param_def_id.node); let InferredIndex(index) = self.inferred_index(param_def_id.node);
self.terms_cx.inferred_infos[index].term self.terms_cx.inferred_infos.get(index).term
} else { } else {
// Parameter on an item defined within another crate: // Parameter on an item defined within another crate:
// variance already inferred, just look it up. // variance already inferred, just look it up.
@ -749,7 +749,7 @@ impl<'a> ConstraintContext<'a> {
let variance_decl = let variance_decl =
self.declared_variance(p.def_id, def_id, TypeParam, i); self.declared_variance(p.def_id, def_id, TypeParam, i);
let variance_i = self.xform(variance, variance_decl); let variance_i = self.xform(variance, variance_decl);
self.add_constraints_from_ty(substs.tps[i], variance_i); self.add_constraints_from_ty(*substs.tps.get(i), variance_i);
} }
match substs.regions { match substs.regions {
@ -835,15 +835,14 @@ impl<'a> ConstraintContext<'a> {
struct SolveContext<'a> { struct SolveContext<'a> {
terms_cx: TermsContext<'a>, terms_cx: TermsContext<'a>,
constraints: ~[Constraint<'a>], constraints: Vec<Constraint<'a>> ,
// Maps from an InferredIndex to the inferred value for that variable. // Maps from an InferredIndex to the inferred value for that variable.
solutions: ~[ty::Variance] solutions: Vec<ty::Variance> }
}
fn solve_constraints(constraints_cx: ConstraintContext) { fn solve_constraints(constraints_cx: ConstraintContext) {
let ConstraintContext { terms_cx, constraints, .. } = constraints_cx; let ConstraintContext { terms_cx, constraints, .. } = constraints_cx;
let solutions = vec::from_elem(terms_cx.num_inferred(), ty::Bivariant); let solutions = Vec::from_elem(terms_cx.num_inferred(), ty::Bivariant);
let mut solutions_cx = SolveContext { let mut solutions_cx = SolveContext {
terms_cx: terms_cx, terms_cx: terms_cx,
constraints: constraints, constraints: constraints,
@ -868,18 +867,21 @@ impl<'a> SolveContext<'a> {
let Constraint { inferred, variance: term } = *constraint; let Constraint { inferred, variance: term } = *constraint;
let InferredIndex(inferred) = inferred; let InferredIndex(inferred) = inferred;
let variance = self.evaluate(term); let variance = self.evaluate(term);
let old_value = self.solutions[inferred]; let old_value = *self.solutions.get(inferred);
let new_value = glb(variance, old_value); let new_value = glb(variance, old_value);
if old_value != new_value { if old_value != new_value {
debug!("Updating inferred {} (node {}) \ debug!("Updating inferred {} (node {}) \
from {:?} to {:?} due to {}", from {:?} to {:?} due to {}",
inferred, inferred,
self.terms_cx.inferred_infos[inferred].param_id, self.terms_cx
.inferred_infos
.get(inferred)
.param_id,
old_value, old_value,
new_value, new_value,
term.to_str()); term.to_str());
self.solutions[inferred] = new_value; *self.solutions.get_mut(inferred) = new_value;
changed = true; changed = true;
} }
} }
@ -902,25 +904,28 @@ impl<'a> SolveContext<'a> {
let mut index = 0; let mut index = 0;
let num_inferred = self.terms_cx.num_inferred(); let num_inferred = self.terms_cx.num_inferred();
while index < num_inferred { while index < num_inferred {
let item_id = inferred_infos[index].item_id; let item_id = inferred_infos.get(index).item_id;
let mut item_variances = ty::ItemVariances { let mut item_variances = ty::ItemVariances {
self_param: None, self_param: None,
type_params: opt_vec::Empty, type_params: opt_vec::Empty,
region_params: opt_vec::Empty region_params: opt_vec::Empty
}; };
while index < num_inferred && while index < num_inferred &&
inferred_infos[index].item_id == item_id { inferred_infos.get(index).item_id == item_id {
let info = &inferred_infos[index]; let info = inferred_infos.get(index);
match info.kind { match info.kind {
SelfParam => { SelfParam => {
assert!(item_variances.self_param.is_none()); assert!(item_variances.self_param.is_none());
item_variances.self_param = Some(solutions[index]); item_variances.self_param =
Some(*solutions.get(index));
} }
TypeParam => { TypeParam => {
item_variances.type_params.push(solutions[index]); item_variances.type_params
.push(*solutions.get(index));
} }
RegionParam => { RegionParam => {
item_variances.region_params.push(solutions[index]); item_variances.region_params
.push(*solutions.get(index));
} }
} }
index += 1; index += 1;
@ -959,7 +964,7 @@ impl<'a> SolveContext<'a> {
} }
InferredTerm(InferredIndex(index)) => { InferredTerm(InferredIndex(index)) => {
self.solutions[index] *self.solutions.get(index)
} }
} }
} }

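The variance solver above seeds `solutions` with `Vec::from_elem`, reads entries through `*solutions.get(i)`, and writes them back through `*solutions.get_mut(i) = new_value`. A toy fixed-point loop in the same style; the lattice here is just `min` over `uint`, standing in for glb on `ty::Variance`:

    use std::vec_ng::Vec;

    fn main() {
        // Each constraint says: solutions[target] must not exceed `bound`.
        let constraints = vec!((0u, 3u), (1u, 1u), (0u, 2u));

        // Seed every slot with a top element, as the variance pass does
        // with Vec::from_elem(num_inferred, ty::Bivariant).
        let mut solutions = Vec::from_elem(2, 10u);

        let mut changed = true;
        while changed {
            changed = false;
            for &(target, bound) in constraints.iter() {
                let old_value = *solutions.get(target);
                let new_value = if bound < old_value { bound } else { old_value };
                if old_value != new_value {
                    // get_mut hands back a mutable reference to the slot.
                    *solutions.get_mut(target) = new_value;
                    changed = true;
                }
            }
        }

        assert_eq!(*solutions.get(0), 2);
        assert_eq!(*solutions.get(1), 1);
    }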

@ -16,6 +16,7 @@ use syntax::visit;
use syntax::visit::Visitor; use syntax::visit::Visitor;
use std::local_data; use std::local_data;
use std::vec_ng::Vec;
use time; use time;
@ -66,7 +67,7 @@ pub fn indenter() -> _indenter {
pub fn field_expr(f: ast::Field) -> @ast::Expr { return f.expr; } pub fn field_expr(f: ast::Field) -> @ast::Expr { return f.expr; }
pub fn field_exprs(fields: ~[ast::Field]) -> ~[@ast::Expr] { pub fn field_exprs(fields: Vec<ast::Field> ) -> Vec<@ast::Expr> {
fields.map(|f| f.expr) fields.map(|f| f.expr)
} }

Some files were not shown because too many files have changed in this diff.