librustc: Fix snake case errors.

A number of functions and methods have been moved or renamed to align
better with Rust's standard naming conventions; a short migration sketch
follows the rename list below.

rustc::back::link::WriteOutputFile => write_output_file
rustc::middle::ty::EmptyBuiltinBounds => empty_builtin_bounds
rustc::middle::ty::AllBuiltinBounds => all_builtin_bounds
rustc::middle::liveness::IrMaps => IrMaps::new
rustc::middle::liveness::Liveness => Liveness::new
rustc::middle::resolve::NameBindings => NameBindings::new
rustc::middle::resolve::PrimitiveTypeTable => PrimitiveTypeTable::new
rustc::middle::resolve::Resolver => Resolver::new
rustc::middle::trans::datum::Datum => Datum::new
rustc::middle::trans::datum::DatumBlock => DatumBlock::new
rustc::middle::trans::datum::Rvalue => Rvalue::new
rustc::middle::typeck::infer::new_ValsAndBindings => ::infer::unify::ValsAndBindings::new
rustc::middle::typeck::infer::region_inference::RegionVarBindings => RegionVarBindings::new
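
For code built against librustc, call sites change mechanically from the
old CamelCase free functions to the new snake_case functions and `::new`
associated functions. The sketch below is a minimal, self-contained
illustration of that convention using toy types and invented signatures;
it is not the actual rustc API.

    // Toy illustration of the naming convention this change adopts.
    struct Datum {
        val: u32,
    }

    impl Datum {
        // Previously a free `fn Datum(val: u32) -> Datum` constructor;
        // now an associated `new` function.
        fn new(val: u32) -> Datum {
            Datum { val: val }
        }
    }

    // Previously a CamelCase free function such as `fn WriteOutputFile(...)`;
    // free functions now take snake_case names.
    fn write_output_file(d: &Datum) {
        println!("writing datum with val = {}", d.val);
    }

    fn main() {
        // Call sites change from `Datum(7)` to `Datum::new(7)` and from
        // `WriteOutputFile(&d)` to `write_output_file(&d)`.
        let d = Datum::new(7);
        write_output_file(&d);
    }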

[breaking-change]
Kevin Butler 2014-05-28 20:36:05 +01:00
parent ed5bf6621e
commit 09fc34066b
31 changed files with 304 additions and 302 deletions

View File

@ -73,7 +73,7 @@ fn run_cfail_test(config: &Config, props: &TestProps, testfile: &Path) {
let proc_res = compile_test(config, props, testfile);
if proc_res.status.success() {
fatal_ProcRes("compile-fail test compiled successfully!".to_string(),
fatal_proc_rec("compile-fail test compiled successfully!".to_string(),
&proc_res);
}
@ -97,7 +97,7 @@ fn run_rfail_test(config: &Config, props: &TestProps, testfile: &Path) {
let proc_res = compile_test(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("compilation failed!".to_string(), &proc_res);
fatal_proc_rec("compilation failed!".to_string(), &proc_res);
}
exec_compiled_test(config, props, testfile)
@ -108,7 +108,7 @@ fn run_rfail_test(config: &Config, props: &TestProps, testfile: &Path) {
// The value our Makefile configures valgrind to return on failure
static VALGRIND_ERR: int = 100;
if proc_res.status.matches_exit_status(VALGRIND_ERR) {
fatal_ProcRes("run-fail test isn't valgrind-clean!".to_string(),
fatal_proc_rec("run-fail test isn't valgrind-clean!".to_string(),
&proc_res);
}
@ -120,7 +120,7 @@ fn check_correct_failure_status(proc_res: &ProcRes) {
// The value the rust runtime returns on failure
static RUST_ERR: int = 101;
if !proc_res.status.matches_exit_status(RUST_ERR) {
fatal_ProcRes(
fatal_proc_rec(
format!("failure produced the wrong error: {}", proc_res.status),
proc_res);
}
@ -131,19 +131,19 @@ fn run_rpass_test(config: &Config, props: &TestProps, testfile: &Path) {
let mut proc_res = compile_test(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("compilation failed!".to_string(), &proc_res);
fatal_proc_rec("compilation failed!".to_string(), &proc_res);
}
proc_res = exec_compiled_test(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("test run failed!".to_string(), &proc_res);
fatal_proc_rec("test run failed!".to_string(), &proc_res);
}
} else {
let proc_res = jit_test(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("jit failed!".to_string(), &proc_res);
fatal_proc_rec("jit failed!".to_string(), &proc_res);
}
}
}
@ -172,7 +172,7 @@ fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
"normal");
if !proc_res.status.success() {
fatal_ProcRes(format!("pretty-printing failed in round {}", round),
fatal_proc_rec(format!("pretty-printing failed in round {}", round),
&proc_res);
}
@ -204,7 +204,7 @@ fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
let proc_res = typecheck_source(config, props, testfile, actual);
if !proc_res.status.success() {
fatal_ProcRes("pretty-printed source does not typecheck".to_string(),
fatal_proc_rec("pretty-printed source does not typecheck".to_string(),
&proc_res);
}
if props.no_pretty_expanded { return }
@ -212,13 +212,13 @@ fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
// additionally, run `--pretty expanded` and try to build it.
let proc_res = print_source(config, props, testfile, (*srcs.get(round)).clone(), "expanded");
if !proc_res.status.success() {
fatal_ProcRes(format!("pretty-printing (expanded) failed"), &proc_res);
fatal_proc_rec(format!("pretty-printing (expanded) failed"), &proc_res);
}
let ProcRes{ stdout: expanded_src, .. } = proc_res;
let proc_res = typecheck_source(config, props, testfile, expanded_src);
if !proc_res.status.success() {
fatal_ProcRes(format!("pretty-printed source (expanded) does \
fatal_proc_rec(format!("pretty-printed source (expanded) does \
not typecheck"),
&proc_res);
}
@ -326,7 +326,7 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) {
// compile test file (it should have 'compile-flags:-g' in the header)
let compiler_run_result = compile_test(config, props, testfile);
if !compiler_run_result.status.success() {
fatal_ProcRes("compilation failed!".to_string(), &compiler_run_result);
fatal_proc_rec("compilation failed!".to_string(), &compiler_run_result);
}
let exe_file = make_exe_name(config, testfile);
@ -517,7 +517,7 @@ fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path)
// compile test file (it should have 'compile-flags:-g' in the header)
let compile_result = compile_test(config, props, testfile);
if !compile_result.status.success() {
fatal_ProcRes("compilation failed!".to_string(), &compile_result);
fatal_proc_rec("compilation failed!".to_string(), &compile_result);
}
let exe_file = make_exe_name(config, testfile);
@ -560,7 +560,7 @@ fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path)
let debugger_run_result = run_lldb(config, &exe_file, &debugger_script);
if !debugger_run_result.status.success() {
fatal_ProcRes("Error while running LLDB".to_string(),
fatal_proc_rec("Error while running LLDB".to_string(),
&debugger_run_result);
}
@ -720,7 +720,7 @@ fn check_debugger_output(debugger_run_result: &ProcRes, check_lines: &[String])
}
}
if i != num_check_lines {
fatal_ProcRes(format!("line not found in debugger output: {}",
fatal_proc_rec(format!("line not found in debugger output: {}",
check_lines.get(i).unwrap()),
debugger_run_result);
}
@ -764,14 +764,14 @@ fn check_error_patterns(props: &TestProps,
let missing_patterns =
props.error_patterns.slice(next_err_idx, props.error_patterns.len());
if missing_patterns.len() == 1u {
fatal_ProcRes(format!("error pattern '{}' not found!",
fatal_proc_rec(format!("error pattern '{}' not found!",
missing_patterns[0]),
proc_res);
} else {
for pattern in missing_patterns.iter() {
error(format!("error pattern '{}' not found!", *pattern));
}
fatal_ProcRes("multiple error patterns not found".to_string(),
fatal_proc_rec("multiple error patterns not found".to_string(),
proc_res);
}
}
@ -779,7 +779,7 @@ fn check_error_patterns(props: &TestProps,
fn check_no_compiler_crash(proc_res: &ProcRes) {
for line in proc_res.stderr.as_slice().lines() {
if line.starts_with("error: internal compiler error:") {
fatal_ProcRes("compiler encountered internal error".to_string(),
fatal_proc_rec("compiler encountered internal error".to_string(),
proc_res);
}
}
@ -857,7 +857,7 @@ fn check_expected_errors(expected_errors: Vec<errors::ExpectedError> ,
}
if !was_expected && is_compiler_error_or_warning(line) {
fatal_ProcRes(format!("unexpected compiler error or warning: '{}'",
fatal_proc_rec(format!("unexpected compiler error or warning: '{}'",
line),
proc_res);
}
@ -866,7 +866,7 @@ fn check_expected_errors(expected_errors: Vec<errors::ExpectedError> ,
for (i, &flag) in found_flags.iter().enumerate() {
if !flag {
let ee = expected_errors.get(i);
fatal_ProcRes(format!("expected {} on line {} not found: {}",
fatal_proc_rec(format!("expected {} on line {} not found: {}",
ee.kind, ee.line, ee.msg),
proc_res);
}
@ -1047,7 +1047,7 @@ fn compose_and_run_compiler(
config.compile_lib_path.as_slice(),
None);
if !auxres.status.success() {
fatal_ProcRes(
fatal_proc_rec(
format!("auxiliary build of {} failed to compile: ",
abs_ab.display()),
&auxres);
@ -1286,7 +1286,7 @@ fn error(err: String) { println!("\nerror: {}", err); }
fn fatal(err: String) -> ! { error(err); fail!(); }
fn fatal_ProcRes(err: String, proc_res: &ProcRes) -> ! {
fn fatal_proc_rec(err: String, proc_res: &ProcRes) -> ! {
print!("\n\
error: {}\n\
status: {}\n\
@ -1562,35 +1562,35 @@ fn run_codegen_test(config: &Config, props: &TestProps,
let mut proc_res = compile_test_and_save_bitcode(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("compilation failed!".to_string(), &proc_res);
fatal_proc_rec("compilation failed!".to_string(), &proc_res);
}
proc_res = extract_function_from_bitcode(config, props, "test", testfile, "");
if !proc_res.status.success() {
fatal_ProcRes("extracting 'test' function failed".to_string(),
fatal_proc_rec("extracting 'test' function failed".to_string(),
&proc_res);
}
proc_res = disassemble_extract(config, props, testfile, "");
if !proc_res.status.success() {
fatal_ProcRes("disassembling extract failed".to_string(), &proc_res);
fatal_proc_rec("disassembling extract failed".to_string(), &proc_res);
}
let mut proc_res = compile_cc_with_clang_and_save_bitcode(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("compilation failed!".to_string(), &proc_res);
fatal_proc_rec("compilation failed!".to_string(), &proc_res);
}
proc_res = extract_function_from_bitcode(config, props, "test", testfile, "clang");
if !proc_res.status.success() {
fatal_ProcRes("extracting 'test' function failed".to_string(),
fatal_proc_rec("extracting 'test' function failed".to_string(),
&proc_res);
}
proc_res = disassemble_extract(config, props, testfile, "clang");
if !proc_res.status.success() {
fatal_ProcRes("disassembling extract failed".to_string(), &proc_res);
fatal_proc_rec("disassembling extract failed".to_string(), &proc_res);
}
let base = output_base_name(config, testfile);

View File

@ -69,7 +69,7 @@ pub fn llvm_err(sess: &Session, msg: String) -> ! {
}
}
pub fn WriteOutputFile(
pub fn write_output_file(
sess: &Session,
target: lib::llvm::TargetMachineRef,
pm: lib::llvm::PassManagerRef,
@ -90,7 +90,7 @@ pub fn WriteOutputFile(
pub mod write {
use back::lto;
use back::link::{WriteOutputFile, OutputType};
use back::link::{write_output_file, OutputType};
use back::link::{OutputTypeAssembly, OutputTypeBitcode};
use back::link::{OutputTypeExe, OutputTypeLlvmAssembly};
use back::link::{OutputTypeObject};
@ -310,7 +310,7 @@ pub mod write {
output.temp_path(OutputTypeAssembly)
};
with_codegen(tm, llmod, trans.no_builtins, |cpm| {
WriteOutputFile(sess, tm, cpm, llmod, &path,
write_output_file(sess, tm, cpm, llmod, &path,
lib::llvm::AssemblyFile);
});
}
@ -328,7 +328,7 @@ pub mod write {
match object_file {
Some(ref path) => {
with_codegen(tm, llmod, trans.no_builtins, |cpm| {
WriteOutputFile(sess, tm, cpm, llmod, path,
write_output_file(sess, tm, cpm, llmod, path,
lib::llvm::ObjectFile);
});
}
@ -339,7 +339,7 @@ pub mod write {
trans.no_builtins, |cpm| {
let out = output.temp_path(OutputTypeObject)
.with_extension("metadata.o");
WriteOutputFile(sess, tm, cpm,
write_output_file(sess, tm, cpm,
trans.metadata_module, &out,
lib::llvm::ObjectFile);
})

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -10,6 +10,7 @@
#![allow(non_uppercase_pattern_statics)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case_functions)]
#![allow(dead_code)]
use std::c_str::ToCStr;

View File

@ -19,7 +19,8 @@ use middle::lang_items;
use middle::ty;
use middle::typeck;
use reader = serialize::ebml::reader;
use serialize::ebml;
use serialize::ebml::reader;
use std::rc::Rc;
use syntax::ast;
use syntax::ast_map;
@ -206,7 +207,7 @@ pub fn get_field_type(tcx: &ty::ctxt, class_id: ast::DefId,
def: ast::DefId) -> ty::ty_param_bounds_and_ty {
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(class_id.krate);
let all_items = reader::get_doc(reader::Doc(cdata.data()), tag_items);
let all_items = reader::get_doc(ebml::Doc::new(cdata.data()), tag_items);
let class_doc = expect(tcx.sess.diagnostic(),
decoder::maybe_find_item(class_id.node, all_items),
|| {

View File

@ -99,7 +99,7 @@ fn find_item<'a>(item_id: ast::NodeId, items: ebml::Doc<'a>) -> ebml::Doc<'a> {
// Looks up an item in the given metadata and returns an ebml doc pointing
// to the item data.
fn lookup_item<'a>(item_id: ast::NodeId, data: &'a [u8]) -> ebml::Doc<'a> {
let items = reader::get_doc(reader::Doc(data), tag_items);
let items = reader::get_doc(ebml::Doc::new(data), tag_items);
find_item(item_id, items)
}
@ -383,7 +383,7 @@ pub fn get_trait_def(cdata: Cmd,
tag_items_data_item_ty_param_bounds);
let rp_defs = item_region_param_defs(item_doc, cdata);
let sized = item_sized(item_doc);
let mut bounds = ty::EmptyBuiltinBounds();
let mut bounds = ty::empty_builtin_bounds();
// Collect the builtin bounds from the encoded supertraits.
// FIXME(#8559): They should be encoded directly.
reader::tagged_docs(item_doc, tag_item_super_trait_ref, |trait_doc| {
@ -443,7 +443,7 @@ pub fn get_impl_vtables(cdata: Cmd,
{
let item_doc = lookup_item(id, cdata.data());
let vtables_doc = reader::get_doc(item_doc, tag_item_impl_vtables);
let mut decoder = reader::Decoder(vtables_doc);
let mut decoder = reader::Decoder::new(vtables_doc);
typeck::impl_res {
trait_vtables: decoder.read_vtable_res(tcx, cdata),
@ -466,7 +466,7 @@ pub enum DefLike {
/// Iterates over the language items in the given crate.
pub fn each_lang_item(cdata: Cmd, f: |ast::NodeId, uint| -> bool) -> bool {
let root = reader::Doc(cdata.data());
let root = ebml::Doc::new(cdata.data());
let lang_items = reader::get_doc(root, tag_lang_items);
reader::tagged_docs(lang_items, tag_lang_items_item, |item_doc| {
let id_doc = reader::get_doc(item_doc, tag_lang_items_item_id);
@ -506,7 +506,7 @@ fn each_child_of_item_or_crate(intr: Rc<IdentInterner>,
None => cdata
};
let other_crates_items = reader::get_doc(reader::Doc(crate_data.data()), tag_items);
let other_crates_items = reader::get_doc(ebml::Doc::new(crate_data.data()), tag_items);
// Get the item.
match maybe_find_item(child_def_id.node, other_crates_items) {
@ -534,7 +534,7 @@ fn each_child_of_item_or_crate(intr: Rc<IdentInterner>,
|inherent_impl_def_id_doc| {
let inherent_impl_def_id = item_def_id(inherent_impl_def_id_doc,
cdata);
let items = reader::get_doc(reader::Doc(cdata.data()), tag_items);
let items = reader::get_doc(ebml::Doc::new(cdata.data()), tag_items);
match maybe_find_item(inherent_impl_def_id.node, items) {
None => {}
Some(inherent_impl_doc) => {
@ -599,7 +599,7 @@ fn each_child_of_item_or_crate(intr: Rc<IdentInterner>,
None => cdata
};
let other_crates_items = reader::get_doc(reader::Doc(crate_data.data()), tag_items);
let other_crates_items = reader::get_doc(ebml::Doc::new(crate_data.data()), tag_items);
// Get the item.
match maybe_find_item(child_def_id.node, other_crates_items) {
@ -626,7 +626,7 @@ pub fn each_child_of_item(intr: Rc<IdentInterner>,
get_crate_data: GetCrateDataCb,
callback: |DefLike, ast::Ident, ast::Visibility|) {
// Find the item.
let root_doc = reader::Doc(cdata.data());
let root_doc = ebml::Doc::new(cdata.data());
let items = reader::get_doc(root_doc, tag_items);
let item_doc = match maybe_find_item(id, items) {
None => return,
@ -647,7 +647,7 @@ pub fn each_top_level_item_of_crate(intr: Rc<IdentInterner>,
callback: |DefLike,
ast::Ident,
ast::Visibility|) {
let root_doc = reader::Doc(cdata.data());
let root_doc = ebml::Doc::new(cdata.data());
let misc_info_doc = reader::get_doc(root_doc, tag_misc_info);
let crate_items_doc = reader::get_doc(misc_info_doc,
tag_misc_info_crate_items);
@ -696,7 +696,7 @@ pub fn maybe_get_item_ast(cdata: Cmd, tcx: &ty::ctxt, id: ast::NodeId,
pub fn get_enum_variants(intr: Rc<IdentInterner>, cdata: Cmd, id: ast::NodeId,
tcx: &ty::ctxt) -> Vec<Rc<ty::VariantInfo>> {
let data = cdata.data();
let items = reader::get_doc(reader::Doc(data), tag_items);
let items = reader::get_doc(ebml::Doc::new(data), tag_items);
let item = find_item(id, items);
let mut disr_val = 0;
enum_variant_ids(item, cdata).iter().map(|did| {
@ -829,7 +829,7 @@ pub fn get_item_variances(cdata: Cmd, id: ast::NodeId) -> ty::ItemVariances {
let data = cdata.data();
let item_doc = lookup_item(id, data);
let variance_doc = reader::get_doc(item_doc, tag_item_variances);
let mut decoder = reader::Decoder(variance_doc);
let mut decoder = reader::Decoder::new(variance_doc);
Decodable::decode(&mut decoder).unwrap()
}
@ -1078,7 +1078,7 @@ fn list_crate_attributes(md: ebml::Doc, hash: &Svh,
}
pub fn get_crate_attributes(data: &[u8]) -> Vec<ast::Attribute> {
get_attributes(reader::Doc(data))
get_attributes(ebml::Doc::new(data))
}
#[deriving(Clone)]
@ -1090,7 +1090,7 @@ pub struct CrateDep {
pub fn get_crate_deps(data: &[u8]) -> Vec<CrateDep> {
let mut deps: Vec<CrateDep> = Vec::new();
let cratedoc = reader::Doc(data);
let cratedoc = ebml::Doc::new(data);
let depsdoc = reader::get_doc(cratedoc, tag_crate_deps);
let mut crate_num = 1;
fn docstr(doc: ebml::Doc, tag_: uint) -> String {
@ -1123,40 +1123,40 @@ fn list_crate_deps(data: &[u8], out: &mut io::Writer) -> io::IoResult<()> {
}
pub fn maybe_get_crate_hash(data: &[u8]) -> Option<Svh> {
let cratedoc = reader::Doc(data);
let cratedoc = ebml::Doc::new(data);
reader::maybe_get_doc(cratedoc, tag_crate_hash).map(|doc| {
Svh::new(doc.as_str_slice())
})
}
pub fn get_crate_hash(data: &[u8]) -> Svh {
let cratedoc = reader::Doc(data);
let cratedoc = ebml::Doc::new(data);
let hashdoc = reader::get_doc(cratedoc, tag_crate_hash);
Svh::new(hashdoc.as_str_slice())
}
pub fn maybe_get_crate_id(data: &[u8]) -> Option<CrateId> {
let cratedoc = reader::Doc(data);
let cratedoc = ebml::Doc::new(data);
reader::maybe_get_doc(cratedoc, tag_crate_crateid).map(|doc| {
from_str(doc.as_str_slice()).unwrap()
})
}
pub fn get_crate_triple(data: &[u8]) -> String {
let cratedoc = reader::Doc(data);
let cratedoc = ebml::Doc::new(data);
let triple_doc = reader::maybe_get_doc(cratedoc, tag_crate_triple);
triple_doc.expect("No triple in crate").as_str().to_string()
}
pub fn get_crate_id(data: &[u8]) -> CrateId {
let cratedoc = reader::Doc(data);
let cratedoc = ebml::Doc::new(data);
let hashdoc = reader::get_doc(cratedoc, tag_crate_crateid);
from_str(hashdoc.as_str_slice()).unwrap()
}
pub fn list_crate_metadata(bytes: &[u8], out: &mut io::Writer) -> io::IoResult<()> {
let hash = get_crate_hash(bytes);
let md = reader::Doc(bytes);
let md = ebml::Doc::new(bytes);
try!(list_crate_attributes(md, &hash, out));
list_crate_deps(bytes, out)
}
@ -1183,7 +1183,7 @@ pub fn translate_def_id(cdata: Cmd, did: ast::DefId) -> ast::DefId {
}
pub fn each_impl(cdata: Cmd, callback: |ast::DefId|) {
let impls_doc = reader::get_doc(reader::Doc(cdata.data()), tag_impls);
let impls_doc = reader::get_doc(ebml::Doc::new(cdata.data()), tag_impls);
let _ = reader::tagged_docs(impls_doc, tag_impls_impl, |impl_doc| {
callback(item_def_id(impl_doc, cdata));
true
@ -1239,7 +1239,7 @@ pub fn get_trait_of_method(cdata: Cmd, id: ast::NodeId, tcx: &ty::ctxt)
pub fn get_native_libraries(cdata: Cmd)
-> Vec<(cstore::NativeLibaryKind, String)> {
let libraries = reader::get_doc(reader::Doc(cdata.data()),
let libraries = reader::get_doc(ebml::Doc::new(cdata.data()),
tag_native_libraries);
let mut result = Vec::new();
reader::tagged_docs(libraries, tag_native_libraries_lib, |lib_doc| {
@ -1255,12 +1255,12 @@ pub fn get_native_libraries(cdata: Cmd)
}
pub fn get_macro_registrar_fn(data: &[u8]) -> Option<ast::NodeId> {
reader::maybe_get_doc(reader::Doc(data), tag_macro_registrar_fn)
reader::maybe_get_doc(ebml::Doc::new(data), tag_macro_registrar_fn)
.map(|doc| FromPrimitive::from_u32(reader::doc_as_u32(doc)).unwrap())
}
pub fn get_exported_macros(data: &[u8]) -> Vec<String> {
let macros = reader::get_doc(reader::Doc(data),
let macros = reader::get_doc(ebml::Doc::new(data),
tag_exported_macros);
let mut result = Vec::new();
reader::tagged_docs(macros, tag_macro_def, |macro_doc| {
@ -1273,7 +1273,7 @@ pub fn get_exported_macros(data: &[u8]) -> Vec<String> {
pub fn get_dylib_dependency_formats(cdata: Cmd)
-> Vec<(ast::CrateNum, cstore::LinkagePreference)>
{
let formats = reader::get_doc(reader::Doc(cdata.data()),
let formats = reader::get_doc(ebml::Doc::new(cdata.data()),
tag_dylib_dependency_formats);
let mut result = Vec::new();
@ -1299,7 +1299,7 @@ pub fn get_dylib_dependency_formats(cdata: Cmd)
pub fn get_missing_lang_items(cdata: Cmd)
-> Vec<lang_items::LangItem>
{
let items = reader::get_doc(reader::Doc(cdata.data()), tag_lang_items);
let items = reader::get_doc(ebml::Doc::new(cdata.data()), tag_lang_items);
let mut result = Vec::new();
reader::tagged_docs(items, tag_lang_items_missing, |missing_doc| {
let item: lang_items::LangItem =

View File

@ -1813,7 +1813,7 @@ fn encode_metadata_inner(wr: &mut MemWriter, parms: EncodeParams, krate: &Crate)
type_abbrevs: RefCell::new(HashMap::new()),
};
let mut ebml_w = writer::Encoder(wr);
let mut ebml_w = writer::Encoder::new(wr);
encode_crate_id(&mut ebml_w, &ecx.link_meta.crateid);
encode_crate_triple(&mut ebml_w,

View File

@ -556,7 +556,7 @@ fn parse_type_param_def(st: &mut PState, conv: conv_did) -> ty::TypeParameterDef
fn parse_bounds(st: &mut PState, conv: conv_did) -> ty::ParamBounds {
let mut param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(),
builtin_bounds: ty::empty_builtin_bounds(),
trait_bounds: Vec::new()
};
loop {

View File

@ -120,7 +120,7 @@ pub fn decode_inlined_item(cdata: &cstore::crate_metadata,
path_as_str = Some(s);
path_as_str.as_ref().map(|x| x.as_slice())
});
let mut ast_dsr = reader::Decoder(ast_doc);
let mut ast_dsr = reader::Decoder::new(ast_doc);
let from_id_range = Decodable::decode(&mut ast_dsr).unwrap();
let to_id_range = reserve_id_range(&dcx.tcx.sess, from_id_range);
let xcx = &ExtendedDecodeContext {
@ -349,7 +349,7 @@ fn simplify_ast(ii: e::InlinedItemRef) -> ast::InlinedItem {
fn decode_ast(par_doc: ebml::Doc) -> ast::InlinedItem {
let chi_doc = par_doc.get(c::tag_tree as uint);
let mut d = reader::Decoder(chi_doc);
let mut d = reader::Decoder::new(chi_doc);
Decodable::decode(&mut d).unwrap()
}
@ -395,7 +395,7 @@ fn renumber_and_map_ast(xcx: &ExtendedDecodeContext,
// Encoding and decoding of ast::def
fn decode_def(xcx: &ExtendedDecodeContext, doc: ebml::Doc) -> ast::Def {
let mut dsr = reader::Decoder(doc);
let mut dsr = reader::Decoder::new(doc);
let def: ast::Def = Decodable::decode(&mut dsr).unwrap();
def.tr(xcx)
}
@ -1317,7 +1317,7 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext,
}
Some(value) => {
let val_doc = entry_doc.get(c::tag_table_val as uint);
let mut val_dsr = reader::Decoder(val_doc);
let mut val_dsr = reader::Decoder::new(val_doc);
let val_dsr = &mut val_dsr;
match value {
@ -1402,7 +1402,7 @@ fn encode_item_ast(ebml_w: &mut Encoder, item: @ast::Item) {
#[cfg(test)]
fn decode_item_ast(par_doc: ebml::Doc) -> @ast::Item {
let chi_doc = par_doc.get(c::tag_tree as uint);
let mut d = reader::Decoder(chi_doc);
let mut d = reader::Decoder::new(chi_doc);
@Decodable::decode(&mut d).unwrap()
}
@ -1444,10 +1444,10 @@ fn roundtrip(in_item: Option<@ast::Item>) {
let in_item = in_item.unwrap();
let mut wr = MemWriter::new();
{
let mut ebml_w = writer::Encoder(&mut wr);
let mut ebml_w = writer::Encoder::new(&mut wr);
encode_item_ast(&mut ebml_w, in_item);
}
let ebml_doc = reader::Doc(wr.get_ref());
let ebml_doc = ebml::Doc::new(wr.get_ref());
let out_item = decode_item_ast(ebml_doc);
assert!(in_item == out_item);

View File

@ -371,7 +371,7 @@ pub fn check_builtin_bounds(cx: &Context,
bounds: ty::BuiltinBounds,
any_missing: |ty::BuiltinBounds|) {
let kind = ty::type_contents(cx.tcx, ty);
let mut missing = ty::EmptyBuiltinBounds();
let mut missing = ty::empty_builtin_bounds();
for bound in bounds.iter() {
if !kind.meets_bound(cx.tcx, bound) {
missing.add(bound);
@ -565,6 +565,7 @@ pub fn check_cast_for_escaping_regions(
}
});
#[allow(non_snake_case_functions)]
fn is_ReScope(r: ty::Region) -> bool {
match r {
ty::ReScope(..) => true,

View File

@ -177,7 +177,7 @@ impl<'a> Visitor<()> for IrMaps<'a> {
pub fn check_crate(tcx: &ty::ctxt,
krate: &Crate) {
visit::walk_crate(&mut IrMaps(tcx), krate, ());
visit::walk_crate(&mut IrMaps::new(tcx), krate, ());
tcx.sess.abort_if_errors();
}
@ -260,21 +260,20 @@ struct IrMaps<'a> {
lnks: Vec<LiveNodeKind>,
}
fn IrMaps<'a>(tcx: &'a ty::ctxt)
-> IrMaps<'a> {
IrMaps {
tcx: tcx,
num_live_nodes: 0,
num_vars: 0,
live_node_map: NodeMap::new(),
variable_map: NodeMap::new(),
capture_info_map: NodeMap::new(),
var_kinds: Vec::new(),
lnks: Vec::new(),
}
}
impl<'a> IrMaps<'a> {
fn new(tcx: &'a ty::ctxt) -> IrMaps<'a> {
IrMaps {
tcx: tcx,
num_live_nodes: 0,
num_vars: 0,
live_node_map: NodeMap::new(),
variable_map: NodeMap::new(),
capture_info_map: NodeMap::new(),
var_kinds: Vec::new(),
lnks: Vec::new(),
}
}
fn add_live_node(&mut self, lnk: LiveNodeKind) -> LiveNode {
let ln = LiveNode(self.num_live_nodes);
self.lnks.push(lnk);
@ -365,7 +364,7 @@ fn visit_fn(ir: &mut IrMaps,
let _i = ::util::common::indenter();
// swap in a new set of IR maps for this function body:
let mut fn_maps = IrMaps(ir.tcx);
let mut fn_maps = IrMaps::new(ir.tcx);
unsafe {
debug!("creating fn_maps: {}", transmute::<&IrMaps, *IrMaps>(&fn_maps));
@ -396,7 +395,7 @@ fn visit_fn(ir: &mut IrMaps,
};
// compute liveness
let mut lsets = Liveness(&mut fn_maps, specials);
let mut lsets = Liveness::new(&mut fn_maps, specials);
let entry_ln = lsets.compute(decl, body);
// check for various error conditions
@ -584,19 +583,19 @@ struct Liveness<'a> {
cont_ln: NodeMap<LiveNode>
}
fn Liveness<'a>(ir: &'a mut IrMaps<'a>, specials: Specials) -> Liveness<'a> {
Liveness {
ir: ir,
s: specials,
successors: Vec::from_elem(ir.num_live_nodes, invalid_node()),
users: Vec::from_elem(ir.num_live_nodes * ir.num_vars, invalid_users()),
loop_scope: Vec::new(),
break_ln: NodeMap::new(),
cont_ln: NodeMap::new(),
}
}
impl<'a> Liveness<'a> {
fn new(ir: &'a mut IrMaps<'a>, specials: Specials) -> Liveness<'a> {
Liveness {
ir: ir,
s: specials,
successors: Vec::from_elem(ir.num_live_nodes, invalid_node()),
users: Vec::from_elem(ir.num_live_nodes * ir.num_vars, invalid_users()),
loop_scope: Vec::new(),
break_ln: NodeMap::new(),
cont_ln: NodeMap::new(),
}
}
fn live_node(&self, node_id: NodeId, span: Span) -> LiveNode {
match self.ir.live_node_map.find(&node_id) {
Some(&ln) => ln,

View File

@ -549,6 +549,13 @@ enum TraitReferenceType {
}
impl NameBindings {
fn new() -> NameBindings {
NameBindings {
type_def: RefCell::new(None),
value_def: RefCell::new(None),
}
}
/// Creates a new module in this set of name bindings.
fn define_module(&self,
parent_link: ParentLink,
@ -749,49 +756,42 @@ impl NameBindings {
}
}
fn NameBindings() -> NameBindings {
NameBindings {
type_def: RefCell::new(None),
value_def: RefCell::new(None),
}
}
/// Interns the names of the primitive types.
struct PrimitiveTypeTable {
primitive_types: HashMap<Name, PrimTy>,
}
impl PrimitiveTypeTable {
fn new() -> PrimitiveTypeTable {
let mut table = PrimitiveTypeTable {
primitive_types: HashMap::new()
};
table.intern("bool", TyBool);
table.intern("char", TyChar);
table.intern("f32", TyFloat(TyF32));
table.intern("f64", TyFloat(TyF64));
table.intern("f128", TyFloat(TyF128));
table.intern("int", TyInt(TyI));
table.intern("i8", TyInt(TyI8));
table.intern("i16", TyInt(TyI16));
table.intern("i32", TyInt(TyI32));
table.intern("i64", TyInt(TyI64));
table.intern("str", TyStr);
table.intern("uint", TyUint(TyU));
table.intern("u8", TyUint(TyU8));
table.intern("u16", TyUint(TyU16));
table.intern("u32", TyUint(TyU32));
table.intern("u64", TyUint(TyU64));
table
}
fn intern(&mut self, string: &str, primitive_type: PrimTy) {
self.primitive_types.insert(token::intern(string), primitive_type);
}
}
fn PrimitiveTypeTable() -> PrimitiveTypeTable {
let mut table = PrimitiveTypeTable {
primitive_types: HashMap::new()
};
table.intern("bool", TyBool);
table.intern("char", TyChar);
table.intern("f32", TyFloat(TyF32));
table.intern("f64", TyFloat(TyF64));
table.intern("f128", TyFloat(TyF128));
table.intern("int", TyInt(TyI));
table.intern("i8", TyInt(TyI8));
table.intern("i16", TyInt(TyI16));
table.intern("i32", TyInt(TyI32));
table.intern("i64", TyInt(TyI64));
table.intern("str", TyStr);
table.intern("uint", TyUint(TyU));
table.intern("u8", TyUint(TyU8));
table.intern("u16", TyUint(TyU16));
table.intern("u32", TyUint(TyU32));
table.intern("u64", TyUint(TyU64));
return table;
}
fn namespace_error_to_str(ns: NamespaceError) -> &'static str {
match ns {
@ -802,62 +802,6 @@ fn namespace_error_to_str(ns: NamespaceError) -> &'static str {
}
}
fn Resolver<'a>(session: &'a Session,
lang_items: &'a LanguageItems,
crate_span: Span) -> Resolver<'a> {
let graph_root = NameBindings();
graph_root.define_module(NoParentLink,
Some(DefId { krate: 0, node: 0 }),
NormalModuleKind,
false,
true,
crate_span);
let current_module = graph_root.get_module();
let this = Resolver {
session: session,
lang_items: lang_items,
// The outermost module has def ID 0; this is not reflected in the
// AST.
graph_root: graph_root,
method_map: RefCell::new(FnvHashMap::new()),
structs: FnvHashMap::new(),
unresolved_imports: 0,
current_module: current_module,
value_ribs: RefCell::new(Vec::new()),
type_ribs: RefCell::new(Vec::new()),
label_ribs: RefCell::new(Vec::new()),
current_trait_ref: None,
current_self_type: None,
self_ident: special_idents::self_,
type_self_ident: special_idents::type_self,
primitive_type_table: PrimitiveTypeTable(),
namespaces: vec!(TypeNS, ValueNS),
def_map: RefCell::new(NodeMap::new()),
export_map2: RefCell::new(NodeMap::new()),
trait_map: NodeMap::new(),
used_imports: HashSet::new(),
external_exports: DefIdSet::new(),
last_private: NodeMap::new(),
emit_errors: true,
};
this
}
/// The main resolver class.
struct Resolver<'a> {
session: &'a Session,
@ -957,6 +901,57 @@ impl<'a, 'b> Visitor<()> for UnusedImportCheckVisitor<'a, 'b> {
}
impl<'a> Resolver<'a> {
fn new(session: &'a Session, lang_items: &'a LanguageItems, crate_span: Span) -> Resolver<'a> {
let graph_root = NameBindings::new();
graph_root.define_module(NoParentLink,
Some(DefId { krate: 0, node: 0 }),
NormalModuleKind,
false,
true,
crate_span);
let current_module = graph_root.get_module();
Resolver {
session: session,
lang_items: lang_items,
// The outermost module has def ID 0; this is not reflected in the
// AST.
graph_root: graph_root,
method_map: RefCell::new(FnvHashMap::new()),
structs: FnvHashMap::new(),
unresolved_imports: 0,
current_module: current_module,
value_ribs: RefCell::new(Vec::new()),
type_ribs: RefCell::new(Vec::new()),
label_ribs: RefCell::new(Vec::new()),
current_trait_ref: None,
current_self_type: None,
self_ident: special_idents::self_,
type_self_ident: special_idents::type_self,
primitive_type_table: PrimitiveTypeTable::new(),
namespaces: vec!(TypeNS, ValueNS),
def_map: RefCell::new(NodeMap::new()),
export_map2: RefCell::new(NodeMap::new()),
trait_map: NodeMap::new(),
used_imports: HashSet::new(),
external_exports: DefIdSet::new(),
last_private: NodeMap::new(),
emit_errors: true,
}
}
/// The main name resolution procedure.
fn resolve(&mut self, krate: &ast::Crate) {
self.build_reduced_graph(krate);
@ -1017,7 +1012,7 @@ impl<'a> Resolver<'a> {
let child = module_.children.borrow().find_copy(&name.name);
match child {
None => {
let child = Rc::new(NameBindings());
let child = Rc::new(NameBindings::new());
module_.children.borrow_mut().insert(name.name, child.clone());
child
}
@ -5574,7 +5569,7 @@ pub fn resolve_crate(session: &Session,
lang_items: &LanguageItems,
krate: &Crate)
-> CrateMap {
let mut resolver = Resolver(session, lang_items, krate.span);
let mut resolver = Resolver::new(session, lang_items, krate.span);
resolver.resolve(krate);
let Resolver { def_map, export_map2, trait_map, last_private,
external_exports, .. } = resolver;

View File

@ -997,7 +997,7 @@ fn match_datum(bcx: &Block,
*/
let ty = node_id_type(bcx, pat_id);
Datum(val, ty, Lvalue)
Datum::new(val, ty, Lvalue)
}
@ -1297,7 +1297,7 @@ fn store_non_ref_bindings<'a>(
match binding_info.trmode {
TrByValue(lldest) => {
let llval = Load(bcx, binding_info.llmatch); // get a T*
let datum = Datum(llval, binding_info.ty, Lvalue);
let datum = Datum::new(llval, binding_info.ty, Lvalue);
bcx = datum.store_to(bcx, lldest);
match opt_cleanup_scope {
@ -1334,7 +1334,7 @@ fn insert_lllocals<'a>(bcx: &'a Block<'a>,
TrByRef => binding_info.llmatch
};
let datum = Datum(llval, binding_info.ty, Lvalue);
let datum = Datum::new(llval, binding_info.ty, Lvalue);
fcx.schedule_drop_mem(cleanup_scope, llval, binding_info.ty);
debug!("binding {:?} to {}",
@ -2081,7 +2081,7 @@ pub fn store_arg<'a>(mut bcx: &'a Block<'a>,
// we emit extra-debug-info, which requires local allocas :(.
let arg_val = arg.add_clean(bcx.fcx, arg_scope);
bcx.fcx.llargs.borrow_mut()
.insert(pat.id, Datum(arg_val, arg_ty, Lvalue));
.insert(pat.id, Datum::new(arg_val, arg_ty, Lvalue));
bcx
} else {
mk_binding_alloca(
@ -2122,7 +2122,7 @@ fn mk_binding_alloca<'a,A>(bcx: &'a Block<'a>,
// Now that memory is initialized and has cleanup scheduled,
// create the datum and insert into the local variable map.
let datum = Datum(llval, var_ty, Lvalue);
let datum = Datum::new(llval, var_ty, Lvalue);
let mut llmap = match binding_mode {
BindLocal => bcx.fcx.lllocals.borrow_mut(),
BindArgument => bcx.fcx.llargs.borrow_mut()
@ -2183,7 +2183,7 @@ fn bind_irrefutable_pat<'a>(
ast::BindByValue(_) => {
// By value binding: move the value that `val`
// points at into the binding's stack slot.
let d = Datum(val, ty, Lvalue);
let d = Datum::new(val, ty, Lvalue);
d.store_to(bcx, llval)
}

View File

@ -1,4 +1,4 @@
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -70,7 +70,7 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
.connect(",")
.as_slice());
let mut clobbers = getClobbers();
let mut clobbers = get_clobbers();
if !ia.clobbers.get().is_empty() && !clobbers.is_empty() {
clobbers = format!("{},{}", ia.clobbers.get(), clobbers);
} else {
@ -135,12 +135,12 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
#[cfg(target_arch = "arm")]
#[cfg(target_arch = "mips")]
fn getClobbers() -> String {
fn get_clobbers() -> String {
"".to_string()
}
#[cfg(target_arch = "x86")]
#[cfg(target_arch = "x86_64")]
fn getClobbers() -> String {
fn get_clobbers() -> String {
"~{dirflag},~{fpsr},~{flags}".to_string()
}

View File

@ -1210,7 +1210,7 @@ pub fn create_datums_for_fn_args(fcx: &FunctionContext,
let llarg = unsafe {
llvm::LLVMGetParam(fcx.llfn, fcx.arg_pos(i) as c_uint)
};
datum::Datum(llarg, arg_ty, arg_kind(fcx, arg_ty))
datum::Datum::new(llarg, arg_ty, arg_kind(fcx, arg_ty))
}).collect()
}

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -9,6 +9,7 @@
// except according to those terms.
#![allow(dead_code)] // FFI wrappers
#![allow(non_snake_case_functions)]
use lib::llvm::llvm;
use lib::llvm::{CallConv, AtomicBinOp, AtomicOrdering, AsmDialect};

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -460,5 +460,5 @@ pub fn make_closure_from_bare_fn<'a>(bcx: &'a Block<'a>,
let wrapper = get_wrapper_for_bare_fn(bcx.ccx(), closure_ty, def, fn_ptr, true);
fill_fn_pair(bcx, scratch.val, wrapper, C_null(Type::i8p(bcx.ccx())));
DatumBlock(bcx, scratch.to_expr_datum())
DatumBlock::new(bcx, scratch.to_expr_datum())
}

View File

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types)]
#![allow(non_camel_case_types, non_snake_case_functions)]
//! Code that is useful in various trans modules.
@ -21,7 +21,6 @@ use middle::lang_items::LangItem;
use middle::trans::build;
use middle::trans::cleanup;
use middle::trans::datum;
use middle::trans::datum::{Datum, Lvalue};
use middle::trans::debuginfo;
use middle::trans::type_::Type;
use middle::ty;

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -71,8 +71,10 @@ pub struct Rvalue {
pub mode: RvalueMode
}
pub fn Rvalue(m: RvalueMode) -> Rvalue {
Rvalue { mode: m }
impl Rvalue {
pub fn new(m: RvalueMode) -> Rvalue {
Rvalue { mode: m }
}
}
// Make Datum linear for more type safety.
@ -89,25 +91,15 @@ pub enum RvalueMode {
ByValue,
}
pub fn Datum<K:KindOps>(val: ValueRef, ty: ty::t, kind: K) -> Datum<K> {
Datum { val: val, ty: ty, kind: kind }
}
pub fn DatumBlock<'a, K>(bcx: &'a Block<'a>,
datum: Datum<K>)
-> DatumBlock<'a, K> {
DatumBlock { bcx: bcx, datum: datum }
}
pub fn immediate_rvalue(val: ValueRef, ty: ty::t) -> Datum<Rvalue> {
return Datum(val, ty, Rvalue(ByValue));
return Datum::new(val, ty, Rvalue::new(ByValue));
}
pub fn immediate_rvalue_bcx<'a>(bcx: &'a Block<'a>,
val: ValueRef,
ty: ty::t)
-> DatumBlock<'a, Rvalue> {
return DatumBlock(bcx, immediate_rvalue(val, ty))
return DatumBlock::new(bcx, immediate_rvalue(val, ty))
}
@ -136,7 +128,7 @@ pub fn lvalue_scratch_datum<'a, A>(bcx: &'a Block<'a>,
let bcx = populate(arg, bcx, scratch);
bcx.fcx.schedule_drop_mem(scope, scratch, ty);
DatumBlock(bcx, Datum(scratch, ty, Lvalue))
DatumBlock::new(bcx, Datum::new(scratch, ty, Lvalue))
}
pub fn rvalue_scratch_datum(bcx: &Block,
@ -155,7 +147,7 @@ pub fn rvalue_scratch_datum(bcx: &Block,
let llty = type_of::type_of(bcx.ccx(), ty);
let scratch = alloca_maybe_zeroed(bcx, llty, name, false);
Datum(scratch, ty, Rvalue(ByRef))
Datum::new(scratch, ty, Rvalue::new(ByRef))
}
pub fn appropriate_rvalue_mode(ccx: &CrateContext, ty: ty::t) -> RvalueMode {
@ -320,7 +312,7 @@ impl Datum<Rvalue> {
match self.kind.mode {
ByRef => {
add_rvalue_clean(ByRef, fcx, scope, self.val, self.ty);
DatumBlock(bcx, Datum(self.val, self.ty, Lvalue))
DatumBlock::new(bcx, Datum::new(self.val, self.ty, Lvalue))
}
ByValue => {
@ -334,11 +326,11 @@ impl Datum<Rvalue> {
pub fn to_ref_datum<'a>(self, bcx: &'a Block<'a>) -> DatumBlock<'a, Rvalue> {
let mut bcx = bcx;
match self.kind.mode {
ByRef => DatumBlock(bcx, self),
ByRef => DatumBlock::new(bcx, self),
ByValue => {
let scratch = rvalue_scratch_datum(bcx, self.ty, "to_ref");
bcx = self.store_to(bcx, scratch.val);
DatumBlock(bcx, scratch)
DatumBlock::new(bcx, scratch)
}
}
}
@ -352,10 +344,10 @@ impl Datum<Rvalue> {
}
ByValue => {
match self.kind.mode {
ByValue => DatumBlock(bcx, self),
ByValue => DatumBlock::new(bcx, self),
ByRef => {
let llval = load(bcx, self.val, self.ty);
DatumBlock(bcx, Datum(llval, self.ty, Rvalue(ByValue)))
DatumBlock::new(bcx, Datum::new(llval, self.ty, Rvalue::new(ByValue)))
}
}
}
@ -378,8 +370,8 @@ impl Datum<Expr> {
-> R {
let Datum { val, ty, kind } = self;
match kind {
LvalueExpr => if_lvalue(Datum(val, ty, Lvalue)),
RvalueExpr(r) => if_rvalue(Datum(val, ty, r)),
LvalueExpr => if_lvalue(Datum::new(val, ty, Lvalue)),
RvalueExpr(r) => if_rvalue(Datum::new(val, ty, r)),
}
}
@ -455,7 +447,7 @@ impl Datum<Expr> {
expr_id: ast::NodeId)
-> DatumBlock<'a, Lvalue> {
self.match_kind(
|l| DatumBlock(bcx, l),
|l| DatumBlock::new(bcx, l),
|r| {
let scope = cleanup::temporary_scope(bcx.tcx(), expr_id);
r.to_lvalue_datum_in_scope(bcx, name, scope)
@ -478,16 +470,16 @@ impl Datum<Expr> {
ByRef => {
let scratch = rvalue_scratch_datum(bcx, l.ty, name);
bcx = l.store_to(bcx, scratch.val);
DatumBlock(bcx, scratch)
DatumBlock::new(bcx, scratch)
}
ByValue => {
let v = load(bcx, l.val, l.ty);
bcx = l.kind.post_store(bcx, l.val, l.ty);
DatumBlock(bcx, Datum(v, l.ty, Rvalue(ByValue)))
DatumBlock::new(bcx, Datum::new(v, l.ty, Rvalue::new(ByValue)))
}
}
},
|r| DatumBlock(bcx, r))
|r| DatumBlock::new(bcx, r))
}
}
@ -550,6 +542,10 @@ fn load<'a>(bcx: &'a Block<'a>, llptr: ValueRef, ty: ty::t) -> ValueRef {
* Generic methods applicable to any sort of datum.
*/
impl<K:KindOps> Datum<K> {
pub fn new(val: ValueRef, ty: ty::t, kind: K) -> Datum<K> {
Datum { val: val, ty: ty, kind: kind }
}
pub fn to_expr_datum(self) -> Datum<Expr> {
let Datum { val, ty, kind } = self;
Datum { val: val, ty: ty, kind: kind.to_expr_kind() }
@ -663,9 +659,15 @@ impl<K:KindOps> Datum<K> {
}
}
impl <'a, K> DatumBlock<'a, K> {
pub fn new(bcx: &'a Block<'a>, datum: Datum<K>) -> DatumBlock<'a, K> {
DatumBlock { bcx: bcx, datum: datum }
}
}
impl<'a, K:KindOps> DatumBlock<'a, K> {
pub fn to_expr_datumblock(self) -> DatumBlock<'a, Expr> {
DatumBlock(self.bcx, self.datum.to_expr_datum())
DatumBlock::new(self.bcx, self.datum.to_expr_datum())
}
}

View File

@ -1,4 +1,4 @@
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -941,6 +941,7 @@ fn is_node_local_to_unit(cx: &CrateContext, node_id: ast::NodeId) -> bool
!cx.reachable.contains(&node_id)
}
#[allow(non_snake_case_functions)]
fn create_DIArray(builder: DIBuilderRef, arr: &[DIDescriptor]) -> DIArray {
return unsafe {
llvm::LLVMDIBuilderGetOrCreateArray(builder, arr.as_ptr(), arr.len() as u32)
@ -2487,6 +2488,7 @@ fn debug_context<'a>(cx: &'a CrateContext) -> &'a CrateDebugContext {
}
#[inline]
#[allow(non_snake_case_functions)]
fn DIB(cx: &CrateContext) -> DIBuilderRef {
cx.dbg_cx.get_ref().builder
}

View File

@ -152,7 +152,7 @@ pub fn trans<'a>(bcx: &'a Block<'a>,
let datum = unpack_datum!(bcx, trans_unadjusted(bcx, expr));
let datum = unpack_datum!(bcx, apply_adjustments(bcx, expr, datum));
bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, expr.id);
return DatumBlock(bcx, datum);
return DatumBlock::new(bcx, datum);
}
fn apply_adjustments<'a>(bcx: &'a Block<'a>,
@ -168,7 +168,7 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>,
let mut datum = datum;
let adjustment = match bcx.tcx().adjustments.borrow().find_copy(&expr.id) {
None => {
return DatumBlock(bcx, datum);
return DatumBlock::new(bcx, datum);
}
Some(adj) => { adj }
};
@ -244,7 +244,7 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>,
let scratch = rvalue_scratch_datum(bcx, slice_ty, "__adjust");
Store(bcx, base, GEPi(bcx, scratch.val, [0u, abi::slice_elt_base]));
Store(bcx, len, GEPi(bcx, scratch.val, [0u, abi::slice_elt_len]));
DatumBlock(bcx, scratch.to_expr_datum())
DatumBlock::new(bcx, scratch.to_expr_datum())
}
fn add_env<'a>(bcx: &'a Block<'a>,
@ -282,7 +282,7 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>,
let mut datum = source_datum.to_expr_datum();
datum.ty = target_obj_ty;
DatumBlock(bcx, datum)
DatumBlock::new(bcx, datum)
}
}
@ -357,7 +357,7 @@ fn trans_unadjusted<'a>(bcx: &'a Block<'a>,
let scratch = unpack_datum!(
bcx, scratch.to_appropriate_datum(bcx));
DatumBlock(bcx, scratch.to_expr_datum())
DatumBlock::new(bcx, scratch.to_expr_datum())
}
}
};
@ -365,7 +365,7 @@ fn trans_unadjusted<'a>(bcx: &'a Block<'a>,
fn nil<'a>(bcx: &'a Block<'a>, ty: ty::t) -> DatumBlock<'a, Expr> {
let llval = C_undef(type_of::type_of(bcx.ccx(), ty));
let datum = immediate_rvalue(llval, ty);
DatumBlock(bcx, datum.to_expr_datum())
DatumBlock::new(bcx, datum.to_expr_datum())
}
}
@ -394,7 +394,7 @@ fn trans_datum_unadjusted<'a>(bcx: &'a Block<'a>,
let datum = unpack_datum!(
bcx, tvec::trans_uniq_vstore(bcx, expr, contents));
bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, contents.id);
DatumBlock(bcx, datum)
DatumBlock::new(bcx, datum)
}
ast::ExprBox(_, contents) => {
// Special case for `box T`. (The other case, for GC, is handled
@ -494,7 +494,7 @@ fn trans_index<'a>(bcx: &'a Block<'a>,
});
let elt = InBoundsGEP(bcx, base, [ix_val]);
let elt = PointerCast(bcx, elt, vt.llunit_ty.ptr_to());
DatumBlock(bcx, Datum(elt, vt.unit_ty, LvalueExpr))
DatumBlock::new(bcx, Datum::new(elt, vt.unit_ty, LvalueExpr))
}
fn trans_def<'a>(bcx: &'a Block<'a>,
@ -559,10 +559,10 @@ fn trans_def<'a>(bcx: &'a Block<'a>,
let did = get_did(bcx.ccx(), did);
let val = get_val(bcx, did, const_ty);
DatumBlock(bcx, Datum(val, const_ty, LvalueExpr))
DatumBlock::new(bcx, Datum::new(val, const_ty, LvalueExpr))
}
_ => {
DatumBlock(bcx, trans_local_var(bcx, def).to_expr_datum())
DatumBlock::new(bcx, trans_local_var(bcx, def).to_expr_datum())
}
}
}
@ -845,7 +845,7 @@ fn trans_def_fn_unadjusted<'a>(bcx: &'a Block<'a>,
};
let fn_ty = expr_ty(bcx, ref_expr);
DatumBlock(bcx, Datum(llfn, fn_ty, RvalueExpr(Rvalue(ByValue))))
DatumBlock::new(bcx, Datum::new(llfn, fn_ty, RvalueExpr(Rvalue::new(ByValue))))
}
pub fn trans_local_var<'a>(bcx: &'a Block<'a>,
@ -863,7 +863,7 @@ pub fn trans_local_var<'a>(bcx: &'a Block<'a>,
// Can't move upvars, so this is never a ZeroMemLastUse.
let local_ty = node_id_type(bcx, nid);
match bcx.fcx.llupvars.borrow().find(&nid) {
Some(&val) => Datum(val, local_ty, Lvalue),
Some(&val) => Datum::new(val, local_ty, Lvalue),
None => {
bcx.sess().bug(format!(
"trans_local_var: no llval for upvar {:?} found",
@ -1664,7 +1664,7 @@ fn auto_ref<'a>(bcx: &'a Block<'a>,
// Construct the resulting datum, using what was the "by ref"
// ValueRef of type `referent_ty` to be the "by value" ValueRef
// of type `&referent_ty`.
DatumBlock(bcx, Datum(llref, ptr_ty, RvalueExpr(Rvalue(ByValue))))
DatumBlock::new(bcx, Datum::new(llref, ptr_ty, RvalueExpr(Rvalue::new(ByValue))))
}
fn deref_multiple<'a>(bcx: &'a Block<'a>,
@ -1717,7 +1717,7 @@ fn deref_once<'a>(bcx: &'a Block<'a>,
let val = unpack_result!(bcx, trans_overloaded_op(bcx, expr, method_call,
datum, None, None));
let ref_ty = ty::ty_fn_ret(monomorphize_type(bcx, method_ty));
Datum(val, ref_ty, RvalueExpr(Rvalue(ByValue)))
Datum::new(val, ref_ty, RvalueExpr(Rvalue::new(ByValue)))
}
None => {
// Not overloaded. We already have a pointer we know how to deref.
@ -1740,7 +1740,7 @@ fn deref_once<'a>(bcx: &'a Block<'a>,
let llptrref = datum.to_llref();
let llptr = Load(bcx, llptrref);
let llbody = GEPi(bcx, llptr, [0u, abi::box_field_body]);
DatumBlock(bcx, Datum(llbody, content_ty, LvalueExpr))
DatumBlock::new(bcx, Datum::new(llbody, content_ty, LvalueExpr))
}
ty::ty_ptr(ty::mt { ty: content_ty, .. }) |
@ -1758,7 +1758,7 @@ fn deref_once<'a>(bcx: &'a Block<'a>,
// rvalue for non-owning pointers like &T or *T, in which
// case cleanup *is* scheduled elsewhere, by the true
// owner (or, in the case of *T, by the user).
DatumBlock(bcx, Datum(ptr, content_ty, LvalueExpr))
DatumBlock::new(bcx, Datum::new(ptr, content_ty, LvalueExpr))
}
}
}
@ -1818,10 +1818,10 @@ fn deref_once<'a>(bcx: &'a Block<'a>,
(Load(bcx, datum.val), LvalueExpr)
}
RvalueExpr(Rvalue { mode: ByRef }) => {
(Load(bcx, datum.val), RvalueExpr(Rvalue(ByRef)))
(Load(bcx, datum.val), RvalueExpr(Rvalue::new(ByRef)))
}
RvalueExpr(Rvalue { mode: ByValue }) => {
(datum.val, RvalueExpr(Rvalue(ByRef)))
(datum.val, RvalueExpr(Rvalue::new(ByRef)))
}
};

View File

@ -1,4 +1,4 @@
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -309,7 +309,7 @@ pub fn trans_intrinsic(ccx: &CrateContext,
let mode = appropriate_rvalue_mode(ccx, tp_ty);
let src = Datum {val: get_param(decl, first_real_arg + 1u),
ty: tp_ty,
kind: Rvalue(mode)};
kind: Rvalue::new(mode)};
bcx = src.store_to(bcx, get_param(decl, first_real_arg));
RetVoid(bcx);
}

View File

@ -257,7 +257,7 @@ pub fn trans_uniq_vstore<'a>(bcx: &'a Block<'a>,
alloc_fn,
[ llptrval, llsizeval ],
Some(expr::SaveIn(lldestval.val))).bcx;
return DatumBlock(bcx, lldestval).to_expr_datumblock();
return DatumBlock::new(bcx, lldestval).to_expr_datumblock();
}
_ => {}
}

View File

@ -840,11 +840,11 @@ pub enum BuiltinBound {
BoundShare,
}
pub fn EmptyBuiltinBounds() -> BuiltinBounds {
pub fn empty_builtin_bounds() -> BuiltinBounds {
EnumSet::empty()
}
pub fn AllBuiltinBounds() -> BuiltinBounds {
pub fn all_builtin_bounds() -> BuiltinBounds {
let mut set = EnumSet::empty();
set.add(BoundStatic);
set.add(BoundSend);
@ -2833,7 +2833,7 @@ pub fn adjust_ty(cx: &ctxt,
ty::ClosureTy {fn_style: b.fn_style,
onceness: ast::Many,
store: store,
bounds: ty::AllBuiltinBounds(),
bounds: ty::all_builtin_bounds(),
sig: b.sig.clone()})
}
ref b => {
@ -4303,7 +4303,7 @@ pub fn visitor_object_ty(tcx: &ctxt,
trait_ref.def_id,
trait_ref.substs.clone(),
RegionTraitStore(region, ast::MutMutable),
EmptyBuiltinBounds())))
empty_builtin_bounds())))
}
pub fn item_variances(tcx: &ctxt, item_id: ast::DefId) -> Rc<ItemVariances> {

View File

@ -907,7 +907,7 @@ fn conv_builtin_bounds(tcx: &ty::ctxt, ast_bounds: &Option<OwnedSlice<ast::TyPar
match (ast_bounds, store) {
(&Some(ref bound_vec), _) => {
let mut builtin_bounds = ty::EmptyBuiltinBounds();
let mut builtin_bounds = ty::empty_builtin_bounds();
for ast_bound in bound_vec.iter() {
match *ast_bound {
ast::TraitTyParamBound(ref b) => {
@ -942,10 +942,10 @@ fn conv_builtin_bounds(tcx: &ty::ctxt, ast_bounds: &Option<OwnedSlice<ast::TyPar
},
// &'static Trait is sugar for &'static Trait:'static.
(&None, ty::RegionTraitStore(ty::ReStatic, _)) => {
let mut set = ty::EmptyBuiltinBounds(); set.add(ty::BoundStatic); set
let mut set = ty::empty_builtin_bounds(); set.add(ty::BoundStatic); set
}
// No bounds are automatically applied for &'r Trait or ~Trait
(&None, ty::RegionTraitStore(..)) |
(&None, ty::UniqTraitStore) => ty::EmptyBuiltinBounds(),
(&None, ty::UniqTraitStore) => ty::empty_builtin_bounds(),
}
}

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -274,12 +274,12 @@ fn construct_transformed_self_ty_for_object(
let r = r.subst(tcx, &substs); // handle Early-Bound lifetime
ty::mk_trait(tcx, trait_def_id, substs,
RegionTraitStore(r, mt.mutbl),
ty::EmptyBuiltinBounds())
ty::empty_builtin_bounds())
}
ty::ty_uniq(_) => { // must be SelfUniq
ty::mk_trait(tcx, trait_def_id, substs,
UniqTraitStore,
ty::EmptyBuiltinBounds())
ty::empty_builtin_bounds())
}
_ => {
tcx.sess.span_bug(span,

View File

@ -2335,7 +2335,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
}
_ => {
// Not an error! Means we're inferring the closure type
let mut bounds = ty::EmptyBuiltinBounds();
let mut bounds = ty::empty_builtin_bounds();
let onceness = match expr.node {
ast::ExprProc(..) => {
bounds.add(ty::BoundSend);

View File

@ -472,7 +472,7 @@ fn fixup_substs(vcx: &VtableContext,
let t = ty::mk_trait(tcx,
id, substs,
ty::RegionTraitStore(ty::ReStatic, ast::MutImmutable),
ty::EmptyBuiltinBounds());
ty::empty_builtin_bounds());
fixup_ty(vcx, span, t, is_early).map(|t_f| {
match ty::get(t_f).sty {
ty::ty_trait(ref inner) => inner.substs.clone(),
@ -574,7 +574,7 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
});
let param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(),
builtin_bounds: ty::empty_builtin_bounds(),
trait_bounds: vec!(target_trait_ref)
};
let vtables =
@ -766,7 +766,7 @@ pub fn resolve_impl(tcx: &ty::ctxt,
// purpose of this is to check for supertrait impls,
// but that falls out of doing this.
let param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(),
builtin_bounds: ty::empty_builtin_bounds(),
trait_bounds: vec!(Rc::new(impl_trait_ref))
};
let t = ty::node_id_to_type(tcx, impl_item.id);

View File

@ -347,7 +347,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
ident: special_idents::self_,
def_id: dummy_defid,
bounds: Rc::new(ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(),
builtin_bounds: ty::empty_builtin_bounds(),
trait_bounds: vec!(self_trait_ref)
}),
default: None
@ -418,7 +418,7 @@ pub fn ensure_supertraits(ccx: &CrateCtxt,
let self_ty = ty::mk_self(ccx.tcx, local_def(id));
let mut ty_trait_refs: Vec<Rc<ty::TraitRef>> = Vec::new();
let mut bounds = ty::EmptyBuiltinBounds();
let mut bounds = ty::empty_builtin_bounds();
for ast_trait_ref in ast_trait_refs.iter() {
let trait_def_id = ty::trait_ref_to_def_id(ccx.tcx, ast_trait_ref);
// FIXME(#8559): Need to instantiate the trait_ref whether or not it's a
@ -1094,7 +1094,7 @@ fn ty_generics(ccx: &CrateCtxt,
*/
let mut param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(),
builtin_bounds: ty::empty_builtin_bounds(),
trait_bounds: Vec::new()
};
for ast_bound in ast_bounds.iter() {

View File

@ -22,7 +22,6 @@ pub use middle::typeck::infer::resolve::{resolve_nested_tvar};
pub use middle::typeck::infer::resolve::{resolve_rvar};
use collections::HashMap;
use collections::SmallIntMap;
use middle::ty::{TyVid, IntVid, FloatVid, RegionVid, Vid};
use middle::ty;
use middle::ty_fold;
@ -258,27 +257,20 @@ pub fn fixup_err_to_str(f: fixup_err) -> String {
}
}
fn new_ValsAndBindings<V:Clone,T:Clone>() -> ValsAndBindings<V, T> {
ValsAndBindings {
vals: SmallIntMap::new(),
bindings: Vec::new()
}
}
pub fn new_infer_ctxt<'a>(tcx: &'a ty::ctxt) -> InferCtxt<'a> {
InferCtxt {
tcx: tcx,
ty_var_bindings: RefCell::new(new_ValsAndBindings()),
ty_var_bindings: RefCell::new(ValsAndBindings::new()),
ty_var_counter: Cell::new(0),
int_var_bindings: RefCell::new(new_ValsAndBindings()),
int_var_bindings: RefCell::new(ValsAndBindings::new()),
int_var_counter: Cell::new(0),
float_var_bindings: RefCell::new(new_ValsAndBindings()),
float_var_bindings: RefCell::new(ValsAndBindings::new()),
float_var_counter: Cell::new(0),
region_vars: RegionVarBindings(tcx),
region_vars: RegionVarBindings::new(tcx),
}
}
@ -679,7 +671,7 @@ impl<'a> InferCtxt<'a> {
trait_ref.def_id,
trait_ref.substs.clone(),
ty::UniqTraitStore,
ty::EmptyBuiltinBounds());
ty::empty_builtin_bounds());
let dummy1 = self.resolve_type_vars_if_possible(dummy0);
match ty::get(dummy1).sty {
ty::ty_trait(box ty::TyTrait { ref def_id, ref substs, .. }) => {

View File

@ -143,21 +143,21 @@ pub struct RegionVarBindings<'a> {
values: RefCell<Option<Vec<VarValue> >>,
}
pub fn RegionVarBindings<'a>(tcx: &'a ty::ctxt) -> RegionVarBindings<'a> {
RegionVarBindings {
tcx: tcx,
var_origins: RefCell::new(Vec::new()),
values: RefCell::new(None),
constraints: RefCell::new(HashMap::new()),
lubs: RefCell::new(HashMap::new()),
glbs: RefCell::new(HashMap::new()),
skolemization_count: Cell::new(0),
bound_count: Cell::new(0),
undo_log: RefCell::new(Vec::new())
}
}
impl<'a> RegionVarBindings<'a> {
pub fn new(tcx: &'a ty::ctxt) -> RegionVarBindings<'a> {
RegionVarBindings {
tcx: tcx,
var_origins: RefCell::new(Vec::new()),
values: RefCell::new(None),
constraints: RefCell::new(HashMap::new()),
lubs: RefCell::new(HashMap::new()),
glbs: RefCell::new(HashMap::new()),
skolemization_count: Cell::new(0),
bound_count: Cell::new(0),
undo_log: RefCell::new(Vec::new())
}
}
pub fn in_snapshot(&self) -> bool {
self.undo_log.borrow().len() > 0
}

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -30,6 +30,15 @@ pub struct ValsAndBindings<V, T> {
pub bindings: Vec<(V, VarValue<V, T>)> ,
}
impl<V:Clone, T:Clone> ValsAndBindings<V, T> {
pub fn new() -> ValsAndBindings<V, T> {
ValsAndBindings {
vals: SmallIntMap::new(),
bindings: Vec::new()
}
}
}
pub struct Node<V, T> {
pub root: V,
pub possible_types: T,