From 03ab6351ccc7b0e2b6102f88eddc0bbe84f2abc0 Mon Sep 17 00:00:00 2001 From: Patrick Walton Date: Fri, 14 Jun 2013 18:21:47 -0700 Subject: [PATCH] librustc: Rewrite reachability and forbid duplicate methods in type implementations. This should allow fewer symbols to be exported. --- src/libextra/ebml.rs | 30 +- src/libextra/sync.rs | 8 +- src/libextra/test.rs | 282 ++++++------- src/librustc/driver/driver.rs | 18 +- src/librustc/metadata/encoder.rs | 75 ++-- src/librustc/metadata/tyencode.rs | 1 - src/librustc/middle/astencode.rs | 31 +- src/librustc/middle/reachable.rs | 414 ++++++++++++++++++++ src/librustc/middle/resolve.rs | 249 +++++------- src/librustc/middle/trans/base.rs | 14 +- src/librustc/middle/trans/callee.rs | 2 +- src/librustc/middle/trans/common.rs | 79 +++- src/librustc/middle/trans/mod.rs | 1 - src/librustc/middle/trans/reachable.rs | 246 ------------ src/librustc/middle/typeck/check/mod.rs | 7 +- src/librustc/middle/typeck/collect.rs | 16 +- src/librustc/middle/typeck/infer/combine.rs | 3 +- src/librustc/middle/typeck/infer/lattice.rs | 116 ++++-- src/librustc/middle/typeck/infer/resolve.rs | 2 +- src/librustc/middle/typeck/infer/sub.rs | 1 + src/librustc/middle/typeck/infer/unify.rs | 95 +++-- src/librustc/rustc.rs | 1 + src/librustpkg/rustpkg.rs | 15 +- src/libstd/path.rs | 56 ++- src/libstd/str.rs | 3 +- src/libsyntax/ast.rs | 5 +- src/libsyntax/ast_util.rs | 2 +- src/libsyntax/parse/common.rs | 248 ------------ src/libsyntax/parse/obsolete.rs | 21 +- src/libsyntax/parse/parser.rs | 251 +++++++++++- 30 files changed, 1331 insertions(+), 961 deletions(-) create mode 100644 src/librustc/middle/reachable.rs delete mode 100644 src/librustc/middle/trans/reachable.rs diff --git a/src/libextra/ebml.rs b/src/libextra/ebml.rs index dd3ba639c05..fad04b772eb 100644 --- a/src/libextra/ebml.rs +++ b/src/libextra/ebml.rs @@ -12,6 +12,8 @@ use core::prelude::*; +use core::str; + // Simple Extensible Binary Markup Language (ebml) reader and writer on a // cursor model. See the specification here: // http://www.matroska.org/technical/specs/rfc/index.html @@ -34,6 +36,20 @@ pub struct Doc { end: uint, } +impl Doc { + pub fn get(&self, tag: uint) -> Doc { + reader::get_doc(*self, tag) + } + + pub fn as_str_slice<'a>(&'a self) -> &'a str { + str::from_bytes_slice(self.data.slice(self.start, self.end)) + } + + pub fn as_str(&self) -> ~str { + self.as_str_slice().to_owned() + } +} + pub struct TaggedDoc { tag: uint, doc: Doc, @@ -94,20 +110,6 @@ pub mod reader { // ebml reading - impl Doc { - pub fn get(&self, tag: uint) -> Doc { - get_doc(*self, tag) - } - - pub fn as_str_slice<'a>(&'a self) -> &'a str { - str::from_bytes_slice(self.data.slice(self.start, self.end)) - } - - pub fn as_str(&self) -> ~str { - self.as_str_slice().to_owned() - } - } - struct Res { val: uint, next: uint diff --git a/src/libextra/sync.rs b/src/libextra/sync.rs index 61b6a233944..56168f923ad 100644 --- a/src/libextra/sync.rs +++ b/src/libextra/sync.rs @@ -153,7 +153,7 @@ impl Sem<()> { #[doc(hidden)] impl Sem<~[Waitqueue]> { - pub fn access(&self, blk: &fn() -> U) -> U { + pub fn access_waitqueue(&self, blk: &fn() -> U) -> U { let mut release = None; unsafe { do task::unkillable { @@ -456,7 +456,9 @@ impl Clone for Mutex { impl Mutex { /// Run a function with ownership of the mutex. - pub fn lock(&self, blk: &fn() -> U) -> U { (&self.sem).access(blk) } + pub fn lock(&self, blk: &fn() -> U) -> U { + (&self.sem).access_waitqueue(blk) + } /// Run a function with ownership of the mutex and a handle to a condvar. 
pub fn lock_cond(&self, blk: &fn(c: &Condvar) -> U) -> U { @@ -559,7 +561,7 @@ impl RWlock { unsafe { do task::unkillable { (&self.order_lock).acquire(); - do (&self.access_lock).access { + do (&self.access_lock).access_waitqueue { (&self.order_lock).release(); task::rekillable(blk) } diff --git a/src/libextra/test.rs b/src/libextra/test.rs index 7b68298a8dd..672c4cd648c 100644 --- a/src/libextra/test.rs +++ b/src/libextra/test.rs @@ -19,15 +19,21 @@ use core::prelude::*; use getopts; use sort; +use stats::Stats; use term; +use time::precise_time_ns; use core::comm::{stream, SharedChan}; use core::either; use core::io; +use core::num; use core::option; +use core::rand::RngUtil; +use core::rand; use core::result; use core::task; use core::to_str::ToStr; +use core::u64; use core::uint; use core::vec; @@ -609,152 +615,146 @@ fn calc_result(desc: &TestDesc, task_succeeded: bool) -> TestResult { } } +impl BenchHarness { + /// Callback for benchmark functions to run in their body. + pub fn iter(&mut self, inner:&fn()) { + self.ns_start = precise_time_ns(); + let k = self.iterations; + for u64::range(0, k) |_| { + inner(); + } + self.ns_end = precise_time_ns(); + } + + pub fn ns_elapsed(&mut self) -> u64 { + if self.ns_start == 0 || self.ns_end == 0 { + 0 + } else { + self.ns_end - self.ns_start + } + } + + pub fn ns_per_iter(&mut self) -> u64 { + if self.iterations == 0 { + 0 + } else { + self.ns_elapsed() / self.iterations + } + } + + pub fn bench_n(&mut self, n: u64, f: &fn(&mut BenchHarness)) { + self.iterations = n; + debug!("running benchmark for %u iterations", + n as uint); + f(self); + } + + // This is the Go benchmark algorithm. It produces a single + // datapoint and always tries to run for 1s. + pub fn go_bench(&mut self, f: &fn(&mut BenchHarness)) { + + // Rounds a number down to the nearest power of 10. + fn round_down_10(n: u64) -> u64 { + let mut n = n; + let mut res = 1; + while n > 10 { + n = n / 10; + res *= 10; + } + res + } + + // Rounds x up to a number of the form [1eX, 2eX, 5eX]. + fn round_up(n: u64) -> u64 { + let base = round_down_10(n); + if n < (2 * base) { + 2 * base + } else if n < (5 * base) { + 5 * base + } else { + 10 * base + } + } + + // Initial bench run to get ballpark figure. + let mut n = 1_u64; + self.bench_n(n, f); + + while n < 1_000_000_000 && + self.ns_elapsed() < 1_000_000_000 { + let last = n; + + // Try to estimate iter count for 1s falling back to 1bn + // iterations if first run took < 1ns. + if self.ns_per_iter() == 0 { + n = 1_000_000_000; + } else { + n = 1_000_000_000 / self.ns_per_iter(); + } + + n = u64::max(u64::min(n+n/2, 100*last), last+1); + n = round_up(n); + self.bench_n(n, f); + } + } + + // This is a more statistics-driven benchmark algorithm. + // It stops as quickly as 50ms, so long as the statistical + // properties are satisfactory. If those properties are + // not met, it may run as long as the Go algorithm. 
+ pub fn auto_bench(&mut self, f: &fn(&mut BenchHarness)) -> ~[f64] { + + let mut rng = rand::rng(); + let mut magnitude = 10; + let mut prev_madp = 0.0; + + loop { + let n_samples = rng.gen_uint_range(50, 60); + let n_iter = rng.gen_uint_range(magnitude, + magnitude * 2); + + let samples = do vec::from_fn(n_samples) |_| { + self.bench_n(n_iter as u64, f); + self.ns_per_iter() as f64 + }; + + // Eliminate outliers + let med = samples.median(); + let mad = samples.median_abs_dev(); + let samples = do vec::filter(samples) |f| { + num::abs(*f - med) <= 3.0 * mad + }; + + debug!("%u samples, median %f, MAD=%f, %u survived filter", + n_samples, med as float, mad as float, + samples.len()); + + if samples.len() != 0 { + // If we have _any_ cluster of signal... + let curr_madp = samples.median_abs_dev_pct(); + if self.ns_elapsed() > 1_000_000 && + (curr_madp < 1.0 || + num::abs(curr_madp - prev_madp) < 0.1) { + return samples; + } + prev_madp = curr_madp; + + if n_iter > 20_000_000 || + self.ns_elapsed() > 20_000_000 { + return samples; + } + } + + magnitude *= 2; + } + } +} + pub mod bench { use core::prelude::*; - use core::num; - use core::rand::RngUtil; - use core::rand; - use core::u64; use core::vec; - use stats::Stats; use test::{BenchHarness, BenchSamples}; - use time::precise_time_ns; - - impl BenchHarness { - /// Callback for benchmark functions to run in their body. - pub fn iter(&mut self, inner:&fn()) { - self.ns_start = precise_time_ns(); - let k = self.iterations; - for u64::range(0, k) |_| { - inner(); - } - self.ns_end = precise_time_ns(); - } - - pub fn ns_elapsed(&mut self) -> u64 { - if self.ns_start == 0 || self.ns_end == 0 { - 0 - } else { - self.ns_end - self.ns_start - } - } - - pub fn ns_per_iter(&mut self) -> u64 { - if self.iterations == 0 { - 0 - } else { - self.ns_elapsed() / self.iterations - } - } - - pub fn bench_n(&mut self, n: u64, f: &fn(&mut BenchHarness)) { - self.iterations = n; - debug!("running benchmark for %u iterations", - n as uint); - f(self); - } - - // This is the Go benchmark algorithm. It produces a single - // datapoint and always tries to run for 1s. - pub fn go_bench(&mut self, f: &fn(&mut BenchHarness)) { - - // Rounds a number down to the nearest power of 10. - fn round_down_10(n: u64) -> u64 { - let mut n = n; - let mut res = 1; - while n > 10 { - n = n / 10; - res *= 10; - } - res - } - - // Rounds x up to a number of the form [1eX, 2eX, 5eX]. - fn round_up(n: u64) -> u64 { - let base = round_down_10(n); - if n < (2 * base) { - 2 * base - } else if n < (5 * base) { - 5 * base - } else { - 10 * base - } - } - - // Initial bench run to get ballpark figure. - let mut n = 1_u64; - self.bench_n(n, f); - - while n < 1_000_000_000 && - self.ns_elapsed() < 1_000_000_000 { - let last = n; - - // Try to estimate iter count for 1s falling back to 1bn - // iterations if first run took < 1ns. - if self.ns_per_iter() == 0 { - n = 1_000_000_000; - } else { - n = 1_000_000_000 / self.ns_per_iter(); - } - - n = u64::max(u64::min(n+n/2, 100*last), last+1); - n = round_up(n); - self.bench_n(n, f); - } - } - - // This is a more statistics-driven benchmark algorithm. - // It stops as quickly as 50ms, so long as the statistical - // properties are satisfactory. If those properties are - // not met, it may run as long as the Go algorithm. 
- pub fn auto_bench(&mut self, f: &fn(&mut BenchHarness)) -> ~[f64] { - - let mut rng = rand::rng(); - let mut magnitude = 10; - let mut prev_madp = 0.0; - - loop { - let n_samples = rng.gen_uint_range(50, 60); - let n_iter = rng.gen_uint_range(magnitude, - magnitude * 2); - - let samples = do vec::from_fn(n_samples) |_| { - self.bench_n(n_iter as u64, f); - self.ns_per_iter() as f64 - }; - - // Eliminate outliers - let med = samples.median(); - let mad = samples.median_abs_dev(); - let samples = do vec::filter(samples) |f| { - num::abs(*f - med) <= 3.0 * mad - }; - - debug!("%u samples, median %f, MAD=%f, %u survived filter", - n_samples, med as float, mad as float, - samples.len()); - - if samples.len() != 0 { - // If we have _any_ cluster of signal... - let curr_madp = samples.median_abs_dev_pct(); - if self.ns_elapsed() > 1_000_000 && - (curr_madp < 1.0 || - num::abs(curr_madp - prev_madp) < 0.1) { - return samples; - } - prev_madp = curr_madp; - - if n_iter > 20_000_000 || - self.ns_elapsed() > 20_000_000 { - return samples; - } - } - - magnitude *= 2; - } - } - } pub fn benchmark(f: &fn(&mut BenchHarness)) -> BenchSamples { diff --git a/src/librustc/driver/driver.rs b/src/librustc/driver/driver.rs index 2229a85836a..18693b52fc8 100644 --- a/src/librustc/driver/driver.rs +++ b/src/librustc/driver/driver.rs @@ -19,7 +19,7 @@ use front; use lib::llvm::llvm; use metadata::{creader, cstore, filesearch}; use metadata; -use middle::{trans, freevars, kind, ty, typeck, lint, astencode}; +use middle::{trans, freevars, kind, ty, typeck, lint, astencode, reachable}; use middle; use util::common::time; use util::ppaux; @@ -299,10 +299,16 @@ pub fn compile_rest(sess: Session, time(time_passes, ~"kind checking", || kind::check_crate(ty_cx, method_map, crate)); + let reachable_map = + time(time_passes, ~"reachability checking", || + reachable::find_reachable(ty_cx, method_map, crate)); + time(time_passes, ~"lint checking", || lint::check_crate(ty_cx, crate)); - if phases.to == cu_no_trans { return (Some(crate), Some(ty_cx)); } + if phases.to == cu_no_trans { + return (Some(crate), Some(ty_cx)); + } let maps = astencode::Maps { root_map: root_map, @@ -315,9 +321,13 @@ pub fn compile_rest(sess: Session, let outputs = outputs.get_ref(); time(time_passes, ~"translation", || - trans::base::trans_crate(sess, crate, ty_cx, + trans::base::trans_crate(sess, + crate, + ty_cx, &outputs.obj_filename, - exp_map2, maps)) + exp_map2, + reachable_map, + maps)) }; let outputs = outputs.get_ref(); diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index f8ecef3b4a4..b1967752e45 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -16,7 +16,6 @@ use metadata::common::*; use metadata::cstore; use metadata::decoder; use metadata::tyencode; -use middle::trans::reachable; use middle::ty::node_id_to_type; use middle::ty; use middle; @@ -60,7 +59,6 @@ pub type encode_inlined_item<'self> = &'self fn(ecx: &EncodeContext, pub struct EncodeParams<'self> { diag: @span_handler, tcx: ty::ctxt, - reachable: reachable::map, reexports2: middle::resolve::ExportMap2, item_symbols: &'self HashMap, discrim_symbols: &'self HashMap, @@ -87,7 +85,6 @@ pub struct EncodeContext<'self> { diag: @span_handler, tcx: ty::ctxt, stats: @mut Stats, - reachable: reachable::map, reexports2: middle::resolve::ExportMap2, item_symbols: &'self HashMap, discrim_symbols: &'self HashMap, @@ -157,8 +154,8 @@ fn encode_trait_ref(ebml_w: &mut writer::Encoder, diag: ecx.diag, ds: def_to_str, tcx: 
ecx.tcx, - reachable: |a| r.contains(&a), - abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)}; + abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs) + }; ebml_w.start_tag(tag); tyencode::enc_trait_ref(ebml_w.writer, ty_str_ctxt, trait_ref); @@ -185,8 +182,8 @@ fn encode_ty_type_param_defs(ebml_w: &mut writer::Encoder, diag: ecx.diag, ds: def_to_str, tcx: ecx.tcx, - reachable: |a| r.contains(&a), - abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)}; + abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs) + }; for params.iter().advance |param| { ebml_w.start_tag(tag); tyencode::enc_type_param_def(ebml_w.writer, ty_str_ctxt, param); @@ -218,8 +215,8 @@ pub fn write_type(ecx: &EncodeContext, diag: ecx.diag, ds: def_to_str, tcx: ecx.tcx, - reachable: |a| r.contains(&a), - abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)}; + abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs) + }; tyencode::enc_ty(ebml_w.writer, ty_str_ctxt, typ); } @@ -231,8 +228,8 @@ pub fn write_vstore(ecx: &EncodeContext, diag: ecx.diag, ds: def_to_str, tcx: ecx.tcx, - reachable: |a| r.contains(&a), - abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)}; + abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs) + }; tyencode::enc_vstore(ebml_w.writer, ty_str_ctxt, vstore); } @@ -264,8 +261,8 @@ fn encode_method_fty(ecx: &EncodeContext, diag: ecx.diag, ds: def_to_str, tcx: ecx.tcx, - reachable: |a| r.contains(&a), - abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)}; + abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs) + }; tyencode::enc_bare_fn_ty(ebml_w.writer, ty_str_ctxt, typ); ebml_w.end_tag(); @@ -780,13 +777,6 @@ fn encode_info_for_item(ecx: &EncodeContext, index: @mut ~[entry], path: &[ast_map::path_elt]) { let tcx = ecx.tcx; - let must_write = - match item.node { - item_enum(_, _) | item_impl(*) | item_trait(*) | item_struct(*) | - item_mod(*) | item_foreign_mod(*) | item_static(*) => true, - _ => false - }; - if !must_write && !reachable(ecx, item.id) { return; } fn add_to_index_(item: @item, ebml_w: &writer::Encoder, index: @mut ~[entry]) { @@ -898,23 +888,6 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_type_param_bounds(ebml_w, ecx, &generics.ty_params); encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id)); - // If this is a tuple- or enum-like struct, encode the type of the - // constructor. - if struct_def.fields.len() > 0 && - struct_def.fields[0].node.kind == ast::unnamed_field { - let ctor_id = match struct_def.ctor_id { - Some(ctor_id) => ctor_id, - None => ecx.tcx.sess.bug("struct def didn't have ctor id"), - }; - - encode_info_for_struct_ctor(ecx, - ebml_w, - path, - item.ident, - ctor_id, - index); - } - encode_name(ecx, ebml_w, item.ident); encode_attributes(ebml_w, item.attrs); encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident)); @@ -944,6 +917,23 @@ fn encode_info_for_item(ecx: &EncodeContext, let bkts = create_index(idx); encode_index(ebml_w, bkts, write_int); ebml_w.end_tag(); + + // If this is a tuple- or enum-like struct, encode the type of the + // constructor. 
+ if struct_def.fields.len() > 0 && + struct_def.fields[0].node.kind == ast::unnamed_field { + let ctor_id = match struct_def.ctor_id { + Some(ctor_id) => ctor_id, + None => ecx.tcx.sess.bug("struct def didn't have ctor id"), + }; + + encode_info_for_struct_ctor(ecx, + ebml_w, + path, + item.ident, + ctor_id, + index); + } } item_impl(ref generics, opt_trait, ty, ref methods) => { add_to_index(); @@ -1092,7 +1082,6 @@ fn encode_info_for_foreign_item(ecx: &EncodeContext, index: @mut ~[entry], path: ast_map::path, abi: AbiSet) { - if !reachable(ecx, nitem.id) { return; } index.push(entry { val: nitem.id, pos: ebml_w.writer.tell() }); ebml_w.start_tag(tag_items_data_item); @@ -1162,6 +1151,12 @@ fn encode_info_for_items(ecx: &EncodeContext, visit::visit_foreign_item(ni, (cx, v)); match items.get_copy(&ni.id) { ast_map::node_foreign_item(_, abi, _, pt) => { + debug!("writing foreign item %s::%s", + ast_map::path_to_str( + *pt, + token::get_ident_interner()), + *token::ident_to_str(&ni.ident)); + let mut ebml_w = copy ebml_w; // See above let ecx : &EncodeContext = unsafe { cast::transmute(ecx_ptr) }; @@ -1466,7 +1461,7 @@ pub fn encode_metadata(parms: EncodeParams, crate: &crate) -> ~[u8] { total_bytes: 0, n_inlines: 0 }; - let EncodeParams{item_symbols, diag, tcx, reachable, reexports2, + let EncodeParams{item_symbols, diag, tcx, reexports2, discrim_symbols, cstore, encode_inlined_item, link_meta, _} = parms; let type_abbrevs = @mut HashMap::new(); @@ -1475,7 +1470,6 @@ pub fn encode_metadata(parms: EncodeParams, crate: &crate) -> ~[u8] { diag: diag, tcx: tcx, stats: stats, - reachable: reachable, reexports2: reexports2, item_symbols: item_symbols, discrim_symbols: discrim_symbols, @@ -1557,7 +1551,6 @@ pub fn encoded_ty(tcx: ty::ctxt, t: ty::t) -> ~str { diag: tcx.diag, ds: def_to_str, tcx: tcx, - reachable: |_id| false, abbrevs: tyencode::ac_no_abbrevs}; do io::with_str_writer |wr| { tyencode::enc_ty(wr, cx, t); diff --git a/src/librustc/metadata/tyencode.rs b/src/librustc/metadata/tyencode.rs index 0f9937fd3c0..fb72617b743 100644 --- a/src/librustc/metadata/tyencode.rs +++ b/src/librustc/metadata/tyencode.rs @@ -31,7 +31,6 @@ pub struct ctxt { ds: @fn(def_id) -> ~str, // The type context. 
tcx: ty::ctxt, - reachable: @fn(node_id) -> bool, abbrevs: abbrev_ctxt } diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index fb8238b84d6..70e94844319 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -368,14 +368,17 @@ impl tr for ast::def { ast::def_static_method(did.tr(xcx), did2_opt.map(|did2| did2.tr(xcx)), p) - }, - ast::def_self_ty(nid) => ast::def_self_ty(xcx.tr_id(nid)), - ast::def_self(nid, i) => ast::def_self(xcx.tr_id(nid), i), - ast::def_mod(did) => ast::def_mod(did.tr(xcx)), - ast::def_foreign_mod(did) => ast::def_foreign_mod(did.tr(xcx)), - ast::def_static(did, m) => ast::def_static(did.tr(xcx), m), - ast::def_arg(nid, b) => ast::def_arg(xcx.tr_id(nid), b), - ast::def_local(nid, b) => ast::def_local(xcx.tr_id(nid), b), + } + ast::def_method(did0, did1) => { + ast::def_method(did0.tr(xcx), did1.map(|did1| did1.tr(xcx))) + } + ast::def_self_ty(nid) => { ast::def_self_ty(xcx.tr_id(nid)) } + ast::def_self(nid, i) => { ast::def_self(xcx.tr_id(nid), i) } + ast::def_mod(did) => { ast::def_mod(did.tr(xcx)) } + ast::def_foreign_mod(did) => { ast::def_foreign_mod(did.tr(xcx)) } + ast::def_static(did, m) => { ast::def_static(did.tr(xcx), m) } + ast::def_arg(nid, b) => { ast::def_arg(xcx.tr_id(nid), b) } + ast::def_local(nid, b) => { ast::def_local(xcx.tr_id(nid), b) } ast::def_variant(e_did, v_did) => { ast::def_variant(e_did.tr(xcx), v_did.tr(xcx)) }, @@ -692,12 +695,12 @@ trait get_ty_str_ctxt { impl<'self> get_ty_str_ctxt for e::EncodeContext<'self> { fn ty_str_ctxt(&self) -> @tyencode::ctxt { - let r = self.reachable; - @tyencode::ctxt {diag: self.tcx.sess.diagnostic(), - ds: e::def_to_str, - tcx: self.tcx, - reachable: |a| r.contains(&a), - abbrevs: tyencode::ac_use_abbrevs(self.type_abbrevs)} + @tyencode::ctxt { + diag: self.tcx.sess.diagnostic(), + ds: e::def_to_str, + tcx: self.tcx, + abbrevs: tyencode::ac_use_abbrevs(self.type_abbrevs) + } } } diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs new file mode 100644 index 00000000000..1361f2c245a --- /dev/null +++ b/src/librustc/middle/reachable.rs @@ -0,0 +1,414 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Finds items that are externally reachable, to determine which items +// need to have their metadata (and possibly their AST) serialized. +// All items that can be referred to through an exported name are +// reachable, and when a reachable thing is inline or generic, it +// makes all other generics or inline functions that it references +// reachable as well. + +use core::prelude::*; +use core::iterator::IteratorUtil; + +use middle::resolve; +use middle::ty; +use middle::typeck; + +use core::hashmap::HashSet; +use syntax::ast::*; +use syntax::ast; +use syntax::ast_map; +use syntax::ast_util::def_id_of_def; +use syntax::attr; +use syntax::codemap; +use syntax::parse::token; +use syntax::visit::Visitor; +use syntax::visit; + +// Returns true if the given set of attributes contains the `#[inline]` +// attribute. 
+fn attributes_specify_inlining(attrs: &[attribute]) -> bool { + attr::attrs_contains_name(attrs, "inline") +} + +// Returns true if the given set of generics implies that the item it's +// associated with must be inlined. +fn generics_require_inlining(generics: &Generics) -> bool { + !generics.ty_params.is_empty() +} + +// Returns true if the given item must be inlined because it may be +// monomorphized or it was marked with `#[inline]`. This will only return +// true for functions. +fn item_might_be_inlined(item: @item) -> bool { + if attributes_specify_inlining(item.attrs) { + return true + } + + match item.node { + item_fn(_, _, _, ref generics, _) => { + generics_require_inlining(generics) + } + _ => false, + } +} + +// Returns true if the given type method must be inlined because it may be +// monomorphized or it was marked with `#[inline]`. +fn ty_method_might_be_inlined(ty_method: &ty_method) -> bool { + attributes_specify_inlining(ty_method.attrs) || + generics_require_inlining(&ty_method.generics) +} + +// Returns true if the given trait method must be inlined because it may be +// monomorphized or it was marked with `#[inline]`. +fn trait_method_might_be_inlined(trait_method: &trait_method) -> bool { + match *trait_method { + required(ref ty_method) => ty_method_might_be_inlined(ty_method), + provided(_) => true + } +} + +// Information needed while computing reachability. +struct ReachableContext { + // The type context. + tcx: ty::ctxt, + // The method map, which links node IDs of method call expressions to the + // methods they've been resolved to. + method_map: typeck::method_map, + // The set of items which must be exported in the linkage sense. + reachable_symbols: @mut HashSet, + // A worklist of item IDs. Each item ID in this worklist will be inlined + // and will be scanned for further references. + worklist: @mut ~[node_id], +} + +impl ReachableContext { + // Creates a new reachability computation context. + fn new(tcx: ty::ctxt, method_map: typeck::method_map) + -> ReachableContext { + ReachableContext { + tcx: tcx, + method_map: method_map, + reachable_symbols: @mut HashSet::new(), + worklist: @mut ~[], + } + } + + // Step 1: Mark all public symbols, and add all public symbols that might + // be inlined to a worklist. + fn mark_public_symbols(&self, crate: @crate) { + let reachable_symbols = self.reachable_symbols; + let worklist = self.worklist; + let visitor = visit::mk_vt(@Visitor { + visit_item: |item, _, visitor| { + match item.node { + item_fn(*) => { + reachable_symbols.insert(item.id); + if item_might_be_inlined(item) { + worklist.push(item.id) + } + } + item_struct(ref struct_def, _) => { + match struct_def.ctor_id { + None => {} + Some(ctor_id) => { + reachable_symbols.insert(ctor_id); + } + } + } + item_enum(ref enum_def, _) => { + for enum_def.variants.each |variant| { + reachable_symbols.insert(variant.node.id); + } + } + item_impl(ref generics, trait_ref, _, ref methods) => { + // XXX(pcwalton): We conservatively assume any methods + // on a trait implementation are reachable, when this + // is not the case. We could be more precise by only + // treating implementations of reachable or cross- + // crate traits as reachable. + + // Mark all public methods as reachable. + for methods.each |method| { + if method.vis == public || trait_ref.is_some() { + reachable_symbols.insert(method.id); + } + } + + if generics_require_inlining(generics) { + // If the impl itself has generics, add all public + // symbols to the worklist. 
+ for methods.each |method| { + if method.vis == public || + trait_ref.is_some() { + worklist.push(method.id) + } + } + } else { + // Otherwise, add only public methods that have + // generics to the worklist. + for methods.each |method| { + let generics = &method.generics; + let attrs = &method.attrs; + if generics_require_inlining(generics) || + attributes_specify_inlining(*attrs) || + method.vis == public || + trait_ref.is_some() { + worklist.push(method.id) + } + } + } + } + item_trait(_, _, ref trait_methods) => { + // Mark all provided methods as reachable. + for trait_methods.each |trait_method| { + match *trait_method { + provided(method) => { + reachable_symbols.insert(method.id); + worklist.push(method.id) + } + required(_) => {} + } + } + } + _ => {} + } + + if item.vis == public { + visit::visit_item(item, (), visitor) + } + }, + .. *visit::default_visitor() + }); + + visit::visit_crate(crate, (), visitor) + } + + // Returns true if the given def ID represents a local item that is + // eligible for inlining and false otherwise. + fn def_id_represents_local_inlined_item(tcx: ty::ctxt, def_id: def_id) + -> bool { + if def_id.crate != local_crate { + return false + } + + let node_id = def_id.node; + match tcx.items.find(&node_id) { + Some(&ast_map::node_item(item, _)) => { + match item.node { + item_fn(*) => item_might_be_inlined(item), + _ => false, + } + } + Some(&ast_map::node_trait_method(trait_method, _, _)) => { + match *trait_method { + required(_) => false, + provided(_) => true, + } + } + Some(&ast_map::node_method(method, impl_did, _)) => { + if generics_require_inlining(&method.generics) || + attributes_specify_inlining(method.attrs) { + true + } else { + // Check the impl. If the generics on the self type of the + // impl require inlining, this method does too. + assert!(impl_did.crate == local_crate); + match tcx.items.find(&impl_did.node) { + Some(&ast_map::node_item(item, _)) => { + match item.node { + item_impl(ref generics, _, _, _) => { + generics_require_inlining(generics) + } + _ => false + } + } + Some(_) => { + tcx.sess.span_bug(method.span, + "method is not inside an \ + impl?!") + } + None => { + tcx.sess.span_bug(method.span, + "the impl that this method is \ + supposedly inside of doesn't \ + exist in the AST map?!") + } + } + } + } + Some(_) => false, + None => tcx.sess.bug("def ID not in def map?!"), + } + } + + // Helper function to set up a visitor for `propagate()` below. 
+ fn init_visitor(&self) -> visit::vt<()> { + let (worklist, method_map) = (self.worklist, self.method_map); + let (tcx, reachable_symbols) = (self.tcx, self.reachable_symbols); + visit::mk_vt(@visit::Visitor { + visit_expr: |expr, _, visitor| { + match expr.node { + expr_path(_) => { + let def = match tcx.def_map.find(&expr.id) { + Some(&def) => def, + None => { + tcx.sess.span_bug(expr.span, + "def ID not in def map?!") + } + }; + + let def_id = def_id_of_def(def); + if ReachableContext:: + def_id_represents_local_inlined_item(tcx, + def_id) { + worklist.push(def_id.node) + } + reachable_symbols.insert(def_id.node); + } + expr_method_call(*) => { + match method_map.find(&expr.id) { + Some(&typeck::method_map_entry { + origin: typeck::method_static(def_id), + _ + }) => { + if ReachableContext:: + def_id_represents_local_inlined_item( + tcx, + def_id) { + worklist.push(def_id.node) + } + reachable_symbols.insert(def_id.node); + } + Some(_) => {} + None => { + tcx.sess.span_bug(expr.span, + "method call expression \ + not in method map?!") + } + } + } + _ => {} + } + + visit::visit_expr(expr, (), visitor) + }, + ..*visit::default_visitor() + }) + } + + // Step 2: Mark all symbols that the symbols on the worklist touch. + fn propagate(&self) { + let visitor = self.init_visitor(); + let mut scanned = HashSet::new(); + while self.worklist.len() > 0 { + let search_item = self.worklist.pop(); + if scanned.contains(&search_item) { + loop + } + scanned.insert(search_item); + self.reachable_symbols.insert(search_item); + + // Find the AST block corresponding to the item and visit it, + // marking all path expressions that resolve to something + // interesting. + match self.tcx.items.find(&search_item) { + Some(&ast_map::node_item(item, _)) => { + match item.node { + item_fn(_, _, _, _, ref search_block) => { + visit::visit_block(search_block, (), visitor) + } + _ => { + self.tcx.sess.span_bug(item.span, + "found non-function item \ + in worklist?!") + } + } + } + Some(&ast_map::node_trait_method(trait_method, _, _)) => { + match *trait_method { + required(ref ty_method) => { + self.tcx.sess.span_bug(ty_method.span, + "found required method in \ + worklist?!") + } + provided(ref method) => { + visit::visit_block(&method.body, (), visitor) + } + } + } + Some(&ast_map::node_method(ref method, _, _)) => { + visit::visit_block(&method.body, (), visitor) + } + Some(_) => { + let ident_interner = token::get_ident_interner(); + let desc = ast_map::node_id_to_str(self.tcx.items, + search_item, + ident_interner); + self.tcx.sess.bug(fmt!("found unexpected thingy in \ + worklist: %s", + desc)) + } + None => { + self.tcx.sess.bug(fmt!("found unmapped ID in worklist: \ + %d", + search_item)) + } + } + } + } + + // Step 3: Mark all destructors as reachable. + // + // XXX(pcwalton): This is a conservative overapproximation, but fixing + // this properly would result in the necessity of computing *type* + // reachability, which might result in a compile time loss. + fn mark_destructors_reachable(&self) { + for self.tcx.destructor_for_type.each |_, destructor_def_id| { + if destructor_def_id.crate == local_crate { + self.reachable_symbols.insert(destructor_def_id.node); + } + } + } +} + +pub fn find_reachable(tcx: ty::ctxt, + method_map: typeck::method_map, + crate: @crate) + -> @mut HashSet { + // XXX(pcwalton): We only need to mark symbols that are exported. But this + // is more complicated than just looking at whether the symbol is `pub`, + // because it might be the target of a `pub use` somewhere. 
For now, I + // think we are fine, because you can't `pub use` something that wasn't + // exported due to the bug whereby `use` only looks through public + // modules even if you're inside the module the `use` appears in. When + // this bug is fixed, however, this code will need to be updated. Probably + // the easiest way to fix this (although a conservative overapproximation) + // is to have the name resolution pass mark all targets of a `pub use` as + // "must be reachable". + + let reachable_context = ReachableContext::new(tcx, method_map); + + // Step 1: Mark all public symbols, and add all public symbols that might + // be inlined to a worklist. + reachable_context.mark_public_symbols(crate); + + // Step 2: Mark all symbols that the symbols on the worklist touch. + reachable_context.propagate(); + + // Step 3: Mark all destructors as reachable. + reachable_context.mark_destructors_reachable(); + + // Return the set of reachable symbols. + reachable_context.reachable_symbols +} + diff --git a/src/librustc/middle/resolve.rs b/src/librustc/middle/resolve.rs index bf37ce676a8..224cf7c32a8 100644 --- a/src/librustc/middle/resolve.rs +++ b/src/librustc/middle/resolve.rs @@ -652,21 +652,9 @@ impl NameBindings { match self.type_def { None => None, Some(ref type_def) => { - // FIXME (#3784): This is reallllly questionable. - // Perhaps the right thing to do is to merge def_mod - // and def_ty. match (*type_def).type_def { Some(type_def) => Some(type_def), - None => { - match (*type_def).module_def { - Some(module_def) => { - let module_def = &mut *module_def; - module_def.def_id.map(|def_id| - def_mod(*def_id)) - } - None => None - } - } + None => None, } } } @@ -1230,49 +1218,29 @@ impl Resolver { visit_item(item, (new_parent, visitor)); } - item_impl(_, trait_ref_opt, ty, ref methods) => { - // If this implements an anonymous trait and it has static - // methods, then add all the static methods within to a new - // module, if the type was defined within this module. + item_impl(_, None, ty, ref methods) => { + // If this implements an anonymous trait, then add all the + // methods within to a new module, if the type was defined + // within this module. // // FIXME (#3785): This is quite unsatisfactory. Perhaps we // should modify anonymous traits to only be implementable in // the same module that declared the type. - // Bail out early if there are no static methods. - let mut methods_seen = HashMap::new(); - let mut has_static_methods = false; - for methods.iter().advance |method| { - match method.explicit_self.node { - sty_static => has_static_methods = true, - _ => { - // Make sure you can't define duplicate methods - let ident = method.ident; - let span = method.span; - let old_sp = methods_seen.find_or_insert(ident, span); - if *old_sp != span { - self.session.span_err(span, - fmt!("duplicate definition of method `%s`", - self.session.str_of(ident))); - self.session.span_note(*old_sp, - fmt!("first definition of method `%s` here", - self.session.str_of(ident))); - } - } - } - } - - // If there are static methods, then create the module - // and add them. - match (trait_ref_opt, ty) { - (None, @Ty { node: ty_path(path, _, _), _ }) if - has_static_methods && path.idents.len() == 1 => { + // Create the module and add all methods. 
+ match *ty { + Ty { + node: ty_path(path, _), + _ + } if path.idents.len() == 1 => { let name = path_to_ident(path); let new_parent = match parent.children.find(&name) { // It already exists - Some(&child) if child.get_module_if_available().is_some() && - child.get_module().kind == ImplModuleKind => { + Some(&child) if child.get_module_if_available() + .is_some() && + child.get_module().kind == + ImplModuleKind => { ModuleReducedGraphParent(child.get_module()) } // Create the module @@ -1283,8 +1251,8 @@ impl Resolver { ForbidDuplicateModules, sp); - let parent_link = self.get_parent_link(new_parent, - ident); + let parent_link = + self.get_parent_link(new_parent, ident); let def_id = local_def(item.id); name_bindings.define_module(Public, parent_link, @@ -1292,30 +1260,36 @@ impl Resolver { ImplModuleKind, sp); - ModuleReducedGraphParent(name_bindings.get_module()) + ModuleReducedGraphParent( + name_bindings.get_module()) } }; - // For each static method... + // For each method... for methods.iter().advance |method| { - match method.explicit_self.node { + // Add the method to the module. + let ident = method.ident; + let (method_name_bindings, _) = + self.add_child(ident, + new_parent, + ForbidDuplicateValues, + method.span); + let def = match method.explicit_self.node { sty_static => { - // Add the static method to the - // module. - let ident = method.ident; - let (method_name_bindings, _) = - self.add_child( - ident, - new_parent, - ForbidDuplicateValues, - method.span); - let def = def_fn(local_def(method.id), - method.purity); - method_name_bindings.define_value( - Public, def, method.span); + // Static methods become `def_fn`s. + def_fn(local_def(method.id), + method.purity) } - _ => {} - } + _ => { + // Non-static methods become + // `def_method`s. + def_method(local_def(method.id), None) + } + }; + + method_name_bindings.define_value(Public, + def, + method.span); } } _ => {} @@ -1324,41 +1298,23 @@ impl Resolver { visit_item(item, (parent, visitor)); } + item_impl(_, Some(_), ty, ref methods) => { + visit_item(item, parent, visitor); + } + item_trait(_, _, ref methods) => { let (name_bindings, new_parent) = self.add_child(ident, parent, ForbidDuplicateTypes, sp); - // If the trait has static methods, then add all the static - // methods within to a new module. - // - // We only need to create the module if the trait has static - // methods, so check that first. - let mut has_static_methods = false; - for (*methods).iter().advance |method| { - let ty_m = trait_method_to_ty_method(method); - match ty_m.explicit_self.node { - sty_static => { - has_static_methods = true; - break; - } - _ => {} - } - } - - // Create the module if necessary. - let module_parent_opt; - if has_static_methods { - let parent_link = self.get_parent_link(parent, ident); - name_bindings.define_module(privacy, - parent_link, - Some(local_def(item.id)), - TraitModuleKind, - sp); - module_parent_opt = Some(ModuleReducedGraphParent( - name_bindings.get_module())); - } else { - module_parent_opt = None; - } + // Add all the methods within to a new module. + let parent_link = self.get_parent_link(parent, ident); + name_bindings.define_module(privacy, + parent_link, + Some(local_def(item.id)), + TraitModuleKind, + sp); + let module_parent = ModuleReducedGraphParent(name_bindings. + get_module()); // Add the names of all the methods to the trait info. 
let mut method_names = HashMap::new(); @@ -1366,35 +1322,34 @@ impl Resolver { let ty_m = trait_method_to_ty_method(method); let ident = ty_m.ident; - // Add it to the trait info if not static, - // add it as a name in the trait module otherwise. - match ty_m.explicit_self.node { - sty_static => { - let def = def_static_method( - local_def(ty_m.id), - Some(local_def(item.id)), - ty_m.purity); - let (method_name_bindings, _) = - self.add_child(ident, - module_parent_opt.get(), - ForbidDuplicateValues, - ty_m.span); - method_name_bindings.define_value(Public, - def, - ty_m.span); + // Add it as a name in the trait module. + let def = match ty_m.explicit_self.node { + sty_static => { + // Static methods become `def_static_method`s. + def_static_method(local_def(ty_m.id), + Some(local_def(item.id)), + ty_m.purity) } _ => { - // Make sure you can't define duplicate methods - let old_sp = method_names.find_or_insert(ident, ty_m.span); - if *old_sp != ty_m.span { - self.session.span_err(ty_m.span, - fmt!("duplicate definition of method `%s`", - self.session.str_of(ident))); - self.session.span_note(*old_sp, - fmt!("first definition of method `%s` here", - self.session.str_of(ident))); - } + // Non-static methods become `def_method`s. + def_method(local_def(ty_m.id), + Some(local_def(item.id))) + } + }; + + let (method_name_bindings, _) = + self.add_child(ident, + module_parent, + ForbidDuplicateValues, + ty_m.span); + method_name_bindings.define_value(Public, def, ty_m.span); + + // Add it to the trait info if not static. + match ty_m.explicit_self.node { + sty_static => {} + _ => { + method_names.insert(ident); } } } @@ -1751,6 +1706,9 @@ impl Resolver { child_name_bindings.define_type(privacy, def, dummy_sp()); self.structs.insert(def_id); } + def_method(*) => { + // Ignored; handled elsewhere. + } def_self(*) | def_arg(*) | def_local(*) | def_prim_ty(*) | def_ty_param(*) | def_binding(*) | def_use(*) | def_upvar(*) | def_region(*) | @@ -2391,7 +2349,8 @@ impl Resolver { } match type_result { BoundResult(target_module, name_bindings) => { - debug!("(resolving single import) found type target"); + debug!("(resolving single import) found type target: %?", + name_bindings.type_def.get().type_def); import_resolution.type_target = Some(Target(target_module, name_bindings)); import_resolution.type_id = directive.id; @@ -3269,22 +3228,8 @@ impl Resolver { pub fn add_exports_for_module(@mut self, exports2: &mut ~[Export2], module_: @mut Module) { - for module_.children.iter().advance |(ident, namebindings)| { - debug!("(computing exports) maybe export '%s'", - self.session.str_of(*ident)); - self.add_exports_of_namebindings(&mut *exports2, - *ident, - *namebindings, - TypeNS, - false); - self.add_exports_of_namebindings(&mut *exports2, - *ident, - *namebindings, - ValueNS, - false); - } - - for module_.import_resolutions.iter().advance |(ident, importresolution)| { + for module_.import_resolutions.iter().advance |ident, + importresolution| { if importresolution.privacy != Public { debug!("(computing exports) not reexporting private `%s`", self.session.str_of(*ident)); @@ -4518,8 +4463,8 @@ impl Resolver { if path.global { return self.resolve_crate_relative_path(path, - self.xray_context, - namespace); + self.xray_context, + namespace); } if path.idents.len() > 1 { @@ -4947,6 +4892,22 @@ impl Resolver { // Write the result into the def map. debug!("(resolving expr) resolved `%s`", self.idents_to_str(path.idents)); + + // First-class methods are not supported yet; error + // out here. 
+ match def { + def_method(*) => { + self.session.span_err(expr.span, + "first-class methods \ + are not supported"); + self.session.span_note(expr.span, + "call the method \ + using the `.` \ + syntax"); + } + _ => {} + } + self.record_def(expr.id, def); } None => { @@ -5415,7 +5376,7 @@ pub fn resolve_crate(session: Session, -> CrateMap { let resolver = @mut Resolver(session, lang_items, crate); resolver.resolve(); - let Resolver{def_map, export_map2, trait_map, _} = copy *resolver; + let Resolver { def_map, export_map2, trait_map, _ } = copy *resolver; CrateMap { def_map: def_map, exp_map2: export_map2, diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs index 251add59326..53d0118aa0e 100644 --- a/src/librustc/middle/trans/base.rs +++ b/src/librustc/middle/trans/base.rs @@ -54,7 +54,6 @@ use middle::trans::machine; use middle::trans::machine::{llalign_of_min, llsize_of}; use middle::trans::meth; use middle::trans::monomorphize; -use middle::trans::reachable; use middle::trans::tvec; use middle::trans::type_of; use middle::trans::type_of::*; @@ -2437,7 +2436,6 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::node_id) -> ValueRef { } } ast_map::node_method(m, _, pth) => { - exprt = true; register_method(ccx, id, pth, m) } ast_map::node_foreign_item(ni, _, _, pth) => { @@ -2511,7 +2509,7 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::node_id) -> ValueRef { variant)) } }; - if !(exprt || ccx.reachable.contains(&id)) { + if !exprt { lib::llvm::SetLinkage(val, lib::llvm::InternalLinkage); } ccx.item_vals.insert(id, val); @@ -2890,16 +2888,12 @@ pub fn trans_crate(sess: session::Session, tcx: ty::ctxt, output: &Path, emap2: resolve::ExportMap2, - maps: astencode::Maps) -> (ContextRef, ModuleRef, LinkMeta) { + reachable_map: @mut HashSet, + maps: astencode::Maps) + -> (ContextRef, ModuleRef, LinkMeta) { let mut symbol_hasher = hash::default_state(); let link_meta = link::build_link_meta(sess, crate, output, &mut symbol_hasher); - let reachable = reachable::find_reachable( - &crate.node.module, - emap2, - tcx, - maps.method_map - ); // Append ".rc" to crate name as LLVM module identifier. // diff --git a/src/librustc/middle/trans/callee.rs b/src/librustc/middle/trans/callee.rs index 045c8ec01aa..3405db8e52f 100644 --- a/src/librustc/middle/trans/callee.rs +++ b/src/librustc/middle/trans/callee.rs @@ -146,7 +146,7 @@ pub fn trans(bcx: block, expr: @ast::expr) -> Callee { ast::def_static(*) | ast::def_ty(*) | ast::def_prim_ty(*) | ast::def_use(*) | ast::def_typaram_binder(*) | ast::def_region(*) | ast::def_label(*) | ast::def_ty_param(*) | - ast::def_self_ty(*) => { + ast::def_self_ty(*) | ast::def_method(*) => { bcx.tcx().sess.span_bug( ref_expr.span, fmt!("Cannot translate def %? \ diff --git a/src/librustc/middle/trans/common.rs b/src/librustc/middle/trans/common.rs index 4e3583a4095..94a314862cb 100644 --- a/src/librustc/middle/trans/common.rs +++ b/src/librustc/middle/trans/common.rs @@ -117,7 +117,84 @@ pub fn BuilderRef_res(B: BuilderRef) -> BuilderRef_res { } } -pub type ExternMap = HashMap<@str, ValueRef>; +pub type ExternMap = @mut HashMap<@str, ValueRef>; + +// Crate context. Every crate we compile has one of these. 
+pub struct CrateContext { + sess: session::Session, + llmod: ModuleRef, + td: TargetData, + tn: @TypeNames, + externs: ExternMap, + intrinsics: HashMap<&'static str, ValueRef>, + item_vals: @mut HashMap, + exp_map2: resolve::ExportMap2, + item_symbols: @mut HashMap, + link_meta: LinkMeta, + enum_sizes: @mut HashMap, + discrims: @mut HashMap, + discrim_symbols: @mut HashMap, + tydescs: @mut HashMap, + // Set when running emit_tydescs to enforce that no more tydescs are + // created. + finished_tydescs: @mut bool, + // Track mapping of external ids to local items imported for inlining + external: @mut HashMap>, + // Cache instances of monomorphized functions + monomorphized: @mut HashMap, + monomorphizing: @mut HashMap, + // Cache computed type parameter uses (see type_use.rs) + type_use_cache: @mut HashMap, + // Cache generated vtables + vtables: @mut HashMap, + // Cache of constant strings, + const_cstr_cache: @mut HashMap<@str, ValueRef>, + + // Reverse-direction for const ptrs cast from globals. + // Key is an int, cast from a ValueRef holding a *T, + // Val is a ValueRef holding a *[T]. + // + // Needed because LLVM loses pointer->pointee association + // when we ptrcast, and we have to ptrcast during translation + // of a [T] const because we form a slice, a [*T,int] pair, not + // a pointer to an LLVM array type. + const_globals: @mut HashMap, + + // Cache of emitted const values + const_values: @mut HashMap, + + // Cache of external const values + extern_const_values: @mut HashMap, + + module_data: @mut HashMap<~str, ValueRef>, + lltypes: @mut HashMap, + llsizingtypes: @mut HashMap, + adt_reprs: @mut HashMap, + names: namegen, + next_addrspace: addrspace_gen, + symbol_hasher: @mut hash::State, + type_hashcodes: @mut HashMap, + type_short_names: @mut HashMap, + all_llvm_symbols: @mut HashSet<@str>, + tcx: ty::ctxt, + maps: astencode::Maps, + stats: @mut Stats, + upcalls: @upcall::Upcalls, + tydesc_type: TypeRef, + int_type: TypeRef, + float_type: TypeRef, + opaque_vec_type: TypeRef, + builder: BuilderRef_res, + shape_cx: shape::Ctxt, + crate_map: ValueRef, + // Set when at least one function uses GC. Needed so that + // decl_gc_metadata knows whether to link to the module metadata, which + // is not emitted by LLVM's GC pass when no functions use GC. + uses_gc: @mut bool, + dbg_cx: Option, + do_not_commit_warning_issued: @mut bool, + reachable_map: @mut HashSet, +} // Types used for llself. pub struct ValSelfData { diff --git a/src/librustc/middle/trans/mod.rs b/src/librustc/middle/trans/mod.rs index c2a25d80998..64d6bbec87c 100644 --- a/src/librustc/middle/trans/mod.rs +++ b/src/librustc/middle/trans/mod.rs @@ -37,7 +37,6 @@ pub mod foreign; pub mod reflect; pub mod debuginfo; pub mod type_use; -pub mod reachable; pub mod machine; pub mod adt; pub mod asm; diff --git a/src/librustc/middle/trans/reachable.rs b/src/librustc/middle/trans/reachable.rs deleted file mode 100644 index e950c24c49e..00000000000 --- a/src/librustc/middle/trans/reachable.rs +++ /dev/null @@ -1,246 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// Finds items that are externally reachable, to determine which items -// need to have their metadata (and possibly their AST) serialized. 
-// All items that can be referred to through an exported name are -// reachable, and when a reachable thing is inline or generic, it -// makes all other generics or inline functions that it references -// reachable as well. - -use core::prelude::*; - -use middle::resolve; -use middle::ty; -use middle::typeck; - -use core::hashmap::HashSet; -use syntax::ast; -use syntax::ast::*; -use syntax::ast_util::def_id_of_def; -use syntax::attr; -use syntax::codemap; -use syntax::print::pprust::expr_to_str; -use syntax::{visit, ast_map}; - -pub type map = @HashSet; - -struct ctx<'self> { - exp_map2: resolve::ExportMap2, - tcx: ty::ctxt, - method_map: typeck::method_map, - rmap: &'self mut HashSet, -} - -pub fn find_reachable(crate_mod: &_mod, exp_map2: resolve::ExportMap2, - tcx: ty::ctxt, method_map: typeck::method_map) -> map { - let mut rmap = HashSet::new(); - { - let cx = @mut ctx { - exp_map2: exp_map2, - tcx: tcx, - method_map: method_map, - rmap: &mut rmap - }; - traverse_public_mod(cx, ast::crate_node_id, crate_mod); - traverse_all_resources_and_impls(cx, crate_mod); - } - return @rmap; -} - -fn traverse_exports(cx: @mut ctx, mod_id: node_id) -> bool { - let mut found_export = false; - match cx.exp_map2.find(&mod_id) { - Some(ref exp2s) => { - for (*exp2s).iter().advance |e2| { - found_export = true; - traverse_def_id(cx, e2.def_id) - }; - } - None => () - } - return found_export; -} - -fn traverse_def_id(cx: @mut ctx, did: def_id) { - if did.crate != local_crate { return; } - match cx.tcx.items.find(&did.node) { - None => (), // This can happen for self, for example - Some(&ast_map::node_item(item, _)) => traverse_public_item(cx, item), - Some(&ast_map::node_method(_, impl_id, _)) => traverse_def_id(cx, impl_id), - Some(&ast_map::node_foreign_item(item, _, _, _)) => { - let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut - cx.rmap.insert(item.id); - } - Some(&ast_map::node_variant(ref v, _, _)) => { - let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut - cx.rmap.insert(v.node.id); - } - _ => () - } -} - -fn traverse_public_mod(cx: @mut ctx, mod_id: node_id, m: &_mod) { - if !traverse_exports(cx, mod_id) { - // No exports, so every local item is exported - for m.items.iter().advance |item| { - traverse_public_item(cx, *item); - } - } -} - -fn traverse_public_item(cx: @mut ctx, item: @item) { - { - // FIXME #6021: naming rmap shouldn't be necessary - let cx = &mut *cx; - let rmap: &mut HashSet = cx.rmap; - if rmap.contains(&item.id) { return; } - rmap.insert(item.id); - } - - match item.node { - item_mod(ref m) => traverse_public_mod(cx, item.id, m), - item_foreign_mod(ref nm) => { - if !traverse_exports(cx, item.id) { - for nm.items.iter().advance |item| { - let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut - cx.rmap.insert(item.id); - } - } - } - item_fn(_, _, _, ref generics, ref blk) => { - if generics.ty_params.len() > 0u || - attr::find_inline_attr(item.attrs) != attr::ia_none { - traverse_inline_body(cx, blk); - } - } - item_impl(ref generics, _, _, ref ms) => { - for ms.iter().advance |m| { - if generics.ty_params.len() > 0u || - m.generics.ty_params.len() > 0u || - attr::find_inline_attr(m.attrs) != attr::ia_none - { - { - let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut - cx.rmap.insert(m.id); - } - traverse_inline_body(cx, &m.body); - } - } - } - item_struct(ref struct_def, _) => { - for struct_def.ctor_id.iter().advance |&ctor_id| { - let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut - cx.rmap.insert(ctor_id); - } - } - item_ty(t, _) => { - 
traverse_ty(t, (cx, - visit::mk_vt(@visit::Visitor {visit_ty: traverse_ty, - ..*visit::default_visitor()}))) - } - item_static(*) | - item_enum(*) | item_trait(*) => (), - item_mac(*) => fail!("item macros unimplemented") - } -} - -fn traverse_ty<'a>(ty: @Ty, (cx, v): (@mut ctx<'a>, visit::vt<@mut ctx<'a>>)) { - { - let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut - if cx.rmap.contains(&ty.id) { return; } - cx.rmap.insert(ty.id); - } - - match ty.node { - ty_path(p, _bounds, p_id) => { - match cx.tcx.def_map.find(&p_id) { - // Kind of a hack to check this here, but I'm not sure what else - // to do - Some(&def_prim_ty(_)) => { /* do nothing */ } - Some(&d) => traverse_def_id(cx, def_id_of_def(d)), - None => { /* do nothing -- but should we fail here? */ } - } - for p.types.iter().advance |t| { - (v.visit_ty)(*t, (cx, v)); - } - } - _ => visit::visit_ty(ty, (cx, v)) - } -} - -fn traverse_inline_body(cx: @mut ctx, body: &blk) { - fn traverse_expr<'a>(e: @expr, (cx, v): (@mut ctx<'a>, - visit::vt<@mut ctx<'a>>)) { - match e.node { - expr_path(_) => { - match cx.tcx.def_map.find(&e.id) { - Some(&d) => { - traverse_def_id(cx, def_id_of_def(d)); - } - None => cx.tcx.sess.span_bug( - e.span, - fmt!("Unbound node id %? while traversing %s", - e.id, - expr_to_str(e, cx.tcx.sess.intr()))) - } - } - expr_method_call(*) => { - match cx.method_map.find(&e.id) { - Some(&typeck::method_map_entry { - origin: typeck::method_static(did), - _ - }) => { - traverse_def_id(cx, did); - } - Some(_) => {} - None => { - cx.tcx.sess.span_bug(e.span, "expr_method_call not in \ - method map"); - } - } - } - _ => () - } - visit::visit_expr(e, (cx, v)); - } - // Don't ignore nested items: for example if a generic fn contains a - // generic impl (as in deque::create), we need to monomorphize the - // impl as well - fn traverse_item(i: @item, (cx, _v): (@mut ctx, visit::vt<@mut ctx>)) { - traverse_public_item(cx, i); - } - visit::visit_block(body, (cx, visit::mk_vt(@visit::Visitor { - visit_expr: traverse_expr, - visit_item: traverse_item, - ..*visit::default_visitor() - }))); -} - -fn traverse_all_resources_and_impls(cx: @mut ctx, crate_mod: &_mod) { - visit::visit_mod( - crate_mod, - codemap::dummy_sp(), - 0, - (cx, - visit::mk_vt(@visit::Visitor { - visit_expr: |_e, (_cx, _v)| { }, - visit_item: |i, (cx, v)| { - visit::visit_item(i, (cx, v)); - match i.node { - item_impl(*) => { - traverse_public_item(cx, i); - } - _ => () - } - }, - ..*visit::default_visitor() - }))); -} diff --git a/src/librustc/middle/typeck/check/mod.rs b/src/librustc/middle/typeck/check/mod.rs index 1da76644244..b4a710e15b8 100644 --- a/src/librustc/middle/typeck/check/mod.rs +++ b/src/librustc/middle/typeck/check/mod.rs @@ -791,10 +791,6 @@ impl FnCtxt { ast_ty_to_ty(self, self, ast_t) } - pub fn expr_to_str(&self, expr: @ast::expr) -> ~str { - expr.repr(self.tcx()) - } - pub fn pat_to_str(&self, pat: @ast::pat) -> ~str { pat.repr(self.tcx()) } @@ -3293,6 +3289,9 @@ pub fn ty_param_bounds_and_ty_for_def(fcx: @mut FnCtxt, ast::def_self_ty(*) => { fcx.ccx.tcx.sess.span_bug(sp, "expected value but found self ty"); } + ast::def_method(*) => { + fcx.ccx.tcx.sess.span_bug(sp, "expected value but found method"); + } } } diff --git a/src/librustc/middle/typeck/collect.rs b/src/librustc/middle/typeck/collect.rs index 94520b6faca..5065a475a40 100644 --- a/src/librustc/middle/typeck/collect.rs +++ b/src/librustc/middle/typeck/collect.rs @@ -81,10 +81,20 @@ pub fn collect_item_types(ccx: @mut CrateCtxt, crate: &ast::crate) { }))); } -impl CrateCtxt { 
+pub trait ToTy { fn to_ty( - &self, rs: &RS, ast_ty: &ast::Ty) -> ty::t - { + &self, + rs: &RS, + ast_ty: &ast::Ty) + -> ty::t; +} + +impl ToTy for CrateCtxt { + fn to_ty( + &self, + rs: &RS, + ast_ty: &ast::Ty) + -> ty::t { ast_ty_to_ty(self, rs, ast_ty) } } diff --git a/src/librustc/middle/typeck/infer/combine.rs b/src/librustc/middle/typeck/infer/combine.rs index 884f72b57f0..7a4ea0999ef 100644 --- a/src/librustc/middle/typeck/infer/combine.rs +++ b/src/librustc/middle/typeck/infer/combine.rs @@ -64,7 +64,8 @@ use middle::typeck::infer::glb::Glb; use middle::typeck::infer::lub::Lub; use middle::typeck::infer::sub::Sub; use middle::typeck::infer::to_str::InferStr; -use middle::typeck::infer::{cres, InferCtxt, ures}; +use middle::typeck::infer::unify::{InferCtxtMethods, UnifyInferCtxtMethods}; +use middle::typeck::infer::{InferCtxt, cres, ures}; use util::common::indent; use core::result::{iter_vec2, map_vec2}; diff --git a/src/librustc/middle/typeck/infer/lattice.rs b/src/librustc/middle/typeck/infer/lattice.rs index 658fd67ee58..ed19310d5d6 100644 --- a/src/librustc/middle/typeck/infer/lattice.rs +++ b/src/librustc/middle/typeck/infer/lattice.rs @@ -71,15 +71,53 @@ impl LatticeValue for ty::t { } } -impl CombineFields { - pub fn var_sub_var>>(&self, - a_id: - V, - b_id: - V) - -> - ures { +pub trait CombineFieldsLatticeMethods { + fn var_sub_var>>(&self, + a_id: V, + b_id: V) + -> ures; + /// make variable a subtype of T + fn var_sub_t>>( + &self, + a_id: V, + b: T) + -> ures; + fn t_sub_var>>( + &self, + a: T, + b_id: V) + -> ures; + fn merge_bnd( + &self, + a: &Bound, + b: &Bound, + lattice_op: LatticeOp) + -> cres>; + fn set_var_to_merged_bounds>>( + &self, + v_id: V, + a: &Bounds, + b: &Bounds, + rank: uint) + -> ures; + fn bnds( + &self, + a: &Bound, + b: &Bound) + -> ures; +} + +impl CombineFieldsLatticeMethods for CombineFields { + fn var_sub_var>>( + &self, + a_id: V, + b_id: V) + -> ures { /*! * * Make one variable a subtype of another variable. This is a @@ -127,12 +165,12 @@ impl CombineFields { } /// make variable a subtype of T - pub fn var_sub_t>>(&self, - a_id: V, - b: T) - -> ures - { + fn var_sub_t>>( + &self, + a_id: V, + b: T) + -> ures { /*! * * Make a variable (`a_id`) a subtype of the concrete type `b` */ @@ -151,12 +189,12 @@ impl CombineFields { a_id, a_bounds, b_bounds, node_a.rank) } - pub fn t_sub_var>>(&self, - a: T, - b_id: V) - -> ures - { + fn t_sub_var>>( + &self, + a: T, + b_id: V) + -> ures { /*! * * Make a concrete type (`a`) a subtype of the variable `b_id` */ @@ -175,12 +213,12 @@ impl CombineFields { b_id, a_bounds, b_bounds, node_b.rank) } - pub fn merge_bnd(&self, - a: &Bound, - b: &Bound, - lattice_op: - LatticeOp) - -> cres> { + fn merge_bnd( + &self, + a: &Bound, + b: &Bound, + lattice_op: LatticeOp) + -> cres> { /*! * * Combines two bounds into a more general bound. */ @@ -202,14 +240,14 @@ impl CombineFields { } } - pub fn set_var_to_merged_bounds>>( - &self, - v_id: V, - a: &Bounds, - b: &Bounds, - rank: uint) - -> ures { + fn set_var_to_merged_bounds>>( + &self, + v_id: V, + a: &Bounds, + b: &Bounds, + rank: uint) + -> ures { /*! 
* * Updates the bounds for the variable `v_id` to be the intersection @@ -264,10 +302,10 @@ impl CombineFields { uok() } - pub fn bnds(&self, - a: &Bound, - b: &Bound) - -> ures { + fn bnds(&self, + a: &Bound, + b: &Bound) + -> ures { debug!("bnds(%s <: %s)", a.inf_str(self.infcx), b.inf_str(self.infcx)); let _r = indenter(); diff --git a/src/librustc/middle/typeck/infer/resolve.rs b/src/librustc/middle/typeck/infer/resolve.rs index ab52ef36978..1311907eed2 100644 --- a/src/librustc/middle/typeck/infer/resolve.rs +++ b/src/librustc/middle/typeck/infer/resolve.rs @@ -54,7 +54,7 @@ use middle::ty; use middle::typeck::infer::{Bounds, cyclic_ty, fixup_err, fres, InferCtxt}; use middle::typeck::infer::{region_var_bound_by_region_var, unresolved_ty}; use middle::typeck::infer::to_str::InferStr; -use middle::typeck::infer::unify::Root; +use middle::typeck::infer::unify::{Root, UnifyInferCtxtMethods}; use util::common::{indent, indenter}; use util::ppaux::ty_to_str; diff --git a/src/librustc/middle/typeck/infer/sub.rs b/src/librustc/middle/typeck/infer/sub.rs index 4462d43015c..eb912aa2dda 100644 --- a/src/librustc/middle/typeck/infer/sub.rs +++ b/src/librustc/middle/typeck/infer/sub.rs @@ -18,6 +18,7 @@ use middle::typeck::infer::combine::*; use middle::typeck::infer::cres; use middle::typeck::infer::glb::Glb; use middle::typeck::infer::InferCtxt; +use middle::typeck::infer::lattice::CombineFieldsLatticeMethods; use middle::typeck::infer::lub::Lub; use middle::typeck::infer::to_str::InferStr; use util::common::{indent, indenter}; diff --git a/src/librustc/middle/typeck/infer/unify.rs b/src/librustc/middle/typeck/infer/unify.rs index 371d389f712..a185633a7ac 100644 --- a/src/librustc/middle/typeck/infer/unify.rs +++ b/src/librustc/middle/typeck/infer/unify.rs @@ -40,9 +40,31 @@ pub trait UnifyVid { -> &'v mut ValsAndBindings; } -impl InferCtxt { - pub fn get>(&mut self, vid: V) - -> Node { +pub trait UnifyInferCtxtMethods { + fn get>( + &mut self, + vid: V) + -> Node; + fn set>( + &mut self, + vid: V, + new_v: VarValue); + fn unify>( + &mut self, + node_a: &Node, + node_b: &Node) + -> (V, uint); +} + +impl UnifyInferCtxtMethods for InferCtxt { + fn get>( + &mut self, + vid: V) + -> Node { /*! * * Find the root node for `vid`. This uses the standard @@ -84,10 +106,11 @@ impl InferCtxt { } } - pub fn set>(&mut self, - vid: V, - new_v: VarValue) { + fn set>( + &mut self, + vid: V, + new_v: VarValue) { /*! * * Sets the value for `vid` to `new_v`. `vid` MUST be a root node! @@ -102,11 +125,12 @@ impl InferCtxt { vb.vals.insert(vid.to_uint(), new_v); } - pub fn unify>(&mut self, - node_a: &Node, - node_b: &Node) - -> (V, uint) { + fn unify>( + &mut self, + node_a: &Node, + node_b: &Node) + -> (V, uint) { // Rank optimization: if you don't know what it is, check // out @@ -155,14 +179,31 @@ pub fn mk_err(a_is_expected: bool, } } -impl InferCtxt { - pub fn simple_vars>>(&mut self, - a_is_expected: - bool, - a_id: V, - b_id: V) - -> ures { +pub trait InferCtxtMethods { + fn simple_vars>>( + &mut self, + a_is_expected: bool, + a_id: V, + b_id: V) + -> ures; + fn simple_var_t>>( + &mut self, + a_is_expected: bool, + a_id: V, + b: T) + -> ures; +} + +impl InferCtxtMethods for InferCtxt { + fn simple_vars>>( + &mut self, + a_is_expected: bool, + a_id: V, + b_id: V) + -> ures { /*! * * Unifies two simple variables. 
Because simple variables do @@ -194,13 +235,13 @@ impl InferCtxt { return uok(); } - pub fn simple_var_t>>(&mut self, - a_is_expected - : bool, - a_id: V, - b: T) - -> ures { + fn simple_var_t>>( + &mut self, + a_is_expected: bool, + a_id: V, + b: T) + -> ures { /*! * * Sets the value of the variable `a_id` to `b`. Because diff --git a/src/librustc/rustc.rs b/src/librustc/rustc.rs index a27838b5d11..957cf02ed77 100644 --- a/src/librustc/rustc.rs +++ b/src/librustc/rustc.rs @@ -80,6 +80,7 @@ pub mod middle { pub mod moves; pub mod entry; pub mod effect; + pub mod reachable; } pub mod front { diff --git a/src/librustpkg/rustpkg.rs b/src/librustpkg/rustpkg.rs index d70428e7338..ca13ba39d59 100644 --- a/src/librustpkg/rustpkg.rs +++ b/src/librustpkg/rustpkg.rs @@ -185,7 +185,20 @@ impl<'self> PkgScript<'self> { } -impl Ctx { +pub trait CtxMethods { + fn run(&self, cmd: &str, args: ~[~str]); + fn do_cmd(&self, _cmd: &str, _pkgname: &str); + fn build(&self, workspace: &Path, pkgid: &PkgId); + fn clean(&self, workspace: &Path, id: &PkgId); + fn info(&self); + fn install(&self, workspace: &Path, id: &PkgId); + fn prefer(&self, _id: &str, _vers: Option<~str>); + fn test(&self); + fn uninstall(&self, _id: &str, _vers: Option<~str>); + fn unprefer(&self, _id: &str, _vers: Option<~str>); +} + +impl CtxMethods for Ctx { fn run(&self, cmd: &str, args: ~[~str]) { match cmd { diff --git a/src/libstd/path.rs b/src/libstd/path.rs index 6059ba5cbdd..b3b696a9a60 100644 --- a/src/libstd/path.rs +++ b/src/libstd/path.rs @@ -335,8 +335,8 @@ mod stat { } } - -impl Path { +#[cfg(target_os = "win32")] +impl WindowsPath { pub fn stat(&self) -> Option { unsafe { do str::as_c_str(self.to_str()) |buf| { @@ -349,12 +349,35 @@ impl Path { } } - #[cfg(unix)] - pub fn lstat(&self) -> Option { + pub fn exists(&self) -> bool { + match self.stat() { + None => false, + Some(_) => true, + } + } + + pub fn get_size(&self) -> Option { + match self.stat() { + None => None, + Some(ref st) => Some(st.st_size as i64), + } + } + + pub fn get_mode(&self) -> Option { + match self.stat() { + None => None, + Some(ref st) => Some(st.st_mode as uint), + } + } +} + +#[cfg(not(target_os = "win32"))] +impl PosixPath { + pub fn stat(&self) -> Option { unsafe { - do str::as_c_str(self.to_str()) |buf| { + do str::as_c_str(self.to_str()) |buf| { let mut st = stat::arch::default_stat(); - match libc::lstat(buf, &mut st) { + match libc::stat(buf, &mut st) { 0 => Some(st), _ => None, } @@ -396,7 +419,7 @@ impl Path { #[cfg(target_os = "freebsd")] #[cfg(target_os = "linux")] #[cfg(target_os = "macos")] -impl Path { +impl PosixPath { pub fn get_atime(&self) -> Option<(i64, int)> { match self.stat() { None => None, @@ -428,9 +451,24 @@ impl Path { } } +#[cfg(unix)] +impl PosixPath { + pub fn lstat(&self) -> Option { + unsafe { + do str::as_c_str(self.to_str()) |buf| { + let mut st = stat::arch::default_stat(); + match libc::lstat(buf, &mut st) { + 0 => Some(st), + _ => None, + } + } + } + } +} + #[cfg(target_os = "freebsd")] #[cfg(target_os = "macos")] -impl Path { +impl PosixPath { pub fn get_birthtime(&self) -> Option<(i64, int)> { match self.stat() { None => None, @@ -443,7 +481,7 @@ impl Path { } #[cfg(target_os = "win32")] -impl Path { +impl WindowsPath { pub fn get_atime(&self) -> Option<(i64, int)> { match self.stat() { None => None, diff --git a/src/libstd/str.rs b/src/libstd/str.rs index 07a67ca3b1e..9c94f36fba3 100644 --- a/src/libstd/str.rs +++ b/src/libstd/str.rs @@ -859,7 +859,8 @@ pub mod raw { /// invalidated later. 
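The path.rs hunks above split the old blanket impl Path into cfg-gated impl WindowsPath and impl PosixPath blocks, with exists, get_size, and get_mode added as thin wrappers over a single stat call. A rough sketch of that wrapper shape follows, under the assumption of a Unix target and with std::fs::metadata standing in for the patch's direct libc::stat; the free-function names are illustrative, not the patch's API.

// Sketch only: std::fs::metadata stands in for libc::stat, and the
// function names mirror the helpers added to PosixPath above.
use std::fs;
use std::os::unix::fs::MetadataExt; // assumes a Unix target, like the #[cfg(unix)] lstat impl

fn exists(path: &str) -> bool {
    // A successful stat is taken as "the path exists".
    fs::metadata(path).is_ok()
}

fn get_size(path: &str) -> Option<u64> {
    fs::metadata(path).ok().map(|st| st.len())
}

fn get_mode(path: &str) -> Option<u32> {
    fs::metadata(path).ok().map(|st| st.mode())
}

fn main() {
    // "/" exists on any Unix system; a made-up path does not.
    assert!(exists("/"));
    assert_eq!(get_size("/no/such/path/hopefully"), None);
    println!("mode of / = {:?}", get_mode("/"));
}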
pub unsafe fn c_str_to_static_slice(s: *libc::c_char) -> &'static str { let s = s as *u8; - let mut (curr, len) = (s, 0u); + let mut curr = s; + let mut len = 0u; while *curr != 0u8 { len += 1u; curr = ptr::offset(s, len); diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 265e9e444e9..8e1c51caf7c 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -205,7 +205,8 @@ pub enum def { def_struct(def_id), def_typaram_binder(node_id), /* struct, impl or trait with ty params */ def_region(node_id), - def_label(node_id) + def_label(node_id), + def_method(def_id /* method */, Option /* trait */), } @@ -1047,7 +1048,7 @@ pub struct trait_ref { pub enum visibility { public, private, inherited } impl visibility { - fn inherit_from(&self, parent_visibility: visibility) -> visibility { + pub fn inherit_from(&self, parent_visibility: visibility) -> visibility { match self { &inherited => parent_visibility, &public | &private => *self diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 4ffaba09061..6761736d2f3 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -59,7 +59,7 @@ pub fn def_id_of_def(d: def) -> def_id { def_fn(id, _) | def_static_method(id, _, _) | def_mod(id) | def_foreign_mod(id) | def_static(id, _) | def_variant(_, id) | def_ty(id) | def_ty_param(id, _) | - def_use(id) | def_struct(id) | def_trait(id) => { + def_use(id) | def_struct(id) | def_trait(id) | def_method(id, _) => { id } def_arg(id, _) | def_local(id, _) | def_self(id, _) | def_self_ty(id) diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index e003e2b27e9..04f62f35749 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -51,251 +51,3 @@ pub fn token_to_str(token: &token::Token) -> ~str { token::to_str(get_ident_interner(), token) } -impl Parser { - // convert a token to a string using self's reader - pub fn token_to_str(&self, token: &token::Token) -> ~str { - token::to_str(get_ident_interner(), token) - } - - // convert the current token to a string using self's reader - pub fn this_token_to_str(&self) -> ~str { - self.token_to_str(self.token) - } - - pub fn unexpected_last(&self, t: &token::Token) -> ! { - self.span_fatal( - *self.last_span, - fmt!( - "unexpected token: `%s`", - self.token_to_str(t) - ) - ); - } - - pub fn unexpected(&self) -> ! { - self.fatal( - fmt!( - "unexpected token: `%s`", - self.this_token_to_str() - ) - ); - } - - // expect and consume the token t. Signal an error if - // the next token is not t. - pub fn expect(&self, t: &token::Token) { - if *self.token == *t { - self.bump(); - } else { - self.fatal( - fmt!( - "expected `%s` but found `%s`", - self.token_to_str(t), - self.this_token_to_str() - ) - ) - } - } - - pub fn parse_ident(&self) -> ast::ident { - self.check_strict_keywords(); - self.check_reserved_keywords(); - match *self.token { - token::IDENT(i, _) => { - self.bump(); - i - } - token::INTERPOLATED(token::nt_ident(*)) => { - self.bug("ident interpolation not converted to real token"); - } - _ => { - self.fatal( - fmt!( - "expected ident, found `%s`", - self.this_token_to_str() - ) - ); - } - } - } - - pub fn parse_path_list_ident(&self) -> ast::path_list_ident { - let lo = self.span.lo; - let ident = self.parse_ident(); - let hi = self.last_span.hi; - spanned(lo, hi, ast::path_list_ident_ { name: ident, - id: self.get_id() }) - } - - // consume token 'tok' if it exists. Returns true if the given - // token was present, false otherwise. 
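The str.rs fix above replaces `let mut (curr, len) = (s, 0u)` with two separate bindings: `mut` applies to an individual binding rather than to a whole tuple pattern, so the combined form is being retired (consistent with the ObsoleteMutWithMultipleBindings handling referenced in the parser.rs imports later in this patch). A minimal sketch of the two accepted forms in modern syntax, with placeholder values and names that just mirror the hunk:

// Sketch only; dummy values, modern syntax.
fn main() {
    let s: usize = 100;

    // What the patch does: one binding per statement.
    let mut curr = s;
    let mut len = 0usize;

    // The other accepted form: `mut` written on each binding in the pattern.
    let (mut curr2, mut len2) = (s, 0usize);

    curr += 1;
    len += 1;
    curr2 += 1;
    len2 += 1;
    assert_eq!((curr, len), (curr2, len2));
}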
- pub fn eat(&self, tok: &token::Token) -> bool { - return if *self.token == *tok { self.bump(); true } else { false }; - } - - pub fn is_keyword(&self, kw: keywords::Keyword) -> bool { - token::is_keyword(kw, self.token) - } - - // if the next token is the given keyword, eat it and return - // true. Otherwise, return false. - pub fn eat_keyword(&self, kw: keywords::Keyword) -> bool { - let is_kw = match *self.token { - token::IDENT(sid, false) => kw.to_ident().name == sid.name, - _ => false - }; - if is_kw { self.bump() } - is_kw - } - - // if the given word is not a keyword, signal an error. - // if the next token is not the given word, signal an error. - // otherwise, eat it. - pub fn expect_keyword(&self, kw: keywords::Keyword) { - if !self.eat_keyword(kw) { - self.fatal( - fmt!( - "expected `%s`, found `%s`", - self.id_to_str(kw.to_ident()), - self.this_token_to_str() - ) - ); - } - } - - // signal an error if the given string is a strict keyword - pub fn check_strict_keywords(&self) { - if token::is_strict_keyword(self.token) { - self.span_err(*self.last_span, - fmt!("found `%s` in ident position", self.this_token_to_str())); - } - } - - // signal an error if the current token is a reserved keyword - pub fn check_reserved_keywords(&self) { - if token::is_reserved_keyword(self.token) { - self.fatal(fmt!("`%s` is a reserved keyword", self.this_token_to_str())); - } - } - - // expect and consume a GT. if a >> is seen, replace it - // with a single > and continue. If a GT is not seen, - // signal an error. - pub fn expect_gt(&self) { - if *self.token == token::GT { - self.bump(); - } else if *self.token == token::BINOP(token::SHR) { - self.replace_token( - token::GT, - self.span.lo + BytePos(1u), - self.span.hi - ); - } else { - let mut s: ~str = ~"expected `"; - s.push_str(self.token_to_str(&token::GT)); - s.push_str("`, found `"); - s.push_str(self.this_token_to_str()); - s.push_str("`"); - self.fatal(s); - } - } - - // parse a sequence bracketed by '<' and '>', stopping - // before the '>'. - pub fn parse_seq_to_before_gt(&self, - sep: Option, - f: &fn(&Parser) -> T) - -> OptVec { - let mut first = true; - let mut v = opt_vec::Empty; - while *self.token != token::GT - && *self.token != token::BINOP(token::SHR) { - match sep { - Some(ref t) => { - if first { first = false; } - else { self.expect(t); } - } - _ => () - } - v.push(f(self)); - } - return v; - } - - pub fn parse_seq_to_gt(&self, - sep: Option, - f: &fn(&Parser) -> T) - -> OptVec { - let v = self.parse_seq_to_before_gt(sep, f); - self.expect_gt(); - return v; - } - - // parse a sequence, including the closing delimiter. The function - // f must consume tokens until reaching the next separator or - // closing bracket. - pub fn parse_seq_to_end(&self, - ket: &token::Token, - sep: SeqSep, - f: &fn(&Parser) -> T) - -> ~[T] { - let val = self.parse_seq_to_before_end(ket, sep, f); - self.bump(); - val - } - - // parse a sequence, not including the closing delimiter. The function - // f must consume tokens until reaching the next separator or - // closing bracket. - pub fn parse_seq_to_before_end(&self, - ket: &token::Token, - sep: SeqSep, - f: &fn(&Parser) -> T) - -> ~[T] { - let mut first: bool = true; - let mut v: ~[T] = ~[]; - while *self.token != *ket { - match sep.sep { - Some(ref t) => { - if first { first = false; } - else { self.expect(t); } - } - _ => () - } - if sep.trailing_sep_allowed && *self.token == *ket { break; } - v.push(f(self)); - } - return v; - } - - // parse a sequence, including the closing delimiter. 
The function - // f must consume tokens until reaching the next separator or - // closing bracket. - pub fn parse_unspanned_seq(&self, - bra: &token::Token, - ket: &token::Token, - sep: SeqSep, - f: &fn(&Parser) -> T) - -> ~[T] { - self.expect(bra); - let result = self.parse_seq_to_before_end(ket, sep, f); - self.bump(); - result - } - - // NB: Do not use this function unless you actually plan to place the - // spanned list in the AST. - pub fn parse_seq(&self, - bra: &token::Token, - ket: &token::Token, - sep: SeqSep, - f: &fn(&Parser) -> T) - -> spanned<~[T]> { - let lo = self.span.lo; - self.expect(bra); - let result = self.parse_seq_to_before_end(ket, sep, f); - let hi = self.span.hi; - self.bump(); - spanned(lo, hi, result) - } -} diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index 7f2d06ee1e4..fff4c125af6 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -73,7 +73,26 @@ impl to_bytes::IterBytes for ObsoleteSyntax { } } -impl Parser { +pub trait ParserObsoleteMethods { + /// Reports an obsolete syntax non-fatal error. + fn obsolete(&self, sp: span, kind: ObsoleteSyntax); + // Reports an obsolete syntax non-fatal error, and returns + // a placeholder expression + fn obsolete_expr(&self, sp: span, kind: ObsoleteSyntax) -> @expr; + fn report(&self, + sp: span, + kind: ObsoleteSyntax, + kind_str: &str, + desc: &str); + fn token_is_obsolete_ident(&self, ident: &str, token: &Token) -> bool; + fn is_obsolete_ident(&self, ident: &str) -> bool; + fn eat_obsolete_ident(&self, ident: &str) -> bool; + fn try_parse_obsolete_struct_ctor(&self) -> bool; + fn try_parse_obsolete_with(&self) -> bool; + fn try_parse_obsolete_priv_section(&self, attrs: &[attribute]) -> bool; +} + +impl ParserObsoleteMethods for Parser { /// Reports an obsolete syntax non-fatal error. pub fn obsolete(&self, sp: span, kind: ObsoleteSyntax) { let (kind_str, desc) = match kind { diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index a7c46d609ca..64bdfd4a4d3 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -64,7 +64,7 @@ use codemap::{span, BytePos, spanned, mk_sp}; use codemap; use parse::attr::parser_attr; use parse::classify; -use parse::common::{seq_sep_none}; +use parse::common::{SeqSep, seq_sep_none}; use parse::common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed}; use parse::lexer::reader; use parse::lexer::TokenAndSpan; @@ -84,7 +84,7 @@ use parse::obsolete::{ObsoletePurity, ObsoleteStaticMethod}; use parse::obsolete::{ObsoleteConstItem, ObsoleteFixedLengthVectorType}; use parse::obsolete::{ObsoleteNamedExternModule, ObsoleteMultipleLocalDecl}; use parse::obsolete::{ObsoleteMutWithMultipleBindings}; -use parse::obsolete::{ObsoletePatternCopyKeyword}; +use parse::obsolete::{ObsoletePatternCopyKeyword, ParserObsoleteMethods}; use parse::token::{can_begin_expr, get_ident_interner, ident_to_str, is_ident}; use parse::token::{is_ident_or_path}; use parse::token::{is_plain_ident, INTERPOLATED, keywords, special_idents}; @@ -274,6 +274,253 @@ impl Drop for Parser { } impl Parser { + // convert a token to a string using self's reader + pub fn token_to_str(&self, token: &token::Token) -> ~str { + token::to_str(get_ident_interner(), token) + } + + // convert the current token to a string using self's reader + pub fn this_token_to_str(&self) -> ~str { + self.token_to_str(self.token) + } + + pub fn unexpected_last(&self, t: &token::Token) -> ! 
{ + self.span_fatal( + *self.last_span, + fmt!( + "unexpected token: `%s`", + self.token_to_str(t) + ) + ); + } + + pub fn unexpected(&self) -> ! { + self.fatal( + fmt!( + "unexpected token: `%s`", + self.this_token_to_str() + ) + ); + } + + // expect and consume the token t. Signal an error if + // the next token is not t. + pub fn expect(&self, t: &token::Token) { + if *self.token == *t { + self.bump(); + } else { + self.fatal( + fmt!( + "expected `%s` but found `%s`", + self.token_to_str(t), + self.this_token_to_str() + ) + ) + } + } + + pub fn parse_ident(&self) -> ast::ident { + self.check_strict_keywords(); + self.check_reserved_keywords(); + match *self.token { + token::IDENT(i, _) => { + self.bump(); + i + } + token::INTERPOLATED(token::nt_ident(*)) => { + self.bug("ident interpolation not converted to real token"); + } + _ => { + self.fatal( + fmt!( + "expected ident, found `%s`", + self.this_token_to_str() + ) + ); + } + } + } + + pub fn parse_path_list_ident(&self) -> ast::path_list_ident { + let lo = self.span.lo; + let ident = self.parse_ident(); + let hi = self.last_span.hi; + spanned(lo, hi, ast::path_list_ident_ { name: ident, + id: self.get_id() }) + } + + // consume token 'tok' if it exists. Returns true if the given + // token was present, false otherwise. + pub fn eat(&self, tok: &token::Token) -> bool { + return if *self.token == *tok { self.bump(); true } else { false }; + } + + pub fn is_keyword(&self, kw: keywords::Keyword) -> bool { + token::is_keyword(kw, self.token) + } + + // if the next token is the given keyword, eat it and return + // true. Otherwise, return false. + pub fn eat_keyword(&self, kw: keywords::Keyword) -> bool { + let is_kw = match *self.token { + token::IDENT(sid, false) => kw.to_ident().name == sid.name, + _ => false + }; + if is_kw { self.bump() } + is_kw + } + + // if the given word is not a keyword, signal an error. + // if the next token is not the given word, signal an error. + // otherwise, eat it. + pub fn expect_keyword(&self, kw: keywords::Keyword) { + if !self.eat_keyword(kw) { + self.fatal( + fmt!( + "expected `%s`, found `%s`", + *self.id_to_str(kw.to_ident()), + self.this_token_to_str() + ) + ); + } + } + + // signal an error if the given string is a strict keyword + pub fn check_strict_keywords(&self) { + if token::is_strict_keyword(self.token) { + self.span_err(*self.last_span, + fmt!("found `%s` in ident position", self.this_token_to_str())); + } + } + + // signal an error if the current token is a reserved keyword + pub fn check_reserved_keywords(&self) { + if token::is_reserved_keyword(self.token) { + self.fatal(fmt!("`%s` is a reserved keyword", self.this_token_to_str())); + } + } + + // expect and consume a GT. if a >> is seen, replace it + // with a single > and continue. If a GT is not seen, + // signal an error. + pub fn expect_gt(&self) { + if *self.token == token::GT { + self.bump(); + } else if *self.token == token::BINOP(token::SHR) { + self.replace_token( + token::GT, + self.span.lo + BytePos(1u), + self.span.hi + ); + } else { + let mut s: ~str = ~"expected `"; + s.push_str(self.token_to_str(&token::GT)); + s.push_str("`, found `"); + s.push_str(self.this_token_to_str()); + s.push_str("`"); + self.fatal(s); + } + } + + // parse a sequence bracketed by '<' and '>', stopping + // before the '>'. 
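expect_gt above handles the nested-generics case: when the lexer has produced a single `>>` (SHR) token where the grammar wants `>`, the parser does not fail but rewrites the current token to a lone `>` (adjusting its span) so the enclosing list can consume the second half. The following toy illustrates that token-splitting idea in modern Rust; Tok and Toks are made-up stand-ins, not the patch's Parser or token::Token.

// Toy sketch only: hypothetical token type and stream.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok {
    Gt,    // `>`
    Shr,   // `>>`
    Comma, // `,`
    Ident, // some identifier
}

struct Toks {
    toks: Vec<Tok>,
    pos: usize,
}

impl Toks {
    fn cur(&self) -> Tok {
        self.toks[self.pos]
    }
    fn bump(&mut self) {
        self.pos += 1;
    }
    // Same shape as expect_gt above: a `>>` is "split" by rewriting it to
    // a single `>` that stays current for the outer list to consume.
    fn expect_gt(&mut self) {
        match self.cur() {
            Tok::Gt => self.bump(),
            Tok::Shr => self.toks[self.pos] = Tok::Gt,
            other => panic!("expected `>`, found {:?}", other),
        }
    }
}

fn main() {
    // Roughly `Foo<Bar<T>>,`: the inner type list ends at the first half
    // of `>>`, the outer list at the second half.
    let mut p = Toks { toks: vec![Tok::Ident, Tok::Shr, Tok::Comma], pos: 1 };
    p.expect_gt();
    assert_eq!(p.cur(), Tok::Gt);
    p.expect_gt();
    assert_eq!(p.cur(), Tok::Comma);
}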
+ pub fn parse_seq_to_before_gt(&self, + sep: Option, + f: &fn(&Parser) -> T) + -> OptVec { + let mut first = true; + let mut v = opt_vec::Empty; + while *self.token != token::GT + && *self.token != token::BINOP(token::SHR) { + match sep { + Some(ref t) => { + if first { first = false; } + else { self.expect(t); } + } + _ => () + } + v.push(f(self)); + } + return v; + } + + pub fn parse_seq_to_gt(&self, + sep: Option, + f: &fn(&Parser) -> T) + -> OptVec { + let v = self.parse_seq_to_before_gt(sep, f); + self.expect_gt(); + return v; + } + + // parse a sequence, including the closing delimiter. The function + // f must consume tokens until reaching the next separator or + // closing bracket. + pub fn parse_seq_to_end(&self, + ket: &token::Token, + sep: SeqSep, + f: &fn(&Parser) -> T) + -> ~[T] { + let val = self.parse_seq_to_before_end(ket, sep, f); + self.bump(); + val + } + + // parse a sequence, not including the closing delimiter. The function + // f must consume tokens until reaching the next separator or + // closing bracket. + pub fn parse_seq_to_before_end(&self, + ket: &token::Token, + sep: SeqSep, + f: &fn(&Parser) -> T) + -> ~[T] { + let mut first: bool = true; + let mut v: ~[T] = ~[]; + while *self.token != *ket { + match sep.sep { + Some(ref t) => { + if first { first = false; } + else { self.expect(t); } + } + _ => () + } + if sep.trailing_sep_allowed && *self.token == *ket { break; } + v.push(f(self)); + } + return v; + } + + // parse a sequence, including the closing delimiter. The function + // f must consume tokens until reaching the next separator or + // closing bracket. + pub fn parse_unspanned_seq(&self, + bra: &token::Token, + ket: &token::Token, + sep: SeqSep, + f: &fn(&Parser) -> T) + -> ~[T] { + self.expect(bra); + let result = self.parse_seq_to_before_end(ket, sep, f); + self.bump(); + result + } + + // NB: Do not use this function unless you actually plan to place the + // spanned list in the AST. + pub fn parse_seq(&self, + bra: &token::Token, + ket: &token::Token, + sep: SeqSep, + f: &fn(&Parser) -> T) + -> spanned<~[T]> { + let lo = self.span.lo; + self.expect(bra); + let result = self.parse_seq_to_before_end(ket, sep, f); + let hi = self.span.hi; + self.bump(); + spanned(lo, hi, result) + } + // advance the parser by one token pub fn bump(&self) { *self.last_span = copy *self.span;