librustc: Rewrite reachability and forbid duplicate methods in type implementations.

This should allow fewer symbols to be exported.
This commit is contained in:
Patrick Walton 2013-06-14 18:21:47 -07:00 committed by Corey Richardson
parent a1531ed946
commit 03ab6351cc
30 changed files with 1331 additions and 961 deletions

View File

@ -12,6 +12,8 @@
use core::prelude::*;
use core::str;
// Simple Extensible Binary Markup Language (ebml) reader and writer on a
// cursor model. See the specification here:
// http://www.matroska.org/technical/specs/rfc/index.html
@ -34,6 +36,20 @@ pub struct Doc {
end: uint,
}
// Convenience accessors on an EBML document node (hoisted out of the
// `reader` module so both readers and writers can use them).
impl Doc {
    /// Returns the child document with the given EBML tag, delegating
    /// to `reader::get_doc`.
    pub fn get(&self, tag: uint) -> Doc {
        reader::get_doc(*self, tag)
    }

    /// Borrows this document's byte range [start, end) as a string
    /// slice. NOTE(review): assumes those bytes are valid UTF-8 —
    /// `str::from_bytes_slice` is expected to enforce that; confirm.
    pub fn as_str_slice<'a>(&'a self) -> &'a str {
        str::from_bytes_slice(self.data.slice(self.start, self.end))
    }

    /// Returns an owned (`~str`) copy of this document's contents.
    pub fn as_str(&self) -> ~str {
        self.as_str_slice().to_owned()
    }
}
pub struct TaggedDoc {
tag: uint,
doc: Doc,
@ -94,20 +110,6 @@ pub mod reader {
// ebml reading
// Old in-`reader` copy of the `Doc` accessors (removed by this commit;
// the identical impl now lives at module scope above).
impl Doc {
    /// Returns the child document with the given EBML tag.
    pub fn get(&self, tag: uint) -> Doc {
        get_doc(*self, tag)
    }

    /// Borrows this document's byte range [start, end) as a string
    /// slice. NOTE(review): assumes the bytes are valid UTF-8.
    pub fn as_str_slice<'a>(&'a self) -> &'a str {
        str::from_bytes_slice(self.data.slice(self.start, self.end))
    }

    /// Returns an owned (`~str`) copy of this document's contents.
    pub fn as_str(&self) -> ~str {
        self.as_str_slice().to_owned()
    }
}
struct Res {
val: uint,
next: uint

View File

@ -153,7 +153,7 @@ impl Sem<()> {
#[doc(hidden)]
impl Sem<~[Waitqueue]> {
pub fn access<U>(&self, blk: &fn() -> U) -> U {
pub fn access_waitqueue<U>(&self, blk: &fn() -> U) -> U {
let mut release = None;
unsafe {
do task::unkillable {
@ -456,7 +456,9 @@ impl Clone for Mutex {
impl Mutex {
/// Run a function with ownership of the mutex.
pub fn lock<U>(&self, blk: &fn() -> U) -> U { (&self.sem).access(blk) }
pub fn lock<U>(&self, blk: &fn() -> U) -> U {
(&self.sem).access_waitqueue(blk)
}
/// Run a function with ownership of the mutex and a handle to a condvar.
pub fn lock_cond<U>(&self, blk: &fn(c: &Condvar) -> U) -> U {
@ -559,7 +561,7 @@ impl RWlock {
unsafe {
do task::unkillable {
(&self.order_lock).acquire();
do (&self.access_lock).access {
do (&self.access_lock).access_waitqueue {
(&self.order_lock).release();
task::rekillable(blk)
}

View File

@ -19,15 +19,21 @@ use core::prelude::*;
use getopts;
use sort;
use stats::Stats;
use term;
use time::precise_time_ns;
use core::comm::{stream, SharedChan};
use core::either;
use core::io;
use core::num;
use core::option;
use core::rand::RngUtil;
use core::rand;
use core::result;
use core::task;
use core::to_str::ToStr;
use core::u64;
use core::uint;
use core::vec;
@ -609,152 +615,146 @@ fn calc_result(desc: &TestDesc, task_succeeded: bool) -> TestResult {
}
}
impl BenchHarness {
    /// Callback for benchmark functions to run in their body.
    /// Times `self.iterations` calls of `inner`, recording the start and
    /// end timestamps (nanoseconds) in `ns_start`/`ns_end`.
    pub fn iter(&mut self, inner:&fn()) {
        self.ns_start = precise_time_ns();
        let k = self.iterations;
        for u64::range(0, k) |_| {
            inner();
        }
        self.ns_end = precise_time_ns();
    }

    /// Total elapsed nanoseconds of the last timed run, or 0 when no
    /// run has been recorded yet (either timestamp still zero).
    pub fn ns_elapsed(&mut self) -> u64 {
        if self.ns_start == 0 || self.ns_end == 0 {
            0
        } else {
            self.ns_end - self.ns_start
        }
    }

    /// Average nanoseconds per iteration of the last run (integer
    /// division; 0 when no iterations were requested).
    pub fn ns_per_iter(&mut self) -> u64 {
        if self.iterations == 0 {
            0
        } else {
            self.ns_elapsed() / self.iterations
        }
    }

    /// Runs the benchmark body `f` for exactly `n` iterations.
    pub fn bench_n(&mut self, n: u64, f: &fn(&mut BenchHarness)) {
        self.iterations = n;
        debug!("running benchmark for %u iterations",
               n as uint);
        f(self);
    }

    // This is the Go benchmark algorithm. It produces a single
    // datapoint and always tries to run for 1s.
    pub fn go_bench(&mut self, f: &fn(&mut BenchHarness)) {

        // Rounds a number down to the nearest power of 10.
        fn round_down_10(n: u64) -> u64 {
            let mut n = n;
            let mut res = 1;
            while n > 10 {
                n = n / 10;
                res *= 10;
            }
            res
        }

        // Rounds x up to a number of the form [1eX, 2eX, 5eX].
        fn round_up(n: u64) -> u64 {
            let base = round_down_10(n);
            if n < (2 * base) {
                2 * base
            } else if n < (5 * base) {
                5 * base
            } else {
                10 * base
            }
        }

        // Initial bench run to get ballpark figure.
        let mut n = 1_u64;
        self.bench_n(n, f);

        // Re-estimate the iteration count until a run takes >= 1s of
        // wall time or we hit the 1bn-iteration cap.
        while n < 1_000_000_000 &&
              self.ns_elapsed() < 1_000_000_000 {
            let last = n;

            // Try to estimate iter count for 1s falling back to 1bn
            // iterations if first run took < 1ns.
            if self.ns_per_iter() == 0 {
                n = 1_000_000_000;
            } else {
                n = 1_000_000_000 / self.ns_per_iter();
            }

            // Grow by at least 1 and at most 100x per step, then snap
            // to a "nice" 1/2/5 * 10^k count via round_up.
            n = u64::max(u64::min(n+n/2, 100*last), last+1);
            n = round_up(n);
            self.bench_n(n, f);
        }
    }

    // This is a more statistics-driven benchmark algorithm.
    // It stops as quickly as 50ms, so long as the statistical
    // properties are satisfactory. If those properties are
    // not met, it may run as long as the Go algorithm.
    pub fn auto_bench(&mut self, f: &fn(&mut BenchHarness)) -> ~[f64] {

        let mut rng = rand::rng();
        let mut magnitude = 10;
        let mut prev_madp = 0.0;

        loop {
            // Slightly randomized sample/iteration counts decorrelate
            // successive rounds.
            let n_samples = rng.gen_uint_range(50, 60);
            let n_iter = rng.gen_uint_range(magnitude,
                                            magnitude * 2);

            // One sample = mean ns/iter over n_iter iterations.
            let samples = do vec::from_fn(n_samples) |_| {
                self.bench_n(n_iter as u64, f);
                self.ns_per_iter() as f64
            };

            // Eliminate outliers: keep only samples within 3 median
            // absolute deviations of the median.
            let med = samples.median();
            let mad = samples.median_abs_dev();
            let samples = do vec::filter(samples) |f| {
                num::abs(*f - med) <= 3.0 * mad
            };

            debug!("%u samples, median %f, MAD=%f, %u survived filter",
                   n_samples, med as float, mad as float,
                   samples.len());

            if samples.len() != 0 {
                // If we have _any_ cluster of signal...
                let curr_madp = samples.median_abs_dev_pct();
                // Accept once the last run took > 1ms and the relative
                // MAD is either small (< 1%) or has stabilized.
                if self.ns_elapsed() > 1_000_000 &&
                    (curr_madp < 1.0 ||
                     num::abs(curr_madp - prev_madp) < 0.1) {
                    return samples;
                }
                prev_madp = curr_madp;

                // Hard cap: give up and return what we have.
                if n_iter > 20_000_000 ||
                    self.ns_elapsed() > 20_000_000 {
                    return samples;
                }
            }

            magnitude *= 2;
        }
    }
}
pub mod bench {
use core::prelude::*;
use core::num;
use core::rand::RngUtil;
use core::rand;
use core::u64;
use core::vec;
use stats::Stats;
use test::{BenchHarness, BenchSamples};
use time::precise_time_ns;
// Note: this impl is a verbatim copy of the BenchHarness impl moved
// into `pub mod bench` by this commit.
impl BenchHarness {
    /// Callback for benchmark functions to run in their body.
    /// Times `self.iterations` calls of `inner`, recording the start and
    /// end timestamps (nanoseconds) in `ns_start`/`ns_end`.
    pub fn iter(&mut self, inner:&fn()) {
        self.ns_start = precise_time_ns();
        let k = self.iterations;
        for u64::range(0, k) |_| {
            inner();
        }
        self.ns_end = precise_time_ns();
    }

    /// Total elapsed nanoseconds of the last timed run, or 0 when no
    /// run has been recorded yet (either timestamp still zero).
    pub fn ns_elapsed(&mut self) -> u64 {
        if self.ns_start == 0 || self.ns_end == 0 {
            0
        } else {
            self.ns_end - self.ns_start
        }
    }

    /// Average nanoseconds per iteration of the last run (integer
    /// division; 0 when no iterations were requested).
    pub fn ns_per_iter(&mut self) -> u64 {
        if self.iterations == 0 {
            0
        } else {
            self.ns_elapsed() / self.iterations
        }
    }

    /// Runs the benchmark body `f` for exactly `n` iterations.
    pub fn bench_n(&mut self, n: u64, f: &fn(&mut BenchHarness)) {
        self.iterations = n;
        debug!("running benchmark for %u iterations",
               n as uint);
        f(self);
    }

    // This is the Go benchmark algorithm. It produces a single
    // datapoint and always tries to run for 1s.
    pub fn go_bench(&mut self, f: &fn(&mut BenchHarness)) {

        // Rounds a number down to the nearest power of 10.
        fn round_down_10(n: u64) -> u64 {
            let mut n = n;
            let mut res = 1;
            while n > 10 {
                n = n / 10;
                res *= 10;
            }
            res
        }

        // Rounds x up to a number of the form [1eX, 2eX, 5eX].
        fn round_up(n: u64) -> u64 {
            let base = round_down_10(n);
            if n < (2 * base) {
                2 * base
            } else if n < (5 * base) {
                5 * base
            } else {
                10 * base
            }
        }

        // Initial bench run to get ballpark figure.
        let mut n = 1_u64;
        self.bench_n(n, f);

        // Re-estimate the iteration count until a run takes >= 1s of
        // wall time or we hit the 1bn-iteration cap.
        while n < 1_000_000_000 &&
              self.ns_elapsed() < 1_000_000_000 {
            let last = n;

            // Try to estimate iter count for 1s falling back to 1bn
            // iterations if first run took < 1ns.
            if self.ns_per_iter() == 0 {
                n = 1_000_000_000;
            } else {
                n = 1_000_000_000 / self.ns_per_iter();
            }

            // Grow by at least 1 and at most 100x per step, then snap
            // to a "nice" 1/2/5 * 10^k count via round_up.
            n = u64::max(u64::min(n+n/2, 100*last), last+1);
            n = round_up(n);
            self.bench_n(n, f);
        }
    }

    // This is a more statistics-driven benchmark algorithm.
    // It stops as quickly as 50ms, so long as the statistical
    // properties are satisfactory. If those properties are
    // not met, it may run as long as the Go algorithm.
    pub fn auto_bench(&mut self, f: &fn(&mut BenchHarness)) -> ~[f64] {

        let mut rng = rand::rng();
        let mut magnitude = 10;
        let mut prev_madp = 0.0;

        loop {
            // Slightly randomized sample/iteration counts decorrelate
            // successive rounds.
            let n_samples = rng.gen_uint_range(50, 60);
            let n_iter = rng.gen_uint_range(magnitude,
                                            magnitude * 2);

            // One sample = mean ns/iter over n_iter iterations.
            let samples = do vec::from_fn(n_samples) |_| {
                self.bench_n(n_iter as u64, f);
                self.ns_per_iter() as f64
            };

            // Eliminate outliers: keep only samples within 3 median
            // absolute deviations of the median.
            let med = samples.median();
            let mad = samples.median_abs_dev();
            let samples = do vec::filter(samples) |f| {
                num::abs(*f - med) <= 3.0 * mad
            };

            debug!("%u samples, median %f, MAD=%f, %u survived filter",
                   n_samples, med as float, mad as float,
                   samples.len());

            if samples.len() != 0 {
                // If we have _any_ cluster of signal...
                let curr_madp = samples.median_abs_dev_pct();
                // Accept once the last run took > 1ms and the relative
                // MAD is either small (< 1%) or has stabilized.
                if self.ns_elapsed() > 1_000_000 &&
                    (curr_madp < 1.0 ||
                     num::abs(curr_madp - prev_madp) < 0.1) {
                    return samples;
                }
                prev_madp = curr_madp;

                // Hard cap: give up and return what we have.
                if n_iter > 20_000_000 ||
                    self.ns_elapsed() > 20_000_000 {
                    return samples;
                }
            }

            magnitude *= 2;
        }
    }
}
pub fn benchmark(f: &fn(&mut BenchHarness)) -> BenchSamples {

View File

@ -19,7 +19,7 @@ use front;
use lib::llvm::llvm;
use metadata::{creader, cstore, filesearch};
use metadata;
use middle::{trans, freevars, kind, ty, typeck, lint, astencode};
use middle::{trans, freevars, kind, ty, typeck, lint, astencode, reachable};
use middle;
use util::common::time;
use util::ppaux;
@ -299,10 +299,16 @@ pub fn compile_rest(sess: Session,
time(time_passes, ~"kind checking", ||
kind::check_crate(ty_cx, method_map, crate));
let reachable_map =
time(time_passes, ~"reachability checking", ||
reachable::find_reachable(ty_cx, method_map, crate));
time(time_passes, ~"lint checking", ||
lint::check_crate(ty_cx, crate));
if phases.to == cu_no_trans { return (Some(crate), Some(ty_cx)); }
if phases.to == cu_no_trans {
return (Some(crate), Some(ty_cx));
}
let maps = astencode::Maps {
root_map: root_map,
@ -315,9 +321,13 @@ pub fn compile_rest(sess: Session,
let outputs = outputs.get_ref();
time(time_passes, ~"translation", ||
trans::base::trans_crate(sess, crate, ty_cx,
trans::base::trans_crate(sess,
crate,
ty_cx,
&outputs.obj_filename,
exp_map2, maps))
exp_map2,
reachable_map,
maps))
};
let outputs = outputs.get_ref();

View File

@ -16,7 +16,6 @@ use metadata::common::*;
use metadata::cstore;
use metadata::decoder;
use metadata::tyencode;
use middle::trans::reachable;
use middle::ty::node_id_to_type;
use middle::ty;
use middle;
@ -60,7 +59,6 @@ pub type encode_inlined_item<'self> = &'self fn(ecx: &EncodeContext,
pub struct EncodeParams<'self> {
diag: @span_handler,
tcx: ty::ctxt,
reachable: reachable::map,
reexports2: middle::resolve::ExportMap2,
item_symbols: &'self HashMap<ast::node_id, ~str>,
discrim_symbols: &'self HashMap<ast::node_id, @str>,
@ -87,7 +85,6 @@ pub struct EncodeContext<'self> {
diag: @span_handler,
tcx: ty::ctxt,
stats: @mut Stats,
reachable: reachable::map,
reexports2: middle::resolve::ExportMap2,
item_symbols: &'self HashMap<ast::node_id, ~str>,
discrim_symbols: &'self HashMap<ast::node_id, @str>,
@ -157,8 +154,8 @@ fn encode_trait_ref(ebml_w: &mut writer::Encoder,
diag: ecx.diag,
ds: def_to_str,
tcx: ecx.tcx,
reachable: |a| r.contains(&a),
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)
};
ebml_w.start_tag(tag);
tyencode::enc_trait_ref(ebml_w.writer, ty_str_ctxt, trait_ref);
@ -185,8 +182,8 @@ fn encode_ty_type_param_defs(ebml_w: &mut writer::Encoder,
diag: ecx.diag,
ds: def_to_str,
tcx: ecx.tcx,
reachable: |a| r.contains(&a),
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)
};
for params.iter().advance |param| {
ebml_w.start_tag(tag);
tyencode::enc_type_param_def(ebml_w.writer, ty_str_ctxt, param);
@ -218,8 +215,8 @@ pub fn write_type(ecx: &EncodeContext,
diag: ecx.diag,
ds: def_to_str,
tcx: ecx.tcx,
reachable: |a| r.contains(&a),
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)
};
tyencode::enc_ty(ebml_w.writer, ty_str_ctxt, typ);
}
@ -231,8 +228,8 @@ pub fn write_vstore(ecx: &EncodeContext,
diag: ecx.diag,
ds: def_to_str,
tcx: ecx.tcx,
reachable: |a| r.contains(&a),
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)
};
tyencode::enc_vstore(ebml_w.writer, ty_str_ctxt, vstore);
}
@ -264,8 +261,8 @@ fn encode_method_fty(ecx: &EncodeContext,
diag: ecx.diag,
ds: def_to_str,
tcx: ecx.tcx,
reachable: |a| r.contains(&a),
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)
};
tyencode::enc_bare_fn_ty(ebml_w.writer, ty_str_ctxt, typ);
ebml_w.end_tag();
@ -780,13 +777,6 @@ fn encode_info_for_item(ecx: &EncodeContext,
index: @mut ~[entry<int>],
path: &[ast_map::path_elt]) {
let tcx = ecx.tcx;
let must_write =
match item.node {
item_enum(_, _) | item_impl(*) | item_trait(*) | item_struct(*) |
item_mod(*) | item_foreign_mod(*) | item_static(*) => true,
_ => false
};
if !must_write && !reachable(ecx, item.id) { return; }
fn add_to_index_(item: @item, ebml_w: &writer::Encoder,
index: @mut ~[entry<int>]) {
@ -898,23 +888,6 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_type_param_bounds(ebml_w, ecx, &generics.ty_params);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
// If this is a tuple- or enum-like struct, encode the type of the
// constructor.
if struct_def.fields.len() > 0 &&
struct_def.fields[0].node.kind == ast::unnamed_field {
let ctor_id = match struct_def.ctor_id {
Some(ctor_id) => ctor_id,
None => ecx.tcx.sess.bug("struct def didn't have ctor id"),
};
encode_info_for_struct_ctor(ecx,
ebml_w,
path,
item.ident,
ctor_id,
index);
}
encode_name(ecx, ebml_w, item.ident);
encode_attributes(ebml_w, item.attrs);
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
@ -944,6 +917,23 @@ fn encode_info_for_item(ecx: &EncodeContext,
let bkts = create_index(idx);
encode_index(ebml_w, bkts, write_int);
ebml_w.end_tag();
// If this is a tuple- or enum-like struct, encode the type of the
// constructor.
if struct_def.fields.len() > 0 &&
struct_def.fields[0].node.kind == ast::unnamed_field {
let ctor_id = match struct_def.ctor_id {
Some(ctor_id) => ctor_id,
None => ecx.tcx.sess.bug("struct def didn't have ctor id"),
};
encode_info_for_struct_ctor(ecx,
ebml_w,
path,
item.ident,
ctor_id,
index);
}
}
item_impl(ref generics, opt_trait, ty, ref methods) => {
add_to_index();
@ -1092,7 +1082,6 @@ fn encode_info_for_foreign_item(ecx: &EncodeContext,
index: @mut ~[entry<int>],
path: ast_map::path,
abi: AbiSet) {
if !reachable(ecx, nitem.id) { return; }
index.push(entry { val: nitem.id, pos: ebml_w.writer.tell() });
ebml_w.start_tag(tag_items_data_item);
@ -1162,6 +1151,12 @@ fn encode_info_for_items(ecx: &EncodeContext,
visit::visit_foreign_item(ni, (cx, v));
match items.get_copy(&ni.id) {
ast_map::node_foreign_item(_, abi, _, pt) => {
debug!("writing foreign item %s::%s",
ast_map::path_to_str(
*pt,
token::get_ident_interner()),
*token::ident_to_str(&ni.ident));
let mut ebml_w = copy ebml_w;
// See above
let ecx : &EncodeContext = unsafe { cast::transmute(ecx_ptr) };
@ -1466,7 +1461,7 @@ pub fn encode_metadata(parms: EncodeParams, crate: &crate) -> ~[u8] {
total_bytes: 0,
n_inlines: 0
};
let EncodeParams{item_symbols, diag, tcx, reachable, reexports2,
let EncodeParams{item_symbols, diag, tcx, reexports2,
discrim_symbols, cstore, encode_inlined_item,
link_meta, _} = parms;
let type_abbrevs = @mut HashMap::new();
@ -1475,7 +1470,6 @@ pub fn encode_metadata(parms: EncodeParams, crate: &crate) -> ~[u8] {
diag: diag,
tcx: tcx,
stats: stats,
reachable: reachable,
reexports2: reexports2,
item_symbols: item_symbols,
discrim_symbols: discrim_symbols,
@ -1557,7 +1551,6 @@ pub fn encoded_ty(tcx: ty::ctxt, t: ty::t) -> ~str {
diag: tcx.diag,
ds: def_to_str,
tcx: tcx,
reachable: |_id| false,
abbrevs: tyencode::ac_no_abbrevs};
do io::with_str_writer |wr| {
tyencode::enc_ty(wr, cx, t);

View File

@ -31,7 +31,6 @@ pub struct ctxt {
ds: @fn(def_id) -> ~str,
// The type context.
tcx: ty::ctxt,
reachable: @fn(node_id) -> bool,
abbrevs: abbrev_ctxt
}

View File

@ -368,14 +368,17 @@ impl tr for ast::def {
ast::def_static_method(did.tr(xcx),
did2_opt.map(|did2| did2.tr(xcx)),
p)
},
ast::def_self_ty(nid) => ast::def_self_ty(xcx.tr_id(nid)),
ast::def_self(nid, i) => ast::def_self(xcx.tr_id(nid), i),
ast::def_mod(did) => ast::def_mod(did.tr(xcx)),
ast::def_foreign_mod(did) => ast::def_foreign_mod(did.tr(xcx)),
ast::def_static(did, m) => ast::def_static(did.tr(xcx), m),
ast::def_arg(nid, b) => ast::def_arg(xcx.tr_id(nid), b),
ast::def_local(nid, b) => ast::def_local(xcx.tr_id(nid), b),
}
ast::def_method(did0, did1) => {
ast::def_method(did0.tr(xcx), did1.map(|did1| did1.tr(xcx)))
}
ast::def_self_ty(nid) => { ast::def_self_ty(xcx.tr_id(nid)) }
ast::def_self(nid, i) => { ast::def_self(xcx.tr_id(nid), i) }
ast::def_mod(did) => { ast::def_mod(did.tr(xcx)) }
ast::def_foreign_mod(did) => { ast::def_foreign_mod(did.tr(xcx)) }
ast::def_static(did, m) => { ast::def_static(did.tr(xcx), m) }
ast::def_arg(nid, b) => { ast::def_arg(xcx.tr_id(nid), b) }
ast::def_local(nid, b) => { ast::def_local(xcx.tr_id(nid), b) }
ast::def_variant(e_did, v_did) => {
ast::def_variant(e_did.tr(xcx), v_did.tr(xcx))
},
@ -692,12 +695,12 @@ trait get_ty_str_ctxt {
impl<'self> get_ty_str_ctxt for e::EncodeContext<'self> {
fn ty_str_ctxt(&self) -> @tyencode::ctxt {
let r = self.reachable;
@tyencode::ctxt {diag: self.tcx.sess.diagnostic(),
ds: e::def_to_str,
tcx: self.tcx,
reachable: |a| r.contains(&a),
abbrevs: tyencode::ac_use_abbrevs(self.type_abbrevs)}
@tyencode::ctxt {
diag: self.tcx.sess.diagnostic(),
ds: e::def_to_str,
tcx: self.tcx,
abbrevs: tyencode::ac_use_abbrevs(self.type_abbrevs)
}
}
}

View File

@ -0,0 +1,414 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Finds items that are externally reachable, to determine which items
// need to have their metadata (and possibly their AST) serialized.
// All items that can be referred to through an exported name are
// reachable, and when a reachable thing is inline or generic, it
// makes all other generics or inline functions that it references
// reachable as well.
use core::prelude::*;
use core::iterator::IteratorUtil;
use middle::resolve;
use middle::ty;
use middle::typeck;
use core::hashmap::HashSet;
use syntax::ast::*;
use syntax::ast;
use syntax::ast_map;
use syntax::ast_util::def_id_of_def;
use syntax::attr;
use syntax::codemap;
use syntax::parse::token;
use syntax::visit::Visitor;
use syntax::visit;
// Returns true if the given set of attributes contains the `#[inline]`
// attribute.
fn attributes_specify_inlining(attrs: &[attribute]) -> bool {
    // Any attribute named "inline" counts, regardless of its arguments.
    attr::attrs_contains_name(attrs, "inline")
}
// Returns true if the given set of generics implies that the item it's
// associated with must be inlined.
fn generics_require_inlining(generics: &Generics) -> bool {
    // A type-parametric item is monomorphized at each use site, so its
    // body must be available to downstream crates (i.e. "inlined").
    !generics.ty_params.is_empty()
}
// Returns true if the given item must be inlined because it may be
// monomorphized or it was marked with `#[inline]`. This will only return
// true for functions.
fn item_might_be_inlined(item: @item) -> bool {
    // An explicit #[inline] attribute wins regardless of the item kind.
    if attributes_specify_inlining(item.attrs) {
        return true
    }

    // Otherwise only generic functions are implicitly inlinable; all
    // other item kinds report false here.
    match item.node {
        item_fn(_, _, _, ref generics, _) => {
            generics_require_inlining(generics)
        }
        _ => false,
    }
}
// Returns true if the given type method must be inlined because it may be
// monomorphized or it was marked with `#[inline]`.
fn ty_method_might_be_inlined(ty_method: &ty_method) -> bool {
    // Inlinable if explicitly #[inline] or generic (monomorphized).
    attributes_specify_inlining(ty_method.attrs) ||
        generics_require_inlining(&ty_method.generics)
}
// Returns true if the given trait method must be inlined because it may be
// monomorphized or it was marked with `#[inline]`.
fn trait_method_might_be_inlined(trait_method: &trait_method) -> bool {
    match *trait_method {
        required(ref ty_method) => ty_method_might_be_inlined(ty_method),
        // A provided method carries a default body that any impl in any
        // crate may use, so it is always treated as inlinable.
        provided(_) => true
    }
}
// Information needed while computing reachability.
struct ReachableContext {
    // The type context.
    tcx: ty::ctxt,

    // The method map, which links node IDs of method call expressions to the
    // methods they've been resolved to.
    method_map: typeck::method_map,

    // The set of items which must be exported in the linkage sense.
    // (@mut box so the visitor closures can share and mutate it.)
    reachable_symbols: @mut HashSet<node_id>,

    // A worklist of item IDs. Each item ID in this worklist will be inlined
    // and will be scanned for further references.
    // (@mut for the same closure-sharing reason as above.)
    worklist: @mut ~[node_id],
}
impl ReachableContext {
    // Creates a new reachability computation context with empty
    // reachable-symbol set and worklist.
    fn new(tcx: ty::ctxt, method_map: typeck::method_map)
           -> ReachableContext {
        ReachableContext {
            tcx: tcx,
            method_map: method_map,
            reachable_symbols: @mut HashSet::new(),
            worklist: @mut ~[],
        }
    }

    // Step 1: Mark all public symbols, and add all public symbols that might
    // be inlined to a worklist.
    fn mark_public_symbols(&self, crate: @crate) {
        // Copy the @mut boxes out of self so the visitor closure can
        // capture and mutate them without borrowing self.
        let reachable_symbols = self.reachable_symbols;
        let worklist = self.worklist;
        let visitor = visit::mk_vt(@Visitor {
            visit_item: |item, _, visitor| {
                match item.node {
                    item_fn(*) => {
                        // Public functions are linkable symbols; generic
                        // or #[inline] ones also seed the worklist.
                        reachable_symbols.insert(item.id);
                        if item_might_be_inlined(item) {
                            worklist.push(item.id)
                        }
                    }
                    item_struct(ref struct_def, _) => {
                        // Only the implicit constructor of a tuple-like
                        // struct is itself a linkable symbol.
                        match struct_def.ctor_id {
                            None => {}
                            Some(ctor_id) => {
                                reachable_symbols.insert(ctor_id);
                            }
                        }
                    }
                    item_enum(ref enum_def, _) => {
                        // Every variant constructor is reachable.
                        for enum_def.variants.each |variant| {
                            reachable_symbols.insert(variant.node.id);
                        }
                    }
                    item_impl(ref generics, trait_ref, _, ref methods) => {
                        // XXX(pcwalton): We conservatively assume any methods
                        // on a trait implementation are reachable, when this
                        // is not the case. We could be more precise by only
                        // treating implementations of reachable or cross-
                        // crate traits as reachable.

                        // Mark all public methods as reachable.
                        for methods.each |method| {
                            if method.vis == public || trait_ref.is_some() {
                                reachable_symbols.insert(method.id);
                            }
                        }

                        if generics_require_inlining(generics) {
                            // If the impl itself has generics, add all public
                            // symbols to the worklist.
                            for methods.each |method| {
                                if method.vis == public ||
                                        trait_ref.is_some() {
                                    worklist.push(method.id)
                                }
                            }
                        } else {
                            // Otherwise, add only public methods that have
                            // generics to the worklist.
                            for methods.each |method| {
                                let generics = &method.generics;
                                let attrs = &method.attrs;
                                if generics_require_inlining(generics) ||
                                        attributes_specify_inlining(*attrs) ||
                                        method.vis == public ||
                                        trait_ref.is_some() {
                                    worklist.push(method.id)
                                }
                            }
                        }
                    }
                    item_trait(_, _, ref trait_methods) => {
                        // Mark all provided methods as reachable (their
                        // default bodies may be used by impls anywhere).
                        for trait_methods.each |trait_method| {
                            match *trait_method {
                                provided(method) => {
                                    reachable_symbols.insert(method.id);
                                    worklist.push(method.id)
                                }
                                required(_) => {}
                            }
                        }
                    }
                    _ => {}
                }

                // Only recurse into public items: items nested inside a
                // private item cannot be named from outside the crate.
                if item.vis == public {
                    visit::visit_item(item, (), visitor)
                }
            },
            .. *visit::default_visitor()
        });

        visit::visit_crate(crate, (), visitor)
    }

    // Returns true if the given def ID represents a local item that is
    // eligible for inlining and false otherwise.
    // (Static method: called through `ReachableContext::` from closures
    // that cannot capture `self`.)
    fn def_id_represents_local_inlined_item(tcx: ty::ctxt, def_id: def_id)
                                            -> bool {
        // Cross-crate defs can never be locally inlined items.
        if def_id.crate != local_crate {
            return false
        }

        let node_id = def_id.node;
        match tcx.items.find(&node_id) {
            Some(&ast_map::node_item(item, _)) => {
                match item.node {
                    item_fn(*) => item_might_be_inlined(item),
                    _ => false,
                }
            }
            Some(&ast_map::node_trait_method(trait_method, _, _)) => {
                match *trait_method {
                    required(_) => false,
                    provided(_) => true,
                }
            }
            Some(&ast_map::node_method(method, impl_did, _)) => {
                if generics_require_inlining(&method.generics) ||
                        attributes_specify_inlining(method.attrs) {
                    true
                } else {
                    // Check the impl. If the generics on the self type of the
                    // impl require inlining, this method does too.
                    assert!(impl_did.crate == local_crate);
                    match tcx.items.find(&impl_did.node) {
                        Some(&ast_map::node_item(item, _)) => {
                            match item.node {
                                item_impl(ref generics, _, _, _) => {
                                    generics_require_inlining(generics)
                                }
                                _ => false
                            }
                        }
                        Some(_) => {
                            tcx.sess.span_bug(method.span,
                                              "method is not inside an \
                                               impl?!")
                        }
                        None => {
                            tcx.sess.span_bug(method.span,
                                              "the impl that this method is \
                                               supposedly inside of doesn't \
                                               exist in the AST map?!")
                        }
                    }
                }
            }
            Some(_) => false,
            None => tcx.sess.bug("def ID not in def map?!"),
        }
    }

    // Helper function to set up a visitor for `propagate()` below.
    fn init_visitor(&self) -> visit::vt<()> {
        // As in mark_public_symbols: copy the shared state out of self
        // so the expression-visitor closure can capture it.
        let (worklist, method_map) = (self.worklist, self.method_map);
        let (tcx, reachable_symbols) = (self.tcx, self.reachable_symbols);
        visit::mk_vt(@visit::Visitor {
            visit_expr: |expr, _, visitor| {
                match expr.node {
                    expr_path(_) => {
                        // A path expression references a def: mark it
                        // reachable and, if it's a local inlinable item,
                        // queue it for further scanning.
                        let def = match tcx.def_map.find(&expr.id) {
                            Some(&def) => def,
                            None => {
                                tcx.sess.span_bug(expr.span,
                                                  "def ID not in def map?!")
                            }
                        };

                        let def_id = def_id_of_def(def);
                        if ReachableContext::
                                def_id_represents_local_inlined_item(tcx,
                                                                     def_id) {
                            worklist.push(def_id.node)
                        }
                        reachable_symbols.insert(def_id.node);
                    }
                    expr_method_call(*) => {
                        // Only statically-dispatched calls resolve to a
                        // concrete def we can mark; other origins (e.g.
                        // trait dispatch) are ignored here.
                        match method_map.find(&expr.id) {
                            Some(&typeck::method_map_entry {
                                origin: typeck::method_static(def_id),
                                _
                            }) => {
                                if ReachableContext::
                                        def_id_represents_local_inlined_item(
                                            tcx,
                                            def_id) {
                                    worklist.push(def_id.node)
                                }
                                reachable_symbols.insert(def_id.node);
                            }
                            Some(_) => {}
                            None => {
                                tcx.sess.span_bug(expr.span,
                                                  "method call expression \
                                                   not in method map?!")
                            }
                        }
                    }
                    _ => {}
                }

                visit::visit_expr(expr, (), visitor)
            },
            ..*visit::default_visitor()
        })
    }

    // Step 2: Mark all symbols that the symbols on the worklist touch.
    fn propagate(&self) {
        let visitor = self.init_visitor();
        // Tracks items already scanned so cycles in the reference graph
        // terminate.
        let mut scanned = HashSet::new();
        while self.worklist.len() > 0 {
            let search_item = self.worklist.pop();
            if scanned.contains(&search_item) {
                // NOTE: in this era of Rust, a bare `loop` statement
                // means `continue`.
                loop
            }
            scanned.insert(search_item);
            self.reachable_symbols.insert(search_item);

            // Find the AST block corresponding to the item and visit it,
            // marking all path expressions that resolve to something
            // interesting.
            match self.tcx.items.find(&search_item) {
                Some(&ast_map::node_item(item, _)) => {
                    match item.node {
                        item_fn(_, _, _, _, ref search_block) => {
                            visit::visit_block(search_block, (), visitor)
                        }
                        _ => {
                            self.tcx.sess.span_bug(item.span,
                                                   "found non-function item \
                                                    in worklist?!")
                        }
                    }
                }
                Some(&ast_map::node_trait_method(trait_method, _, _)) => {
                    match *trait_method {
                        required(ref ty_method) => {
                            self.tcx.sess.span_bug(ty_method.span,
                                                   "found required method in \
                                                    worklist?!")
                        }
                        provided(ref method) => {
                            visit::visit_block(&method.body, (), visitor)
                        }
                    }
                }
                Some(&ast_map::node_method(ref method, _, _)) => {
                    visit::visit_block(&method.body, (), visitor)
                }
                Some(_) => {
                    let ident_interner = token::get_ident_interner();
                    let desc = ast_map::node_id_to_str(self.tcx.items,
                                                       search_item,
                                                       ident_interner);
                    self.tcx.sess.bug(fmt!("found unexpected thingy in \
                                            worklist: %s",
                                           desc))
                }
                None => {
                    self.tcx.sess.bug(fmt!("found unmapped ID in worklist: \
                                            %d",
                                           search_item))
                }
            }
        }
    }

    // Step 3: Mark all destructors as reachable.
    //
    // XXX(pcwalton): This is a conservative overapproximation, but fixing
    // this properly would result in the necessity of computing *type*
    // reachability, which might result in a compile time loss.
    fn mark_destructors_reachable(&self) {
        for self.tcx.destructor_for_type.each |_, destructor_def_id| {
            // Only local destructors are symbols of this crate.
            if destructor_def_id.crate == local_crate {
                self.reachable_symbols.insert(destructor_def_id.node);
            }
        }
    }
}
/// Entry point of the reachability pass: returns the set of node IDs
/// whose symbols must be exported from this crate (public items plus
/// everything transitively touched by inlinable/generic code, plus all
/// local destructors).
pub fn find_reachable(tcx: ty::ctxt,
                      method_map: typeck::method_map,
                      crate: @crate)
                      -> @mut HashSet<node_id> {
    // XXX(pcwalton): We only need to mark symbols that are exported. But this
    // is more complicated than just looking at whether the symbol is `pub`,
    // because it might be the target of a `pub use` somewhere. For now, I
    // think we are fine, because you can't `pub use` something that wasn't
    // exported due to the bug whereby `use` only looks through public
    // modules even if you're inside the module the `use` appears in. When
    // this bug is fixed, however, this code will need to be updated. Probably
    // the easiest way to fix this (although a conservative overapproximation)
    // is to have the name resolution pass mark all targets of a `pub use` as
    // "must be reachable".

    let reachable_context = ReachableContext::new(tcx, method_map);

    // Step 1: Mark all public symbols, and add all public symbols that might
    // be inlined to a worklist.
    reachable_context.mark_public_symbols(crate);

    // Step 2: Mark all symbols that the symbols on the worklist touch.
    reachable_context.propagate();

    // Step 3: Mark all destructors as reachable.
    reachable_context.mark_destructors_reachable();

    // Return the set of reachable symbols.
    reachable_context.reachable_symbols
}

View File

@ -652,21 +652,9 @@ impl NameBindings {
match self.type_def {
None => None,
Some(ref type_def) => {
// FIXME (#3784): This is reallllly questionable.
// Perhaps the right thing to do is to merge def_mod
// and def_ty.
match (*type_def).type_def {
Some(type_def) => Some(type_def),
None => {
match (*type_def).module_def {
Some(module_def) => {
let module_def = &mut *module_def;
module_def.def_id.map(|def_id|
def_mod(*def_id))
}
None => None
}
}
None => None,
}
}
}
@ -1230,49 +1218,29 @@ impl Resolver {
visit_item(item, (new_parent, visitor));
}
item_impl(_, trait_ref_opt, ty, ref methods) => {
// If this implements an anonymous trait and it has static
// methods, then add all the static methods within to a new
// module, if the type was defined within this module.
item_impl(_, None, ty, ref methods) => {
// If this implements an anonymous trait, then add all the
// methods within to a new module, if the type was defined
// within this module.
//
// FIXME (#3785): This is quite unsatisfactory. Perhaps we
// should modify anonymous traits to only be implementable in
// the same module that declared the type.
// Bail out early if there are no static methods.
let mut methods_seen = HashMap::new();
let mut has_static_methods = false;
for methods.iter().advance |method| {
match method.explicit_self.node {
sty_static => has_static_methods = true,
_ => {
// Make sure you can't define duplicate methods
let ident = method.ident;
let span = method.span;
let old_sp = methods_seen.find_or_insert(ident, span);
if *old_sp != span {
self.session.span_err(span,
fmt!("duplicate definition of method `%s`",
self.session.str_of(ident)));
self.session.span_note(*old_sp,
fmt!("first definition of method `%s` here",
self.session.str_of(ident)));
}
}
}
}
// If there are static methods, then create the module
// and add them.
match (trait_ref_opt, ty) {
(None, @Ty { node: ty_path(path, _, _), _ }) if
has_static_methods && path.idents.len() == 1 => {
// Create the module and add all methods.
match *ty {
Ty {
node: ty_path(path, _),
_
} if path.idents.len() == 1 => {
let name = path_to_ident(path);
let new_parent = match parent.children.find(&name) {
// It already exists
Some(&child) if child.get_module_if_available().is_some() &&
child.get_module().kind == ImplModuleKind => {
Some(&child) if child.get_module_if_available()
.is_some() &&
child.get_module().kind ==
ImplModuleKind => {
ModuleReducedGraphParent(child.get_module())
}
// Create the module
@ -1283,8 +1251,8 @@ impl Resolver {
ForbidDuplicateModules,
sp);
let parent_link = self.get_parent_link(new_parent,
ident);
let parent_link =
self.get_parent_link(new_parent, ident);
let def_id = local_def(item.id);
name_bindings.define_module(Public,
parent_link,
@ -1292,30 +1260,36 @@ impl Resolver {
ImplModuleKind,
sp);
ModuleReducedGraphParent(name_bindings.get_module())
ModuleReducedGraphParent(
name_bindings.get_module())
}
};
// For each static method...
// For each method...
for methods.iter().advance |method| {
match method.explicit_self.node {
// Add the method to the module.
let ident = method.ident;
let (method_name_bindings, _) =
self.add_child(ident,
new_parent,
ForbidDuplicateValues,
method.span);
let def = match method.explicit_self.node {
sty_static => {
// Add the static method to the
// module.
let ident = method.ident;
let (method_name_bindings, _) =
self.add_child(
ident,
new_parent,
ForbidDuplicateValues,
method.span);
let def = def_fn(local_def(method.id),
method.purity);
method_name_bindings.define_value(
Public, def, method.span);
// Static methods become `def_fn`s.
def_fn(local_def(method.id),
method.purity)
}
_ => {}
}
_ => {
// Non-static methods become
// `def_method`s.
def_method(local_def(method.id), None)
}
};
method_name_bindings.define_value(Public,
def,
method.span);
}
}
_ => {}
@ -1324,41 +1298,23 @@ impl Resolver {
visit_item(item, (parent, visitor));
}
item_impl(_, Some(_), ty, ref methods) => {
visit_item(item, parent, visitor);
}
item_trait(_, _, ref methods) => {
let (name_bindings, new_parent) =
self.add_child(ident, parent, ForbidDuplicateTypes, sp);
// If the trait has static methods, then add all the static
// methods within to a new module.
//
// We only need to create the module if the trait has static
// methods, so check that first.
let mut has_static_methods = false;
for (*methods).iter().advance |method| {
let ty_m = trait_method_to_ty_method(method);
match ty_m.explicit_self.node {
sty_static => {
has_static_methods = true;
break;
}
_ => {}
}
}
// Create the module if necessary.
let module_parent_opt;
if has_static_methods {
let parent_link = self.get_parent_link(parent, ident);
name_bindings.define_module(privacy,
parent_link,
Some(local_def(item.id)),
TraitModuleKind,
sp);
module_parent_opt = Some(ModuleReducedGraphParent(
name_bindings.get_module()));
} else {
module_parent_opt = None;
}
// Add all the methods within to a new module.
let parent_link = self.get_parent_link(parent, ident);
name_bindings.define_module(privacy,
parent_link,
Some(local_def(item.id)),
TraitModuleKind,
sp);
let module_parent = ModuleReducedGraphParent(name_bindings.
get_module());
// Add the names of all the methods to the trait info.
let mut method_names = HashMap::new();
@ -1366,35 +1322,34 @@ impl Resolver {
let ty_m = trait_method_to_ty_method(method);
let ident = ty_m.ident;
// Add it to the trait info if not static,
// add it as a name in the trait module otherwise.
match ty_m.explicit_self.node {
sty_static => {
let def = def_static_method(
local_def(ty_m.id),
Some(local_def(item.id)),
ty_m.purity);
let (method_name_bindings, _) =
self.add_child(ident,
module_parent_opt.get(),
ForbidDuplicateValues,
ty_m.span);
method_name_bindings.define_value(Public,
def,
ty_m.span);
// Add it as a name in the trait module.
let def = match ty_m.explicit_self.node {
sty_static => {
// Static methods become `def_static_method`s.
def_static_method(local_def(ty_m.id),
Some(local_def(item.id)),
ty_m.purity)
}
_ => {
// Make sure you can't define duplicate methods
let old_sp = method_names.find_or_insert(ident, ty_m.span);
if *old_sp != ty_m.span {
self.session.span_err(ty_m.span,
fmt!("duplicate definition of method `%s`",
self.session.str_of(ident)));
self.session.span_note(*old_sp,
fmt!("first definition of method `%s` here",
self.session.str_of(ident)));
}
// Non-static methods become `def_method`s.
def_method(local_def(ty_m.id),
Some(local_def(item.id)))
}
};
let (method_name_bindings, _) =
self.add_child(ident,
module_parent,
ForbidDuplicateValues,
ty_m.span);
method_name_bindings.define_value(Public, def, ty_m.span);
// Add it to the trait info if not static.
match ty_m.explicit_self.node {
sty_static => {}
_ => {
method_names.insert(ident);
}
}
}
@ -1751,6 +1706,9 @@ impl Resolver {
child_name_bindings.define_type(privacy, def, dummy_sp());
self.structs.insert(def_id);
}
def_method(*) => {
// Ignored; handled elsewhere.
}
def_self(*) | def_arg(*) | def_local(*) |
def_prim_ty(*) | def_ty_param(*) | def_binding(*) |
def_use(*) | def_upvar(*) | def_region(*) |
@ -2391,7 +2349,8 @@ impl Resolver {
}
match type_result {
BoundResult(target_module, name_bindings) => {
debug!("(resolving single import) found type target");
debug!("(resolving single import) found type target: %?",
name_bindings.type_def.get().type_def);
import_resolution.type_target =
Some(Target(target_module, name_bindings));
import_resolution.type_id = directive.id;
@ -3269,22 +3228,8 @@ impl Resolver {
pub fn add_exports_for_module(@mut self,
exports2: &mut ~[Export2],
module_: @mut Module) {
for module_.children.iter().advance |(ident, namebindings)| {
debug!("(computing exports) maybe export '%s'",
self.session.str_of(*ident));
self.add_exports_of_namebindings(&mut *exports2,
*ident,
*namebindings,
TypeNS,
false);
self.add_exports_of_namebindings(&mut *exports2,
*ident,
*namebindings,
ValueNS,
false);
}
for module_.import_resolutions.iter().advance |(ident, importresolution)| {
for module_.import_resolutions.iter().advance |ident,
importresolution| {
if importresolution.privacy != Public {
debug!("(computing exports) not reexporting private `%s`",
self.session.str_of(*ident));
@ -4518,8 +4463,8 @@ impl Resolver {
if path.global {
return self.resolve_crate_relative_path(path,
self.xray_context,
namespace);
self.xray_context,
namespace);
}
if path.idents.len() > 1 {
@ -4947,6 +4892,22 @@ impl Resolver {
// Write the result into the def map.
debug!("(resolving expr) resolved `%s`",
self.idents_to_str(path.idents));
// First-class methods are not supported yet; error
// out here.
match def {
def_method(*) => {
self.session.span_err(expr.span,
"first-class methods \
are not supported");
self.session.span_note(expr.span,
"call the method \
using the `.` \
syntax");
}
_ => {}
}
self.record_def(expr.id, def);
}
None => {
@ -5415,7 +5376,7 @@ pub fn resolve_crate(session: Session,
-> CrateMap {
let resolver = @mut Resolver(session, lang_items, crate);
resolver.resolve();
let Resolver{def_map, export_map2, trait_map, _} = copy *resolver;
let Resolver { def_map, export_map2, trait_map, _ } = copy *resolver;
CrateMap {
def_map: def_map,
exp_map2: export_map2,

View File

@ -54,7 +54,6 @@ use middle::trans::machine;
use middle::trans::machine::{llalign_of_min, llsize_of};
use middle::trans::meth;
use middle::trans::monomorphize;
use middle::trans::reachable;
use middle::trans::tvec;
use middle::trans::type_of;
use middle::trans::type_of::*;
@ -2437,7 +2436,6 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::node_id) -> ValueRef {
}
}
ast_map::node_method(m, _, pth) => {
exprt = true;
register_method(ccx, id, pth, m)
}
ast_map::node_foreign_item(ni, _, _, pth) => {
@ -2511,7 +2509,7 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::node_id) -> ValueRef {
variant))
}
};
if !(exprt || ccx.reachable.contains(&id)) {
if !exprt {
lib::llvm::SetLinkage(val, lib::llvm::InternalLinkage);
}
ccx.item_vals.insert(id, val);
@ -2890,16 +2888,12 @@ pub fn trans_crate(sess: session::Session,
tcx: ty::ctxt,
output: &Path,
emap2: resolve::ExportMap2,
maps: astencode::Maps) -> (ContextRef, ModuleRef, LinkMeta) {
reachable_map: @mut HashSet<ast::node_id>,
maps: astencode::Maps)
-> (ContextRef, ModuleRef, LinkMeta) {
let mut symbol_hasher = hash::default_state();
let link_meta = link::build_link_meta(sess, crate, output, &mut symbol_hasher);
let reachable = reachable::find_reachable(
&crate.node.module,
emap2,
tcx,
maps.method_map
);
// Append ".rc" to crate name as LLVM module identifier.
//

View File

@ -146,7 +146,7 @@ pub fn trans(bcx: block, expr: @ast::expr) -> Callee {
ast::def_static(*) | ast::def_ty(*) | ast::def_prim_ty(*) |
ast::def_use(*) | ast::def_typaram_binder(*) |
ast::def_region(*) | ast::def_label(*) | ast::def_ty_param(*) |
ast::def_self_ty(*) => {
ast::def_self_ty(*) | ast::def_method(*) => {
bcx.tcx().sess.span_bug(
ref_expr.span,
fmt!("Cannot translate def %? \

View File

@ -117,7 +117,84 @@ pub fn BuilderRef_res(B: BuilderRef) -> BuilderRef_res {
}
}
pub type ExternMap = HashMap<@str, ValueRef>;
pub type ExternMap = @mut HashMap<@str, ValueRef>;
// Crate context. Every crate we compile has one of these.
pub struct CrateContext {
sess: session::Session,
llmod: ModuleRef,
td: TargetData,
tn: @TypeNames,
externs: ExternMap,
intrinsics: HashMap<&'static str, ValueRef>,
item_vals: @mut HashMap<ast::node_id, ValueRef>,
exp_map2: resolve::ExportMap2,
item_symbols: @mut HashMap<ast::node_id, ~str>,
link_meta: LinkMeta,
enum_sizes: @mut HashMap<ty::t, uint>,
discrims: @mut HashMap<ast::def_id, ValueRef>,
discrim_symbols: @mut HashMap<ast::node_id, @str>,
tydescs: @mut HashMap<ty::t, @mut tydesc_info>,
// Set when running emit_tydescs to enforce that no more tydescs are
// created.
finished_tydescs: @mut bool,
// Track mapping of external ids to local items imported for inlining
external: @mut HashMap<ast::def_id, Option<ast::node_id>>,
// Cache instances of monomorphized functions
monomorphized: @mut HashMap<mono_id, ValueRef>,
monomorphizing: @mut HashMap<ast::def_id, uint>,
// Cache computed type parameter uses (see type_use.rs)
type_use_cache: @mut HashMap<ast::def_id, @~[type_use::type_uses]>,
// Cache generated vtables
vtables: @mut HashMap<mono_id, ValueRef>,
// Cache of constant strings,
const_cstr_cache: @mut HashMap<@str, ValueRef>,
// Reverse-direction for const ptrs cast from globals.
// Key is an int, cast from a ValueRef holding a *T,
// Val is a ValueRef holding a *[T].
//
// Needed because LLVM loses pointer->pointee association
// when we ptrcast, and we have to ptrcast during translation
// of a [T] const because we form a slice, a [*T,int] pair, not
// a pointer to an LLVM array type.
const_globals: @mut HashMap<int, ValueRef>,
// Cache of emitted const values
const_values: @mut HashMap<ast::node_id, ValueRef>,
// Cache of external const values
extern_const_values: @mut HashMap<ast::def_id, ValueRef>,
module_data: @mut HashMap<~str, ValueRef>,
lltypes: @mut HashMap<ty::t, TypeRef>,
llsizingtypes: @mut HashMap<ty::t, TypeRef>,
adt_reprs: @mut HashMap<ty::t, @adt::Repr>,
names: namegen,
next_addrspace: addrspace_gen,
symbol_hasher: @mut hash::State,
type_hashcodes: @mut HashMap<ty::t, @str>,
type_short_names: @mut HashMap<ty::t, ~str>,
all_llvm_symbols: @mut HashSet<@str>,
tcx: ty::ctxt,
maps: astencode::Maps,
stats: @mut Stats,
upcalls: @upcall::Upcalls,
tydesc_type: TypeRef,
int_type: TypeRef,
float_type: TypeRef,
opaque_vec_type: TypeRef,
builder: BuilderRef_res,
shape_cx: shape::Ctxt,
crate_map: ValueRef,
// Set when at least one function uses GC. Needed so that
// decl_gc_metadata knows whether to link to the module metadata, which
// is not emitted by LLVM's GC pass when no functions use GC.
uses_gc: @mut bool,
dbg_cx: Option<debuginfo::DebugContext>,
do_not_commit_warning_issued: @mut bool,
reachable_map: @mut HashSet<ast::node_id>,
}
// Types used for llself.
pub struct ValSelfData {

View File

@ -37,7 +37,6 @@ pub mod foreign;
pub mod reflect;
pub mod debuginfo;
pub mod type_use;
pub mod reachable;
pub mod machine;
pub mod adt;
pub mod asm;

View File

@ -1,246 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Finds items that are externally reachable, to determine which items
// need to have their metadata (and possibly their AST) serialized.
// All items that can be referred to through an exported name are
// reachable, and when a reachable thing is inline or generic, it
// makes all other generics or inline functions that it references
// reachable as well.
use core::prelude::*;
use middle::resolve;
use middle::ty;
use middle::typeck;
use core::hashmap::HashSet;
use syntax::ast;
use syntax::ast::*;
use syntax::ast_util::def_id_of_def;
use syntax::attr;
use syntax::codemap;
use syntax::print::pprust::expr_to_str;
use syntax::{visit, ast_map};
pub type map = @HashSet<node_id>;
struct ctx<'self> {
exp_map2: resolve::ExportMap2,
tcx: ty::ctxt,
method_map: typeck::method_map,
rmap: &'self mut HashSet<node_id>,
}
pub fn find_reachable(crate_mod: &_mod, exp_map2: resolve::ExportMap2,
tcx: ty::ctxt, method_map: typeck::method_map) -> map {
let mut rmap = HashSet::new();
{
let cx = @mut ctx {
exp_map2: exp_map2,
tcx: tcx,
method_map: method_map,
rmap: &mut rmap
};
traverse_public_mod(cx, ast::crate_node_id, crate_mod);
traverse_all_resources_and_impls(cx, crate_mod);
}
return @rmap;
}
fn traverse_exports(cx: @mut ctx, mod_id: node_id) -> bool {
let mut found_export = false;
match cx.exp_map2.find(&mod_id) {
Some(ref exp2s) => {
for (*exp2s).iter().advance |e2| {
found_export = true;
traverse_def_id(cx, e2.def_id)
};
}
None => ()
}
return found_export;
}
fn traverse_def_id(cx: @mut ctx, did: def_id) {
if did.crate != local_crate { return; }
match cx.tcx.items.find(&did.node) {
None => (), // This can happen for self, for example
Some(&ast_map::node_item(item, _)) => traverse_public_item(cx, item),
Some(&ast_map::node_method(_, impl_id, _)) => traverse_def_id(cx, impl_id),
Some(&ast_map::node_foreign_item(item, _, _, _)) => {
let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
cx.rmap.insert(item.id);
}
Some(&ast_map::node_variant(ref v, _, _)) => {
let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
cx.rmap.insert(v.node.id);
}
_ => ()
}
}
fn traverse_public_mod(cx: @mut ctx, mod_id: node_id, m: &_mod) {
if !traverse_exports(cx, mod_id) {
// No exports, so every local item is exported
for m.items.iter().advance |item| {
traverse_public_item(cx, *item);
}
}
}
fn traverse_public_item(cx: @mut ctx, item: @item) {
{
// FIXME #6021: naming rmap shouldn't be necessary
let cx = &mut *cx;
let rmap: &mut HashSet<node_id> = cx.rmap;
if rmap.contains(&item.id) { return; }
rmap.insert(item.id);
}
match item.node {
item_mod(ref m) => traverse_public_mod(cx, item.id, m),
item_foreign_mod(ref nm) => {
if !traverse_exports(cx, item.id) {
for nm.items.iter().advance |item| {
let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
cx.rmap.insert(item.id);
}
}
}
item_fn(_, _, _, ref generics, ref blk) => {
if generics.ty_params.len() > 0u ||
attr::find_inline_attr(item.attrs) != attr::ia_none {
traverse_inline_body(cx, blk);
}
}
item_impl(ref generics, _, _, ref ms) => {
for ms.iter().advance |m| {
if generics.ty_params.len() > 0u ||
m.generics.ty_params.len() > 0u ||
attr::find_inline_attr(m.attrs) != attr::ia_none
{
{
let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
cx.rmap.insert(m.id);
}
traverse_inline_body(cx, &m.body);
}
}
}
item_struct(ref struct_def, _) => {
for struct_def.ctor_id.iter().advance |&ctor_id| {
let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
cx.rmap.insert(ctor_id);
}
}
item_ty(t, _) => {
traverse_ty(t, (cx,
visit::mk_vt(@visit::Visitor {visit_ty: traverse_ty,
..*visit::default_visitor()})))
}
item_static(*) |
item_enum(*) | item_trait(*) => (),
item_mac(*) => fail!("item macros unimplemented")
}
}
fn traverse_ty<'a>(ty: @Ty, (cx, v): (@mut ctx<'a>, visit::vt<@mut ctx<'a>>)) {
{
let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
if cx.rmap.contains(&ty.id) { return; }
cx.rmap.insert(ty.id);
}
match ty.node {
ty_path(p, _bounds, p_id) => {
match cx.tcx.def_map.find(&p_id) {
// Kind of a hack to check this here, but I'm not sure what else
// to do
Some(&def_prim_ty(_)) => { /* do nothing */ }
Some(&d) => traverse_def_id(cx, def_id_of_def(d)),
None => { /* do nothing -- but should we fail here? */ }
}
for p.types.iter().advance |t| {
(v.visit_ty)(*t, (cx, v));
}
}
_ => visit::visit_ty(ty, (cx, v))
}
}
fn traverse_inline_body(cx: @mut ctx, body: &blk) {
fn traverse_expr<'a>(e: @expr, (cx, v): (@mut ctx<'a>,
visit::vt<@mut ctx<'a>>)) {
match e.node {
expr_path(_) => {
match cx.tcx.def_map.find(&e.id) {
Some(&d) => {
traverse_def_id(cx, def_id_of_def(d));
}
None => cx.tcx.sess.span_bug(
e.span,
fmt!("Unbound node id %? while traversing %s",
e.id,
expr_to_str(e, cx.tcx.sess.intr())))
}
}
expr_method_call(*) => {
match cx.method_map.find(&e.id) {
Some(&typeck::method_map_entry {
origin: typeck::method_static(did),
_
}) => {
traverse_def_id(cx, did);
}
Some(_) => {}
None => {
cx.tcx.sess.span_bug(e.span, "expr_method_call not in \
method map");
}
}
}
_ => ()
}
visit::visit_expr(e, (cx, v));
}
// Don't ignore nested items: for example if a generic fn contains a
// generic impl (as in deque::create), we need to monomorphize the
// impl as well
fn traverse_item(i: @item, (cx, _v): (@mut ctx, visit::vt<@mut ctx>)) {
traverse_public_item(cx, i);
}
visit::visit_block(body, (cx, visit::mk_vt(@visit::Visitor {
visit_expr: traverse_expr,
visit_item: traverse_item,
..*visit::default_visitor()
})));
}
fn traverse_all_resources_and_impls(cx: @mut ctx, crate_mod: &_mod) {
visit::visit_mod(
crate_mod,
codemap::dummy_sp(),
0,
(cx,
visit::mk_vt(@visit::Visitor {
visit_expr: |_e, (_cx, _v)| { },
visit_item: |i, (cx, v)| {
visit::visit_item(i, (cx, v));
match i.node {
item_impl(*) => {
traverse_public_item(cx, i);
}
_ => ()
}
},
..*visit::default_visitor()
})));
}

View File

@ -791,10 +791,6 @@ impl FnCtxt {
ast_ty_to_ty(self, self, ast_t)
}
pub fn expr_to_str(&self, expr: @ast::expr) -> ~str {
expr.repr(self.tcx())
}
pub fn pat_to_str(&self, pat: @ast::pat) -> ~str {
pat.repr(self.tcx())
}
@ -3293,6 +3289,9 @@ pub fn ty_param_bounds_and_ty_for_def(fcx: @mut FnCtxt,
ast::def_self_ty(*) => {
fcx.ccx.tcx.sess.span_bug(sp, "expected value but found self ty");
}
ast::def_method(*) => {
fcx.ccx.tcx.sess.span_bug(sp, "expected value but found method");
}
}
}

View File

@ -81,10 +81,20 @@ pub fn collect_item_types(ccx: @mut CrateCtxt, crate: &ast::crate) {
})));
}
impl CrateCtxt {
pub trait ToTy {
fn to_ty<RS:region_scope + Copy + 'static>(
&self, rs: &RS, ast_ty: &ast::Ty) -> ty::t
{
&self,
rs: &RS,
ast_ty: &ast::Ty)
-> ty::t;
}
impl ToTy for CrateCtxt {
fn to_ty<RS:region_scope + Copy + 'static>(
&self,
rs: &RS,
ast_ty: &ast::Ty)
-> ty::t {
ast_ty_to_ty(self, rs, ast_ty)
}
}

View File

@ -64,7 +64,8 @@ use middle::typeck::infer::glb::Glb;
use middle::typeck::infer::lub::Lub;
use middle::typeck::infer::sub::Sub;
use middle::typeck::infer::to_str::InferStr;
use middle::typeck::infer::{cres, InferCtxt, ures};
use middle::typeck::infer::unify::{InferCtxtMethods, UnifyInferCtxtMethods};
use middle::typeck::infer::{InferCtxt, cres, ures};
use util::common::indent;
use core::result::{iter_vec2, map_vec2};

View File

@ -71,15 +71,53 @@ impl LatticeValue for ty::t {
}
}
impl CombineFields {
pub fn var_sub_var<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(&self,
a_id:
V,
b_id:
V)
->
ures {
pub trait CombineFieldsLatticeMethods {
fn var_sub_var<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(&self,
a_id: V,
b_id: V)
-> ures;
/// make variable a subtype of T
fn var_sub_t<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
&self,
a_id: V,
b: T)
-> ures;
fn t_sub_var<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
&self,
a: T,
b_id: V)
-> ures;
fn merge_bnd<T:Copy + InferStr + LatticeValue>(
&self,
a: &Bound<T>,
b: &Bound<T>,
lattice_op: LatticeOp<T>)
-> cres<Bound<T>>;
fn set_var_to_merged_bounds<T:Copy + InferStr + LatticeValue,
V:Copy+Eq+ToStr+Vid+UnifyVid<Bounds<T>>>(
&self,
v_id: V,
a: &Bounds<T>,
b: &Bounds<T>,
rank: uint)
-> ures;
fn bnds<T:Copy + InferStr + LatticeValue>(
&self,
a: &Bound<T>,
b: &Bound<T>)
-> ures;
}
impl CombineFieldsLatticeMethods for CombineFields {
fn var_sub_var<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
&self,
a_id: V,
b_id: V)
-> ures {
/*!
*
* Make one variable a subtype of another variable. This is a
@ -127,12 +165,12 @@ impl CombineFields {
}
/// make variable a subtype of T
pub fn var_sub_t<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(&self,
a_id: V,
b: T)
-> ures
{
fn var_sub_t<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
&self,
a_id: V,
b: T)
-> ures {
/*!
*
* Make a variable (`a_id`) a subtype of the concrete type `b` */
@ -151,12 +189,12 @@ impl CombineFields {
a_id, a_bounds, b_bounds, node_a.rank)
}
pub fn t_sub_var<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(&self,
a: T,
b_id: V)
-> ures
{
fn t_sub_var<T:Copy + InferStr + LatticeValue,
V:Copy + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
&self,
a: T,
b_id: V)
-> ures {
/*!
*
* Make a concrete type (`a`) a subtype of the variable `b_id` */
@ -175,12 +213,12 @@ impl CombineFields {
b_id, a_bounds, b_bounds, node_b.rank)
}
pub fn merge_bnd<T:Copy + InferStr + LatticeValue>(&self,
a: &Bound<T>,
b: &Bound<T>,
lattice_op:
LatticeOp<T>)
-> cres<Bound<T>> {
fn merge_bnd<T:Copy + InferStr + LatticeValue>(
&self,
a: &Bound<T>,
b: &Bound<T>,
lattice_op: LatticeOp<T>)
-> cres<Bound<T>> {
/*!
*
* Combines two bounds into a more general bound. */
@ -202,14 +240,14 @@ impl CombineFields {
}
}
pub fn set_var_to_merged_bounds<T:Copy + InferStr + LatticeValue,
V:Copy+Eq+ToStr+Vid+UnifyVid<Bounds<T>>>(
&self,
v_id: V,
a: &Bounds<T>,
b: &Bounds<T>,
rank: uint)
-> ures {
fn set_var_to_merged_bounds<T:Copy + InferStr + LatticeValue,
V:Copy+Eq+ToStr+Vid+UnifyVid<Bounds<T>>>(
&self,
v_id: V,
a: &Bounds<T>,
b: &Bounds<T>,
rank: uint)
-> ures {
/*!
*
* Updates the bounds for the variable `v_id` to be the intersection
@ -264,10 +302,10 @@ impl CombineFields {
uok()
}
pub fn bnds<T:Copy + InferStr + LatticeValue>(&self,
a: &Bound<T>,
b: &Bound<T>)
-> ures {
fn bnds<T:Copy + InferStr + LatticeValue>(&self,
a: &Bound<T>,
b: &Bound<T>)
-> ures {
debug!("bnds(%s <: %s)", a.inf_str(self.infcx),
b.inf_str(self.infcx));
let _r = indenter();

View File

@ -54,7 +54,7 @@ use middle::ty;
use middle::typeck::infer::{Bounds, cyclic_ty, fixup_err, fres, InferCtxt};
use middle::typeck::infer::{region_var_bound_by_region_var, unresolved_ty};
use middle::typeck::infer::to_str::InferStr;
use middle::typeck::infer::unify::Root;
use middle::typeck::infer::unify::{Root, UnifyInferCtxtMethods};
use util::common::{indent, indenter};
use util::ppaux::ty_to_str;

View File

@ -18,6 +18,7 @@ use middle::typeck::infer::combine::*;
use middle::typeck::infer::cres;
use middle::typeck::infer::glb::Glb;
use middle::typeck::infer::InferCtxt;
use middle::typeck::infer::lattice::CombineFieldsLatticeMethods;
use middle::typeck::infer::lub::Lub;
use middle::typeck::infer::to_str::InferStr;
use util::common::{indent, indenter};

View File

@ -40,9 +40,31 @@ pub trait UnifyVid<T> {
-> &'v mut ValsAndBindings<Self, T>;
}
impl InferCtxt {
pub fn get<T:Copy, V:Copy+Eq+Vid+UnifyVid<T>>(&mut self, vid: V)
-> Node<V, T> {
pub trait UnifyInferCtxtMethods {
fn get<T:Copy,
V:Copy + Eq + Vid + UnifyVid<T>>(
&mut self,
vid: V)
-> Node<V, T>;
fn set<T:Copy + InferStr,
V:Copy + Vid + ToStr + UnifyVid<T>>(
&mut self,
vid: V,
new_v: VarValue<V, T>);
fn unify<T:Copy + InferStr,
V:Copy + Vid + ToStr + UnifyVid<T>>(
&mut self,
node_a: &Node<V, T>,
node_b: &Node<V, T>)
-> (V, uint);
}
impl UnifyInferCtxtMethods for InferCtxt {
fn get<T:Copy,
V:Copy + Eq + Vid + UnifyVid<T>>(
&mut self,
vid: V)
-> Node<V, T> {
/*!
*
* Find the root node for `vid`. This uses the standard
@ -84,10 +106,11 @@ impl InferCtxt {
}
}
pub fn set<T:Copy + InferStr,
V:Copy + Vid + ToStr + UnifyVid<T>>(&mut self,
vid: V,
new_v: VarValue<V, T>) {
fn set<T:Copy + InferStr,
V:Copy + Vid + ToStr + UnifyVid<T>>(
&mut self,
vid: V,
new_v: VarValue<V, T>) {
/*!
*
* Sets the value for `vid` to `new_v`. `vid` MUST be a root node!
@ -102,11 +125,12 @@ impl InferCtxt {
vb.vals.insert(vid.to_uint(), new_v);
}
pub fn unify<T:Copy + InferStr,
V:Copy + Vid + ToStr + UnifyVid<T>>(&mut self,
node_a: &Node<V, T>,
node_b: &Node<V, T>)
-> (V, uint) {
fn unify<T:Copy + InferStr,
V:Copy + Vid + ToStr + UnifyVid<T>>(
&mut self,
node_a: &Node<V, T>,
node_b: &Node<V, T>)
-> (V, uint) {
// Rank optimization: if you don't know what it is, check
// out <http://en.wikipedia.org/wiki/Disjoint-set_data_structure>
@ -155,14 +179,31 @@ pub fn mk_err<T:SimplyUnifiable>(a_is_expected: bool,
}
}
impl InferCtxt {
pub fn simple_vars<T:Copy+Eq+InferStr+SimplyUnifiable,
V:Copy+Eq+Vid+ToStr+UnifyVid<Option<T>>>(&mut self,
a_is_expected:
bool,
a_id: V,
b_id: V)
-> ures {
pub trait InferCtxtMethods {
fn simple_vars<T:Copy + Eq + InferStr + SimplyUnifiable,
V:Copy + Eq + Vid + ToStr + UnifyVid<Option<T>>>(
&mut self,
a_is_expected: bool,
a_id: V,
b_id: V)
-> ures;
fn simple_var_t<T:Copy + Eq + InferStr + SimplyUnifiable,
V:Copy + Eq + Vid + ToStr + UnifyVid<Option<T>>>(
&mut self,
a_is_expected: bool,
a_id: V,
b: T)
-> ures;
}
impl InferCtxtMethods for InferCtxt {
fn simple_vars<T:Copy + Eq + InferStr + SimplyUnifiable,
V:Copy + Eq + Vid + ToStr + UnifyVid<Option<T>>>(
&mut self,
a_is_expected: bool,
a_id: V,
b_id: V)
-> ures {
/*!
*
* Unifies two simple variables. Because simple variables do
@ -194,13 +235,13 @@ impl InferCtxt {
return uok();
}
pub fn simple_var_t<T:Copy+Eq+InferStr+SimplyUnifiable,
V:Copy+Eq+Vid+ToStr+UnifyVid<Option<T>>>(&mut self,
a_is_expected
: bool,
a_id: V,
b: T)
-> ures {
fn simple_var_t<T:Copy + Eq + InferStr + SimplyUnifiable,
V:Copy + Eq + Vid + ToStr + UnifyVid<Option<T>>>(
&mut self,
a_is_expected: bool,
a_id: V,
b: T)
-> ures {
/*!
*
* Sets the value of the variable `a_id` to `b`. Because

View File

@ -80,6 +80,7 @@ pub mod middle {
pub mod moves;
pub mod entry;
pub mod effect;
pub mod reachable;
}
pub mod front {

View File

@ -185,7 +185,20 @@ impl<'self> PkgScript<'self> {
}
impl Ctx {
pub trait CtxMethods {
fn run(&self, cmd: &str, args: ~[~str]);
fn do_cmd(&self, _cmd: &str, _pkgname: &str);
fn build(&self, workspace: &Path, pkgid: &PkgId);
fn clean(&self, workspace: &Path, id: &PkgId);
fn info(&self);
fn install(&self, workspace: &Path, id: &PkgId);
fn prefer(&self, _id: &str, _vers: Option<~str>);
fn test(&self);
fn uninstall(&self, _id: &str, _vers: Option<~str>);
fn unprefer(&self, _id: &str, _vers: Option<~str>);
}
impl CtxMethods for Ctx {
fn run(&self, cmd: &str, args: ~[~str]) {
match cmd {

View File

@ -335,8 +335,8 @@ mod stat {
}
}
impl Path {
#[cfg(target_os = "win32")]
impl WindowsPath {
pub fn stat(&self) -> Option<libc::stat> {
unsafe {
do str::as_c_str(self.to_str()) |buf| {
@ -349,12 +349,35 @@ impl Path {
}
}
#[cfg(unix)]
pub fn lstat(&self) -> Option<libc::stat> {
pub fn exists(&self) -> bool {
match self.stat() {
None => false,
Some(_) => true,
}
}
pub fn get_size(&self) -> Option<i64> {
match self.stat() {
None => None,
Some(ref st) => Some(st.st_size as i64),
}
}
pub fn get_mode(&self) -> Option<uint> {
match self.stat() {
None => None,
Some(ref st) => Some(st.st_mode as uint),
}
}
}
#[cfg(not(target_os = "win32"))]
impl PosixPath {
pub fn stat(&self) -> Option<libc::stat> {
unsafe {
do str::as_c_str(self.to_str()) |buf| {
do str::as_c_str(self.to_str()) |buf| {
let mut st = stat::arch::default_stat();
match libc::lstat(buf, &mut st) {
match libc::stat(buf, &mut st) {
0 => Some(st),
_ => None,
}
@ -396,7 +419,7 @@ impl Path {
#[cfg(target_os = "freebsd")]
#[cfg(target_os = "linux")]
#[cfg(target_os = "macos")]
impl Path {
impl PosixPath {
pub fn get_atime(&self) -> Option<(i64, int)> {
match self.stat() {
None => None,
@ -428,9 +451,24 @@ impl Path {
}
}
#[cfg(unix)]
impl PosixPath {
pub fn lstat(&self) -> Option<libc::stat> {
unsafe {
do str::as_c_str(self.to_str()) |buf| {
let mut st = stat::arch::default_stat();
match libc::lstat(buf, &mut st) {
0 => Some(st),
_ => None,
}
}
}
}
}
#[cfg(target_os = "freebsd")]
#[cfg(target_os = "macos")]
impl Path {
impl PosixPath {
pub fn get_birthtime(&self) -> Option<(i64, int)> {
match self.stat() {
None => None,
@ -443,7 +481,7 @@ impl Path {
}
#[cfg(target_os = "win32")]
impl Path {
impl WindowsPath {
pub fn get_atime(&self) -> Option<(i64, int)> {
match self.stat() {
None => None,

View File

@ -859,7 +859,8 @@ pub mod raw {
/// invalidated later.
pub unsafe fn c_str_to_static_slice(s: *libc::c_char) -> &'static str {
let s = s as *u8;
let mut (curr, len) = (s, 0u);
let mut curr = s;
let mut len = 0u;
while *curr != 0u8 {
len += 1u;
curr = ptr::offset(s, len);

View File

@ -205,7 +205,8 @@ pub enum def {
def_struct(def_id),
def_typaram_binder(node_id), /* struct, impl or trait with ty params */
def_region(node_id),
def_label(node_id)
def_label(node_id),
def_method(def_id /* method */, Option<def_id> /* trait */),
}
@ -1047,7 +1048,7 @@ pub struct trait_ref {
pub enum visibility { public, private, inherited }
impl visibility {
fn inherit_from(&self, parent_visibility: visibility) -> visibility {
pub fn inherit_from(&self, parent_visibility: visibility) -> visibility {
match self {
&inherited => parent_visibility,
&public | &private => *self

View File

@ -59,7 +59,7 @@ pub fn def_id_of_def(d: def) -> def_id {
def_fn(id, _) | def_static_method(id, _, _) | def_mod(id) |
def_foreign_mod(id) | def_static(id, _) |
def_variant(_, id) | def_ty(id) | def_ty_param(id, _) |
def_use(id) | def_struct(id) | def_trait(id) => {
def_use(id) | def_struct(id) | def_trait(id) | def_method(id, _) => {
id
}
def_arg(id, _) | def_local(id, _) | def_self(id, _) | def_self_ty(id)

View File

@ -51,251 +51,3 @@ pub fn token_to_str(token: &token::Token) -> ~str {
token::to_str(get_ident_interner(), token)
}
impl Parser {
// convert a token to a string using self's reader
pub fn token_to_str(&self, token: &token::Token) -> ~str {
token::to_str(get_ident_interner(), token)
}
// convert the current token to a string using self's reader
pub fn this_token_to_str(&self) -> ~str {
self.token_to_str(self.token)
}
pub fn unexpected_last(&self, t: &token::Token) -> ! {
self.span_fatal(
*self.last_span,
fmt!(
"unexpected token: `%s`",
self.token_to_str(t)
)
);
}
pub fn unexpected(&self) -> ! {
self.fatal(
fmt!(
"unexpected token: `%s`",
self.this_token_to_str()
)
);
}
// expect and consume the token t. Signal an error if
// the next token is not t.
pub fn expect(&self, t: &token::Token) {
if *self.token == *t {
self.bump();
} else {
self.fatal(
fmt!(
"expected `%s` but found `%s`",
self.token_to_str(t),
self.this_token_to_str()
)
)
}
}
pub fn parse_ident(&self) -> ast::ident {
self.check_strict_keywords();
self.check_reserved_keywords();
match *self.token {
token::IDENT(i, _) => {
self.bump();
i
}
token::INTERPOLATED(token::nt_ident(*)) => {
self.bug("ident interpolation not converted to real token");
}
_ => {
self.fatal(
fmt!(
"expected ident, found `%s`",
self.this_token_to_str()
)
);
}
}
}
pub fn parse_path_list_ident(&self) -> ast::path_list_ident {
let lo = self.span.lo;
let ident = self.parse_ident();
let hi = self.last_span.hi;
spanned(lo, hi, ast::path_list_ident_ { name: ident,
id: self.get_id() })
}
// consume token 'tok' if it exists. Returns true if the given
// token was present, false otherwise.
pub fn eat(&self, tok: &token::Token) -> bool {
return if *self.token == *tok { self.bump(); true } else { false };
}
pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
token::is_keyword(kw, self.token)
}
// if the next token is the given keyword, eat it and return
// true. Otherwise, return false.
pub fn eat_keyword(&self, kw: keywords::Keyword) -> bool {
let is_kw = match *self.token {
token::IDENT(sid, false) => kw.to_ident().name == sid.name,
_ => false
};
if is_kw { self.bump() }
is_kw
}
// if the given word is not a keyword, signal an error.
// if the next token is not the given word, signal an error.
// otherwise, eat it.
pub fn expect_keyword(&self, kw: keywords::Keyword) {
if !self.eat_keyword(kw) {
self.fatal(
fmt!(
"expected `%s`, found `%s`",
self.id_to_str(kw.to_ident()),
self.this_token_to_str()
)
);
}
}
// signal an error if the given string is a strict keyword
pub fn check_strict_keywords(&self) {
if token::is_strict_keyword(self.token) {
self.span_err(*self.last_span,
fmt!("found `%s` in ident position", self.this_token_to_str()));
}
}
// signal an error if the current token is a reserved keyword
pub fn check_reserved_keywords(&self) {
if token::is_reserved_keyword(self.token) {
self.fatal(fmt!("`%s` is a reserved keyword", self.this_token_to_str()));
}
}
// expect and consume a GT. if a >> is seen, replace it
// with a single > and continue. If a GT is not seen,
// signal an error.
pub fn expect_gt(&self) {
if *self.token == token::GT {
self.bump();
} else if *self.token == token::BINOP(token::SHR) {
self.replace_token(
token::GT,
self.span.lo + BytePos(1u),
self.span.hi
);
} else {
let mut s: ~str = ~"expected `";
s.push_str(self.token_to_str(&token::GT));
s.push_str("`, found `");
s.push_str(self.this_token_to_str());
s.push_str("`");
self.fatal(s);
}
}
// parse a sequence bracketed by '<' and '>', stopping
// before the '>'. `sep`, when present, is required between elements
// (but not before the first). The closing `>` (or `>>`, which is left
// intact for expect_gt to split) is NOT consumed.
pub fn parse_seq_to_before_gt<T: Copy>(&self,
                                       sep: Option<token::Token>,
                                       f: &fn(&Parser) -> T)
                                       -> OptVec<T> {
    let mut first = true;
    let mut v = opt_vec::Empty;
    while *self.token != token::GT
        && *self.token != token::BINOP(token::SHR) {
        match sep {
            Some(ref t) => {
                // every element after the first must be preceded by `t`
                if first { first = false; }
                else { self.expect(t); }
            }
            _ => ()
        }
        v.push(f(self));
    }
    return v;
}
// Parse a '<'..'>' sequence and consume the closing '>' as well
// (splitting a '>>' token when necessary).
pub fn parse_seq_to_gt<T: Copy>(&self,
                                sep: Option<token::Token>,
                                f: &fn(&Parser) -> T)
                                -> OptVec<T> {
    let result = self.parse_seq_to_before_gt(sep, f);
    self.expect_gt();
    result
}
// Parse a sequence and consume the closing delimiter `ket` too. The
// callback `f` must consume tokens until reaching the next separator
// or the closing bracket.
pub fn parse_seq_to_end<T: Copy>(&self,
                                 ket: &token::Token,
                                 sep: SeqSep,
                                 f: &fn(&Parser) -> T)
                                 -> ~[T] {
    let result = self.parse_seq_to_before_end(ket, sep, f);
    self.bump(); // eat the closing delimiter
    result
}
// parse a sequence, not including the closing delimiter. The function
// f must consume tokens until reaching the next separator or
// closing bracket. `sep.sep`, when present, is required between
// elements; `sep.trailing_sep_allowed` additionally permits a dangling
// separator right before `ket`.
pub fn parse_seq_to_before_end<T: Copy>(&self,
                                        ket: &token::Token,
                                        sep: SeqSep,
                                        f: &fn(&Parser) -> T)
                                        -> ~[T] {
    let mut first: bool = true;
    let mut v: ~[T] = ~[];
    while *self.token != *ket {
        match sep.sep {
            Some(ref t) => {
                // separator goes before every element but the first
                if first { first = false; }
                else { self.expect(t); }
            }
            _ => ()
        }
        // after a trailing separator the closer may follow immediately
        if sep.trailing_sep_allowed && *self.token == *ket { break; }
        v.push(f(self));
    }
    return v;
}
// Parse a bracketed sequence, consuming both the opening delimiter
// `bra` and the closing delimiter `ket`. The callback `f` must consume
// tokens until reaching the next separator or the closing bracket.
pub fn parse_unspanned_seq<T: Copy>(&self,
                                    bra: &token::Token,
                                    ket: &token::Token,
                                    sep: SeqSep,
                                    f: &fn(&Parser) -> T)
                                    -> ~[T] {
    self.expect(bra);
    let contents = self.parse_seq_to_before_end(ket, sep, f);
    self.bump(); // eat `ket`
    contents
}
// NB: Do not use this function unless you actually plan to place the
// spanned list in the AST.
// Like parse_unspanned_seq, but also records the span covering the
// whole bracketed sequence, delimiters included.
pub fn parse_seq<T: Copy>(&self,
                          bra: &token::Token,
                          ket: &token::Token,
                          sep: SeqSep,
                          f: &fn(&Parser) -> T)
                          -> spanned<~[T]> {
    let lo = self.span.lo;
    self.expect(bra);
    let result = self.parse_seq_to_before_end(ket, sep, f);
    // read the closing delimiter's end position before bumping past it
    let hi = self.span.hi;
    self.bump();
    spanned(lo, hi, result)
}
}

View File

@ -73,7 +73,26 @@ impl to_bytes::IterBytes for ObsoleteSyntax {
}
}
impl Parser {
pub trait ParserObsoleteMethods {
    /// Reports an obsolete syntax non-fatal error.
    fn obsolete(&self, sp: span, kind: ObsoleteSyntax);
    /// Reports an obsolete syntax non-fatal error, and returns
    /// a placeholder expression
    fn obsolete_expr(&self, sp: span, kind: ObsoleteSyntax) -> @expr;
    /// Reports the obsolete-syntax error itself (shared by the above).
    fn report(&self,
              sp: span,
              kind: ObsoleteSyntax,
              kind_str: &str,
              desc: &str);
    /// True when `token` is an identifier spelling exactly `ident`.
    fn token_is_obsolete_ident(&self, ident: &str, token: &Token) -> bool;
    /// True when the current token is the obsolete identifier `ident`.
    fn is_obsolete_ident(&self, ident: &str) -> bool;
    /// Consumes the current token when it is the obsolete identifier
    /// `ident`; returns whether it was consumed.
    fn eat_obsolete_ident(&self, ident: &str) -> bool;
    /// Attempts to parse (and reject) obsolete forms; each returns
    /// whether the obsolete form was actually present.
    fn try_parse_obsolete_struct_ctor(&self) -> bool;
    fn try_parse_obsolete_with(&self) -> bool;
    fn try_parse_obsolete_priv_section(&self, attrs: &[attribute]) -> bool;
}
impl ParserObsoleteMethods for Parser {
/// Reports an obsolete syntax non-fatal error.
pub fn obsolete(&self, sp: span, kind: ObsoleteSyntax) {
let (kind_str, desc) = match kind {

View File

@ -64,7 +64,7 @@ use codemap::{span, BytePos, spanned, mk_sp};
use codemap;
use parse::attr::parser_attr;
use parse::classify;
use parse::common::{seq_sep_none};
use parse::common::{SeqSep, seq_sep_none};
use parse::common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed};
use parse::lexer::reader;
use parse::lexer::TokenAndSpan;
@ -84,7 +84,7 @@ use parse::obsolete::{ObsoletePurity, ObsoleteStaticMethod};
use parse::obsolete::{ObsoleteConstItem, ObsoleteFixedLengthVectorType};
use parse::obsolete::{ObsoleteNamedExternModule, ObsoleteMultipleLocalDecl};
use parse::obsolete::{ObsoleteMutWithMultipleBindings};
use parse::obsolete::{ObsoletePatternCopyKeyword};
use parse::obsolete::{ObsoletePatternCopyKeyword, ParserObsoleteMethods};
use parse::token::{can_begin_expr, get_ident_interner, ident_to_str, is_ident};
use parse::token::{is_ident_or_path};
use parse::token::{is_plain_ident, INTERPOLATED, keywords, special_idents};
@ -274,6 +274,253 @@ impl Drop for Parser {
}
impl Parser {
// convert a token to a string using self's reader
// (uses the global ident interner to print identifiers)
pub fn token_to_str(&self, token: &token::Token) -> ~str {
    token::to_str(get_ident_interner(), token)
}
// convert the current token to a string using self's reader;
// convenience wrapper around token_to_str for error messages
pub fn this_token_to_str(&self) -> ~str {
    self.token_to_str(self.token)
}
// Aborts parsing with an "unexpected token" error attributed to the
// PREVIOUS token's span (for errors noticed one token too late).
pub fn unexpected_last(&self, t: &token::Token) -> ! {
    self.span_fatal(*self.last_span,
                    fmt!("unexpected token: `%s`", self.token_to_str(t)));
}
// Aborts parsing with an "unexpected token" error at the current token.
pub fn unexpected(&self) -> ! {
    self.fatal(fmt!("unexpected token: `%s`", self.this_token_to_str()));
}
// expect and consume the token t. Signal an error if
// the next token is not t.
pub fn expect(&self, t: &token::Token) {
if *self.token == *t {
self.bump();
} else {
self.fatal(
fmt!(
"expected `%s` but found `%s`",
self.token_to_str(t),
self.this_token_to_str()
)
)
}
}
// parse and consume an identifier, signalling errors for strict or
// reserved keywords in ident position; fatal if the current token is
// not an identifier at all.
pub fn parse_ident(&self) -> ast::ident {
    self.check_strict_keywords();
    self.check_reserved_keywords();
    match *self.token {
        token::IDENT(i, _) => {
            self.bump();
            i
        }
        // interpolated idents (from macro expansion) should have been
        // converted to plain IDENT tokens before reaching here
        token::INTERPOLATED(token::nt_ident(*)) => {
            self.bug("ident interpolation not converted to real token");
        }
        _ => {
            self.fatal(
                fmt!(
                    "expected ident, found `%s`",
                    self.this_token_to_str()
                )
            );
        }
    }
}
// parse a single identifier inside a `use a::{b, c}` list, producing a
// spanned path_list_ident with a fresh node id. The span runs from the
// current token's start to the end of the parsed ident (last_span).
pub fn parse_path_list_ident(&self) -> ast::path_list_ident {
    let lo = self.span.lo;
    let ident = self.parse_ident();
    // parse_ident bumped past the ident, so its span is now last_span
    let hi = self.last_span.hi;
    spanned(lo, hi, ast::path_list_ident_ { name: ident,
                                            id: self.get_id() })
}
// Consume the token `tok` when it is the current token. Returns
// whether the token was actually present and consumed.
pub fn eat(&self, tok: &token::Token) -> bool {
    let present = *self.token == *tok;
    if present { self.bump(); }
    present
}
// returns true if the current token is the given keyword; does not
// consume anything (use eat_keyword to consume).
pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
    token::is_keyword(kw, self.token)
}
// if the next token is the given keyword, eat it and return
// true. Otherwise, return false.
pub fn eat_keyword(&self, kw: keywords::Keyword) -> bool {
    // Only plain IDENT tokens with a `false` second field match
    // (NOTE(review): that field appears to distinguish module-name
    // position — confirm against token::IDENT's definition). The
    // comparison is completed before bump() so the match never holds
    // a borrow of the token while the parser advances.
    let is_kw = match *self.token {
        token::IDENT(sid, false) => kw.to_ident().name == sid.name,
        _ => false
    };
    if is_kw { self.bump() }
    is_kw
}
// if the next token is not the given keyword, signal a fatal error;
// otherwise, eat it.
pub fn expect_keyword(&self, kw: keywords::Keyword) {
    if !self.eat_keyword(kw) {
        self.fatal(
            fmt!(
                "expected `%s`, found `%s`",
                *self.id_to_str(kw.to_ident()),
                self.this_token_to_str()
            )
        );
    }
}
// signal a (non-fatal) error if the current token is a strict keyword,
// i.e. one that may never be used in ident position.
// NOTE(review): the error is reported at last_span although the check
// is on the current token — confirm this is intentional.
pub fn check_strict_keywords(&self) {
    if token::is_strict_keyword(self.token) {
        self.span_err(*self.last_span,
            fmt!("found `%s` in ident position", self.this_token_to_str()));
    }
}
// signal a fatal error if the current token is a reserved keyword
// (reserved for future use, never valid in any position).
pub fn check_reserved_keywords(&self) {
    if token::is_reserved_keyword(self.token) {
        self.fatal(fmt!("`%s` is a reserved keyword", self.this_token_to_str()));
    }
}
// expect and consume a GT. if a >> is seen, replace it
// with a single > and continue. If a GT is not seen,
// signal an error.
pub fn expect_gt(&self) {
if *self.token == token::GT {
self.bump();
} else if *self.token == token::BINOP(token::SHR) {
self.replace_token(
token::GT,
self.span.lo + BytePos(1u),
self.span.hi
);
} else {
let mut s: ~str = ~"expected `";
s.push_str(self.token_to_str(&token::GT));
s.push_str("`, found `");
s.push_str(self.this_token_to_str());
s.push_str("`");
self.fatal(s);
}
}
// parse a sequence bracketed by '<' and '>', stopping
// before the '>'. `sep`, when present, is required between elements
// (but not before the first). The closing `>` (or `>>`, which is left
// intact for expect_gt to split) is NOT consumed.
pub fn parse_seq_to_before_gt<T: Copy>(&self,
                                       sep: Option<token::Token>,
                                       f: &fn(&Parser) -> T)
                                       -> OptVec<T> {
    let mut first = true;
    let mut v = opt_vec::Empty;
    while *self.token != token::GT
        && *self.token != token::BINOP(token::SHR) {
        match sep {
            Some(ref t) => {
                // every element after the first must be preceded by `t`
                if first { first = false; }
                else { self.expect(t); }
            }
            _ => ()
        }
        v.push(f(self));
    }
    return v;
}
// Parse a '<'..'>' sequence and consume the closing '>' as well
// (splitting a '>>' token when necessary).
pub fn parse_seq_to_gt<T: Copy>(&self,
                                sep: Option<token::Token>,
                                f: &fn(&Parser) -> T)
                                -> OptVec<T> {
    let result = self.parse_seq_to_before_gt(sep, f);
    self.expect_gt();
    result
}
// Parse a sequence and consume the closing delimiter `ket` too. The
// callback `f` must consume tokens until reaching the next separator
// or the closing bracket.
pub fn parse_seq_to_end<T: Copy>(&self,
                                 ket: &token::Token,
                                 sep: SeqSep,
                                 f: &fn(&Parser) -> T)
                                 -> ~[T] {
    let result = self.parse_seq_to_before_end(ket, sep, f);
    self.bump(); // eat the closing delimiter
    result
}
// parse a sequence, not including the closing delimiter. The function
// f must consume tokens until reaching the next separator or
// closing bracket. `sep.sep`, when present, is required between
// elements; `sep.trailing_sep_allowed` additionally permits a dangling
// separator right before `ket`.
pub fn parse_seq_to_before_end<T: Copy>(&self,
                                        ket: &token::Token,
                                        sep: SeqSep,
                                        f: &fn(&Parser) -> T)
                                        -> ~[T] {
    let mut first: bool = true;
    let mut v: ~[T] = ~[];
    while *self.token != *ket {
        match sep.sep {
            Some(ref t) => {
                // separator goes before every element but the first
                if first { first = false; }
                else { self.expect(t); }
            }
            _ => ()
        }
        // after a trailing separator the closer may follow immediately
        if sep.trailing_sep_allowed && *self.token == *ket { break; }
        v.push(f(self));
    }
    return v;
}
// Parse a bracketed sequence, consuming both the opening delimiter
// `bra` and the closing delimiter `ket`. The callback `f` must consume
// tokens until reaching the next separator or the closing bracket.
pub fn parse_unspanned_seq<T: Copy>(&self,
                                    bra: &token::Token,
                                    ket: &token::Token,
                                    sep: SeqSep,
                                    f: &fn(&Parser) -> T)
                                    -> ~[T] {
    self.expect(bra);
    let contents = self.parse_seq_to_before_end(ket, sep, f);
    self.bump(); // eat `ket`
    contents
}
// NB: Do not use this function unless you actually plan to place the
// spanned list in the AST.
// Like parse_unspanned_seq, but also records the span covering the
// whole bracketed sequence, delimiters included.
pub fn parse_seq<T: Copy>(&self,
                          bra: &token::Token,
                          ket: &token::Token,
                          sep: SeqSep,
                          f: &fn(&Parser) -> T)
                          -> spanned<~[T]> {
    let lo = self.span.lo;
    self.expect(bra);
    let result = self.parse_seq_to_before_end(ket, sep, f);
    // read the closing delimiter's end position before bumping past it
    let hi = self.span.hi;
    self.bump();
    spanned(lo, hi, result)
}
// advance the parser by one token
pub fn bump(&self) {
*self.last_span = copy *self.span;