removed unused imports (and one unused argument)

John Clements 2013-06-04 11:09:18 -07:00
parent 367eddf5b1
commit 8dad2bb281
15 changed files with 14 additions and 26 deletions

View File

@@ -144,7 +144,7 @@ fn fold_item(cx: @mut TestCtxt, i: @ast::item, fld: @fold::ast_fold)
 -> Option<@ast::item> {
 cx.path.push(i.ident);
 debug!("current path: %s",
-ast_util::path_name_i(copy cx.path, token::get_ident_interner()));
+ast_util::path_name_i(copy cx.path));
 if is_test_fn(cx, i) || is_bench_fn(i) {
 match i.node {
@@ -412,13 +412,10 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::expr {
 let ext_cx = cx.ext_cx;
-debug!("encoding %s", ast_util::path_name_i(path,
-token::get_ident_interner()));
+debug!("encoding %s", ast_util::path_name_i(path));
 let name_lit: ast::lit =
-nospan(ast::lit_str(@ast_util::path_name_i(
-path,
-token::get_ident_interner())));
+nospan(ast::lit_str(@ast_util::path_name_i(path)));
 let name_expr = @ast::expr {
 id: cx.sess.next_node_id(),

View File

@@ -142,8 +142,7 @@ fn add_to_index(ecx: @EncodeContext,
 full_path.push(name);
 index.push(
 entry {
-val: ast_util::path_name_i(full_path,
-token::get_ident_interner()),
+val: ast_util::path_name_i(full_path),
 pos: ebml_w.writer.tell()
 });
 }

View File

@@ -15,7 +15,7 @@ use core::prelude::*;
 use codemap::{span, spanned};
 use abi::AbiSet;
 use opt_vec::OptVec;
-use parse::token::{get_ident_interner, ident_to_str, interner_get, str_to_ident};
+use parse::token::{ident_to_str, interner_get, str_to_ident};
 use core::hashmap::HashMap;
 use core::option::Option;

View File

@@ -24,12 +24,11 @@ use core::option;
 use core::str;
 use core::to_bytes;
-pub fn path_name_i(idents: &[ident], intr: @token::ident_interner) -> ~str {
+pub fn path_name_i(idents: &[ident]) -> ~str {
 // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
 str::connect(idents.map(|i| copy *token::interner_get(i.name)), "::")
 }
 pub fn path_to_ident(p: @Path) -> ident { copy *p.idents.last() }
 pub fn local_def(id: node_id) -> def_id {
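
A note on the signature change above: the body of path_name_i already resolves each ident through token::interner_get, which reaches the task-local interner on its own, so the intr parameter was dead and every caller can drop its get_ident_interner() argument. A minimal, self-contained sketch of the same pattern in present-day Rust (the Interner type, the INTERNER thread-local, and the get method are illustrative stand-ins, not the libsyntax API):

use std::cell::RefCell;

// Illustrative interner: maps interned name indices back to their strings.
struct Interner {
    strings: Vec<String>,
}

impl Interner {
    fn get(&self, name: usize) -> &str {
        &self.strings[name]
    }
}

thread_local! {
    // Stand-in for the task-local interner that token::interner_get reads.
    static INTERNER: RefCell<Interner> = RefCell::new(Interner {
        strings: vec!["core".to_string(), "option".to_string()],
    });
}

// Before: fn path_name_i(idents: &[usize], _intr: &Interner) -> String
// After: the interner is reached through the thread-local, so the extra
// argument (and the get_ident_interner() call at every call site) goes away.
fn path_name_i(idents: &[usize]) -> String {
    INTERNER.with(|i| {
        let i = i.borrow();
        idents
            .iter()
            .map(|&id| i.get(id).to_string())
            .collect::<Vec<_>>()
            .join("::")
    })
}

fn main() {
    // Prints "core::option", the same "::"-joined form the diff produces.
    println!("{}", path_name_i(&[0, 1]));
}

For callers the refactor is purely mechanical: the value they used to pass in is already reachable from inside the callee, which is why the remaining hunks below are almost all import cleanups.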

View File

@@ -15,7 +15,7 @@ use codemap::span;
 use ext::base::*;
 use ext::base;
 use parse::token;
-use parse::token::{get_ident_interner, str_to_ident};
+use parse::token::{str_to_ident};
 pub fn expand_syntax_ext(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
 -> base::MacResult {

View File

@@ -25,7 +25,7 @@ use ext::build::AstBuilder;
 use core::option;
 use core::unstable::extfmt::ct::*;
 use core::vec;
-use parse::token::{get_ident_interner, str_to_ident};
+use parse::token::{str_to_ident};
 pub fn expand_syntax_ext(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
 -> base::MacResult {

View File

@@ -52,7 +52,6 @@ use ext::pipes::pipec::gen_init;
 use ext::pipes::proto::visit;
 use parse::lexer::{new_tt_reader, reader};
 use parse::parser::Parser;
-use parse::token::{get_ident_interner};
 use core::option::None;
 pub mod ast_builder;

View File

@@ -17,7 +17,7 @@ use ext::pipes::proto::*;
 use parse::common::SeqSep;
 use parse::parser;
 use parse::token;
-use parse::token::{get_ident_interner, interner_get};
+use parse::token::{interner_get};
 pub trait proto_parser {
 fn parse_proto(&self, id: ~str) -> protocol;

View File

@@ -17,7 +17,6 @@ use ext::base;
 use parse::lexer::{new_tt_reader, reader};
 use parse::parser::Parser;
 use parse::token::keywords;
-use parse::token::{get_ident_interner};
 use core::vec;

View File

@@ -93,7 +93,6 @@ pub fn add_new_extension(cx: @ExtCtxt,
 let mut best_fail_msg = ~"internal error: ran no matchers";
 let s_d = cx.parse_sess().span_diagnostic;
-let itr = get_ident_interner();
 for lhses.eachi |i, lhs| { // try each arm's matchers
 match *lhs {

View File

@@ -15,8 +15,8 @@ use ast::{token_tree, tt_delim, tt_tok, tt_seq, tt_nonterminal,ident};
 use codemap::{span, dummy_sp};
 use diagnostic::span_handler;
 use ext::tt::macro_parser::{named_match, matched_seq, matched_nonterminal};
-use parse::token::{EOF, INTERPOLATED, IDENT, Token, nt_ident, ident_interner};
-use parse::token::{ident_to_str, get_ident_interner};
+use parse::token::{EOF, INTERPOLATED, IDENT, Token, nt_ident};
+use parse::token::{ident_to_str};
 use parse::lexer::TokenAndSpan;
 use core::hashmap::HashMap;

View File

@@ -324,7 +324,6 @@ pub fn gather_comments_and_literals(span_diagnostic:
 srdr: @io::Reader)
 -> (~[cmnt], ~[lit]) {
 let src = @str::from_bytes(srdr.read_whole_stream());
-let itr = parse::token::mk_fake_ident_interner();
 let cm = CodeMap::new();
 let filemap = cm.new_filemap(path, src);
 let rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap);

View File

@@ -17,7 +17,7 @@ use diagnostic::span_handler;
 use ext::tt::transcribe::{tt_next_token};
 use ext::tt::transcribe::{dup_tt_reader};
 use parse::token;
-use parse::token::{get_ident_interner, str_to_ident};
+use parse::token::{str_to_ident};
 use core::char;
 use core::either;
@@ -779,7 +779,7 @@ mod test {
 use core::option::None;
 use diagnostic;
 use parse::token;
-use parse::token::{get_ident_interner, str_to_ident};
+use parse::token::{str_to_ident};
 // represents a testing reader (incl. both reader and interner)
 struct Env {

View File

@@ -19,7 +19,6 @@ use diagnostic::{span_handler, mk_span_handler, mk_handler, Emitter};
 use parse::attr::parser_attr;
 use parse::lexer::reader;
 use parse::parser::Parser;
-use parse::token::{ident_interner, get_ident_interner};
 use core::io;
 use core::option::{None, Option, Some};
@@ -342,10 +341,9 @@ mod test {
 use codemap::{span, BytePos, spanned};
 use opt_vec;
 use ast;
-use ast_util::new_ident;
 use abi;
 use parse::parser::Parser;
-use parse::token::{intern, str_to_ident};
+use parse::token::{str_to_ident};
 use util::parser_testing::{string_to_tts_and_sess, string_to_parser};
 use util::parser_testing::{string_to_expr, string_to_item};
 use util::parser_testing::{string_to_stmt};

View File

@@ -2236,7 +2236,6 @@ mod test {
 use codemap;
 use core::cmp::Eq;
 use core::option::None;
-use parse;
 use parse::token;
 fn string_check<T:Eq> (given : &T, expected: &T) {