ICH: Hash MacroDefs in a mostly stable way.

Michael Woerister 2016-11-15 15:20:39 -05:00
parent 003b1699c0
commit a5137afe8c
2 changed files with 143 additions and 3 deletions
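The approach in the hunks below is to feed macro definition bodies into the incremental-compilation hash (ICH) without depending on anything that varies from one compiler session to the next: each enum variant is hashed through an explicit discriminant, and interned names are hashed by their string contents rather than by interner index. A minimal, self-contained sketch of that idea, using hypothetical simplified types (Tok and DefaultHasher stand in for the compiler's token type and IchHasher; this is not the compiler's code):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hypothetical, simplified token (not syntax::parse::token::Token).
enum Tok {
    Ident(String), // interned name, reduced to its text
    Comma,
    Eof,
}

fn hash_tok<H: Hasher>(tok: &Tok, st: &mut H) {
    // Hash a fixed tag per variant (the role hash_discriminant plays in the
    // real visitor), then only data that is identical from session to session.
    match *tok {
        Tok::Ident(ref name) => {
            0u8.hash(st);
            name.hash(st); // hash the string contents, never an interner index
        }
        Tok::Comma => 1u8.hash(st),
        Tok::Eof => 2u8.hash(st),
    }
}

fn main() {
    let toks = [Tok::Ident("foo".to_string()), Tok::Comma, Tok::Eof];
    let mut st = DefaultHasher::new();
    toks.len().hash(&mut st); // length-prefix the sequence, as the real code does for tts
    for tok in &toks {
        hash_tok(tok, &mut st);
    }
    println!("token stream fingerprint: {:016x}", st.finish());
}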


@@ -46,6 +46,7 @@ use self::caching_codemap_view::CachingCodemapView;
use self::hasher::IchHasher;
use ich::Fingerprint;
mod def_path_hash;
mod svh_visitor;
mod caching_codemap_view;
@@ -113,8 +114,12 @@ pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
    record_time(&tcx.sess.perf_stats.incr_comp_hashes_time, || {
        visitor.calculate_def_id(DefId::local(CRATE_DEF_INDEX),
                                 |v| visit::walk_crate(v, krate));
        // FIXME(#37713) if foreign items were item likes, could use ItemLikeVisitor
        krate.visit_all_item_likes(&mut visitor.as_deep_visitor());
        for macro_def in krate.exported_macros.iter() {
            visitor.calculate_node_id(macro_def.id,
                                      |v| v.visit_macro_def(macro_def));
        }
    });
    tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64);
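The hunk above makes compute_incremental_hashes_map record one fingerprint per exported macro, keyed by its NodeId, through the same calculate_node_id(id, |v| ...) pattern already used for the crate root and item-likes. A rough sketch of that per-item fingerprint map, with hypothetical simplified types (HashesMap, NodeId, and Fingerprint here are stand-ins, not the compiler's definitions):

use std::collections::HashMap;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hypothetical, simplified model of the per-item fingerprint map.
type NodeId = u32;
type Fingerprint = u64;

struct HashesMap {
    hashes: HashMap<NodeId, Fingerprint>,
}

impl HashesMap {
    // Run `hash_it` against a fresh hasher and record the result under `id`,
    // mirroring the calculate_node_id(id, |v| ...) pattern in the hunk above.
    fn calculate_node_id<F: FnOnce(&mut DefaultHasher)>(&mut self, id: NodeId, hash_it: F) {
        let mut st = DefaultHasher::new();
        hash_it(&mut st);
        self.hashes.insert(id, st.finish());
    }
}

fn main() {
    let mut map = HashesMap { hashes: HashMap::new() };
    // One entry per exported macro, analogous to the loop over
    // krate.exported_macros above.
    for &(id, body) in &[(1u32, "a + b"), (2u32, "vec![0; 10]")] {
        map.calculate_node_id(id, |st| body.hash(st));
    }
    println!("{} item hashes computed", map.hashes.len());
}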


@@ -24,6 +24,7 @@ use syntax::ast::{self, Name, NodeId};
use syntax::attr;
use syntax::parse::token;
use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos};
use syntax::tokenstream;
use rustc::hir;
use rustc::hir::*;
use rustc::hir::def::{Def, PathResolution};
@@ -769,9 +770,10 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'has
        debug!("visit_macro_def: st={:?}", self.st);
        SawMacroDef.hash(self.st);
        hash_attrs!(self, &macro_def.attrs);
        for tt in &macro_def.body {
            self.hash_token_tree(tt);
        }
        visit::walk_macro_def(self, macro_def)
        // FIXME(mw): We should hash the body of the macro too but we don't
        // have a stable way of doing so yet.
    }
}
@@ -941,4 +943,137 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
            self.overflow_checks_enabled = true;
        }
    }

    fn hash_token_tree(&mut self, tt: &tokenstream::TokenTree) {
        self.hash_discriminant(tt);
        match *tt {
            tokenstream::TokenTree::Token(span, ref token) => {
                hash_span!(self, span);
                self.hash_token(token);
            }
            tokenstream::TokenTree::Delimited(span, ref delimited) => {
                hash_span!(self, span);
                let tokenstream::Delimited {
                    ref delim,
                    open_span,
                    ref tts,
                    close_span,
                } = **delimited;
                delim.hash(self.st);
                hash_span!(self, open_span);
                tts.len().hash(self.st);
                for sub_tt in tts {
                    self.hash_token_tree(sub_tt);
                }
                hash_span!(self, close_span);
            }
            tokenstream::TokenTree::Sequence(span, ref sequence_repetition) => {
                hash_span!(self, span);
                let tokenstream::SequenceRepetition {
                    ref tts,
                    ref separator,
                    op,
                    num_captures,
                } = **sequence_repetition;
                tts.len().hash(self.st);
                for sub_tt in tts {
                    self.hash_token_tree(sub_tt);
                }
                self.hash_discriminant(separator);
                if let Some(ref separator) = *separator {
                    self.hash_token(separator);
                }
                op.hash(self.st);
                num_captures.hash(self.st);
            }
        }
    }

    fn hash_token(&mut self, token: &token::Token) {
        self.hash_discriminant(token);
        match *token {
            token::Token::Eq |
            token::Token::Lt |
            token::Token::Le |
            token::Token::EqEq |
            token::Token::Ne |
            token::Token::Ge |
            token::Token::Gt |
            token::Token::AndAnd |
            token::Token::OrOr |
            token::Token::Not |
            token::Token::Tilde |
            token::Token::At |
            token::Token::Dot |
            token::Token::DotDot |
            token::Token::DotDotDot |
            token::Token::Comma |
            token::Token::Semi |
            token::Token::Colon |
            token::Token::ModSep |
            token::Token::RArrow |
            token::Token::LArrow |
            token::Token::FatArrow |
            token::Token::Pound |
            token::Token::Dollar |
            token::Token::Question |
            token::Token::Underscore |
            token::Token::Whitespace |
            token::Token::Comment |
            token::Token::Eof => {}

            token::Token::BinOp(bin_op_token) |
            token::Token::BinOpEq(bin_op_token) => bin_op_token.hash(self.st),

            token::Token::OpenDelim(delim_token) |
            token::Token::CloseDelim(delim_token) => delim_token.hash(self.st),

            token::Token::Literal(ref lit, ref opt_name) => {
                self.hash_discriminant(lit);
                match *lit {
                    token::Lit::Byte(val) |
                    token::Lit::Char(val) |
                    token::Lit::Integer(val) |
                    token::Lit::Float(val) |
                    token::Lit::Str_(val) |
                    token::Lit::ByteStr(val) => val.as_str().hash(self.st),
                    token::Lit::StrRaw(val, n) |
                    token::Lit::ByteStrRaw(val, n) => {
                        val.as_str().hash(self.st);
                        n.hash(self.st);
                    }
                };
                opt_name.map(ast::Name::as_str).hash(self.st);
            }

            token::Token::Ident(ident) |
            token::Token::Lifetime(ident) |
            token::Token::SubstNt(ident) => ident.name.as_str().hash(self.st),

            token::Token::MatchNt(ident1, ident2) => {
                ident1.name.as_str().hash(self.st);
                ident2.name.as_str().hash(self.st);
            }

            token::Token::Interpolated(ref non_terminal) => {
                // FIXME(mw): This could be implemented properly. It's just a
                //            lot of work, since we would need to hash the AST
                //            in a stable way, in addition to the HIR.
                //            Since this is hardly used anywhere, just emit a
                //            warning for now.
                if self.tcx.sess.opts.debugging_opts.incremental.is_some() {
                    let msg = format!("Quasi-quoting might make incremental \
                                       compilation very inefficient: {:?}",
                                      non_terminal);
                    self.tcx.sess.warn(&msg[..]);
                }

                non_terminal.hash(self.st);
            }

            token::Token::DocComment(val) |
            token::Token::Shebang(val) => val.as_str().hash(self.st),
        }
    }
}
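The new hash_token_tree above walks nested token trees recursively, hashing a discriminant for each node and the child count of every delimited group or sequence before the children themselves, so differently nested trees cannot flatten to the same hashed stream. A small sketch of that recursive shape under the same assumptions as before (a made-up Tree type and DefaultHasher instead of IchHasher; not the compiler's tokenstream API):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hypothetical, pared-down token-tree shape (not syntax::tokenstream).
enum Tree {
    Token(String),        // a leaf token, already reduced to stable text
    Delimited(Vec<Tree>), // a parenthesized/braced group of sub-trees
}

fn hash_tree<H: Hasher>(tree: &Tree, st: &mut H) {
    match *tree {
        Tree::Token(ref text) => {
            0u8.hash(st); // explicit variant tag, like hash_discriminant
            text.hash(st);
        }
        Tree::Delimited(ref subtrees) => {
            1u8.hash(st);
            // Hash the child count first, as hash_token_tree does with
            // tts.len(), so different nestings cannot produce the same
            // flat sequence of hashed children.
            subtrees.len().hash(st);
            for sub in subtrees {
                hash_tree(sub, st);
            }
        }
    }
}

fn main() {
    let tree = Tree::Delimited(vec![
        Tree::Token("a".into()),
        Tree::Delimited(vec![Tree::Token("b".into())]),
    ]);
    let mut st = DefaultHasher::new();
    hash_tree(&tree, &mut st);
    println!("{:016x}", st.finish());
}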