Auto merge of #72121 - Aaron1011:final-hygiene-rebase, r=petrochenkov

Serialize span hygiene data

Fixes #68686
Fixes #70963

This PR serializes global hygiene data into both the incremental compilation cache and the crate metadata. This allows hygiene information to be preserved across compilation sessions (both incremental and cross-crate).

When serializing a `SyntaxContext`, we simply write out the raw id from the current compilation session. Whenever we deserialize a `SyntaxContext`, we 'remap' the id to a fresh id in our current compilation session, and load the associated `SyntaxContextData`.
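Schematically, the decode-side remapping works like this (a toy model with stand-in types, not rustc's actual `decode_syntax_context`; the real implementation in the diff below additionally handles recursive decoding and locking):

```rust
use std::collections::HashMap;

// Stand-in types: `u32` ids and a string payload in place of
// `SyntaxContextData`. The point is the remapping scheme, not the types.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct SyntaxContext(u32);

struct HygieneData {
    ctxt_data: Vec<String>, // one entry per context in *this* session
}

struct HygieneDecodeContext {
    remapped: HashMap<u32, SyntaxContext>, // serialized id -> fresh id
}

impl HygieneDecodeContext {
    fn decode_ctxt(
        &mut self,
        hygiene: &mut HygieneData,
        serialized_id: u32,
        load_data: impl FnOnce(u32) -> String,
    ) -> SyntaxContext {
        if serialized_id == 0 {
            return SyntaxContext(0); // the root context is id 0 in every session
        }
        if let Some(&ctxt) = self.remapped.get(&serialized_id) {
            return ctxt; // the same serialized id always maps to the same fresh id
        }
        // First time we see this id: allocate a fresh id in the current
        // session and load the associated data.
        let fresh = SyntaxContext(hygiene.ctxt_data.len() as u32);
        hygiene.ctxt_data.push(load_data(serialized_id));
        self.remapped.insert(serialized_id, fresh);
        fresh
    }
}
```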

As a result, some 'upstream' `SyntaxContextData` will end up getting duplicated in 'downstream' crates. This only happens when we actually need to use an 'upstream' `SyntaxContext`, which occurs when we deserialize a `Span` that requires it.

We serialize an `ExpnData` into the metadata of the crate which generated it. An `ExpnId` is serialized as a reference into the crate which 'owns' the corresponding `ExpnData`, which avoids duplication in downstream crates.
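So an `ExpnId` crosses crates as a (crate, index) pair. In the same toy model, decoding resolves that pair against the owning crate's metadata (`CStore`/`CrateMetadata` here are hypothetical stand-ins, not rustc's types):

```rust
#[derive(Clone, Copy, Debug)]
struct CrateNum(u32);

struct CrateMetadata {
    expn_data: Vec<String>, // stands in for the per-crate `ExpnData` table
}

struct CStore {
    crates: Vec<CrateMetadata>, // indexed by `CrateNum`
}

// An `ExpnId` is written as (owning crate, index into that crate's table),
// so the `ExpnData` itself is stored exactly once, in the crate that
// created the expansion.
fn decode_expn_data(cstore: &CStore, krate: CrateNum, index: u32) -> &str {
    &cstore.crates[krate.0 as usize].expn_data[index as usize]
}
```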

I've included a macros 2.0 test which requires hygiene serialization to compile successfully.
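For reference, such a test is shaped roughly like this (an illustrative sketch; the real test lives at `src/test/ui/hygiene/cross_crate_hygiene.rs`, and `hygiene_dep` here is a hypothetical auxiliary crate):

```rust
// Hypothetical auxiliary crate `hygiene_dep`, built with `#![feature(decl_macro)]`:
//
//     pub macro make_local() {{
//         let x = 42; // `x` is hygienic: callers can't see or shadow it
//         x
//     }}
//
// Downstream crate: resolving `x` inside the expansion requires the
// `SyntaxContext` data serialized into `hygiene_dep`'s metadata.
fn main() {
    let x = 0; // does not collide with the macro's `x`
    assert_eq!(hygiene_dep::make_local!(), 42);
    assert_eq!(x, 0);
}
```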

TODO:

- [x] ~~Determine how many additional `DefId`s we end up creating for `ExpnId`s - this may be significant for `libcore`, which uses macros heavily. Alternatively, we could try to compute a `DefPathHash` without making a corresponding `DefId` - however, this might significantly complicate the implementation.~~ (We no longer create `DefId`s)
- [x] Investigate the overhead of duplicating `SyntaxContextData` in crate metadata.
- [x] Investigate how `resolve_crate_root` behaves with deserialized hygiene data - the current logic may be wrong.
- [x] Add additional tests. The effects of this PR are usually only noticeable when working with headache-inducing macro expansions (e.g. macros expanding to macros), so there are lots of corner cases to test.
- [x] Determine what to do about this:

4774f9b523/src/librustc_resolve/build_reduced_graph.rs (L892)

- [x] Determine if we need to do anything here - I think the fact that `src/test/ui/hygiene/cross_crate_hygiene.rs` passes means that this is working.

3d5d0f898c/src/librustc_resolve/imports.rs (L1389-L1392)
bors 2020-07-27 01:54:27 +00:00
commit fa36f96068
46 changed files with 1167 additions and 359 deletions

View File

@ -9,7 +9,8 @@ use rustc_data_structures::thin_vec::ThinVec;
use rustc_errors::struct_span_err;
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_span::source_map::{respan, DesugaringKind, ForLoopLoc, Span, Spanned};
use rustc_span::hygiene::ForLoopLoc;
use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_target::asm;
use std::collections::hash_map::Entry;

View File

@ -13,7 +13,7 @@ use rustc_data_structures::sync::{self, Lrc};
use rustc_errors::{DiagnosticBuilder, ErrorReported};
use rustc_parse::{self, nt_to_tokenstream, parser, MACRO_ARGUMENTS};
use rustc_session::{parse::ParseSess, Limit};
use rustc_span::def_id::DefId;
use rustc_span::def_id::{DefId, LOCAL_CRATE};
use rustc_span::edition::Edition;
use rustc_span::hygiene::{AstPass, ExpnData, ExpnId, ExpnKind};
use rustc_span::source_map::SourceMap;
@ -873,6 +873,8 @@ impl SyntaxExtension {
local_inner_macros: self.local_inner_macros,
edition: self.edition,
macro_def_id,
krate: LOCAL_CRATE,
orig_id: None,
}
}
}

View File

@ -1,7 +1,7 @@
use crate::base::*;
use crate::config::StripUnconfigured;
use crate::configure;
use crate::hygiene::{ExpnData, ExpnId, ExpnKind, SyntaxContext};
use crate::hygiene::{ExpnData, ExpnKind, SyntaxContext};
use crate::mbe::macro_rules::annotate_err_with_kind;
use crate::module::{parse_external_mod, push_directory, Directory, DirectoryOwnership};
use crate::placeholders::{placeholder, PlaceholderExpander};
@ -28,7 +28,7 @@ use rustc_session::parse::{feature_err, ParseSess};
use rustc_session::Limit;
use rustc_span::source_map::respan;
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::{FileName, Span, DUMMY_SP};
use rustc_span::{ExpnId, FileName, Span, DUMMY_SP};
use smallvec::{smallvec, SmallVec};
use std::io::ErrorKind;

View File

@ -307,11 +307,16 @@ impl<'a> CrateLoader<'a> {
let private_dep =
self.sess.opts.externs.get(&name.as_str()).map(|e| e.is_private_dep).unwrap_or(false);
info!("register crate `{}` (private_dep = {})", crate_root.name(), private_dep);
// Claim this crate number and cache it
let cnum = self.cstore.alloc_new_crate_num();
info!(
"register crate `{}` (cnum = {}. private_dep = {})",
crate_root.name(),
cnum,
private_dep
);
// Maintain a reference to the top most crate.
// Stash paths for top-most crate locally if necessary.
let crate_paths;
@ -339,22 +344,21 @@ impl<'a> CrateLoader<'a> {
None
};
self.cstore.set_crate_data(
let crate_metadata = CrateMetadata::new(
self.sess,
metadata,
crate_root,
raw_proc_macros,
cnum,
CrateMetadata::new(
self.sess,
metadata,
crate_root,
raw_proc_macros,
cnum,
cnum_map,
dep_kind,
source,
private_dep,
host_hash,
),
cnum_map,
dep_kind,
source,
private_dep,
host_hash,
);
self.cstore.set_crate_data(cnum, crate_metadata);
Ok(cnum)
}
@ -569,6 +573,8 @@ impl<'a> CrateLoader<'a> {
let cnum = self.maybe_resolve_crate(dep.name, dep_kind, Some((root, &dep)))?;
crate_num_map.push(cnum);
}
debug!("resolve_crate_deps: cnum_map for {:?} is {:?}", krate, crate_num_map);
Ok(crate_num_map)
}

View File

@ -9,6 +9,7 @@
#![feature(proc_macro_internals)]
#![feature(min_specialization)]
#![feature(stmt_expr_attributes)]
#![feature(never_type)]
#![recursion_limit = "256"]
extern crate proc_macro;

View File

@ -32,18 +32,21 @@ use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_middle::util::common::record_time;
use rustc_serialize::{opaque, Decodable, Decoder, SpecializedDecoder, UseSpecializedDecodable};
use rustc_session::Session;
use rustc_span::hygiene::ExpnDataDecodeMode;
use rustc_span::source_map::{respan, Spanned};
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::{self, hygiene::MacroKind, BytePos, Pos, Span, DUMMY_SP};
use rustc_span::{self, hygiene::MacroKind, BytePos, ExpnId, Pos, Span, SyntaxContext, DUMMY_SP};
use log::debug;
use proc_macro::bridge::client::ProcMacro;
use std::cell::Cell;
use std::io;
use std::mem;
use std::num::NonZeroUsize;
use std::path::Path;
pub use cstore_impl::{provide, provide_extern};
use rustc_span::hygiene::HygieneDecodeContext;
mod cstore_impl;
@ -106,6 +109,13 @@ crate struct CrateMetadata {
/// The hash for the host proc macro. Used to support `-Z dual-proc-macro`.
host_hash: Option<Svh>,
/// Additional data used for decoding `HygieneData` (e.g. `SyntaxContext`
/// and `ExpnId`).
/// Note that we store a `HygieneDecodeContext` for each `CrateMetadata`. This is
/// because `SyntaxContext` ids are not globally unique, so we need
/// to track which ids we've decoded on a per-crate basis.
hygiene_context: HygieneDecodeContext,
// --- Data used only for improving diagnostics ---
/// Information about the `extern crate` item or path that caused this crate to be loaded.
/// If this is `None`, then the crate was injected (e.g., by the allocator).
@ -411,6 +421,7 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for DecodeContext<'a, 'tcx> {
let lo = BytePos::decode(self)?;
let len = BytePos::decode(self)?;
let ctxt = SyntaxContext::decode(self)?;
let hi = lo + len;
let sess = if let Some(sess) = self.sess {
@ -524,7 +535,7 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for DecodeContext<'a, 'tcx> {
let hi =
(hi + source_file.translated_source_file.start_pos) - source_file.original_start_pos;
Ok(Span::with_root_ctxt(lo, hi))
Ok(Span::new(lo, hi, ctxt))
}
}
@ -1120,6 +1131,14 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
!self.is_proc_macro(id) && self.root.tables.mir.get(self, id).is_some()
}
fn module_expansion(&self, id: DefIndex, sess: &Session) -> ExpnId {
if let EntryKind::Mod(m) = self.kind(id) {
m.decode((self, sess)).expansion
} else {
panic!("Expected module, found {:?}", self.local_def_id(id))
}
}
fn get_optimized_mir(&self, tcx: TyCtxt<'tcx>, id: DefIndex) -> Body<'tcx> {
self.root
.tables
@ -1652,6 +1671,7 @@ impl CrateMetadata {
private_dep,
host_hash,
extern_crate: Lock::new(None),
hygiene_context: Default::default(),
}
}
@ -1784,3 +1804,57 @@ fn macro_kind(raw: &ProcMacro) -> MacroKind {
ProcMacro::Bang { .. } => MacroKind::Bang,
}
}
impl<'a, 'tcx> SpecializedDecoder<SyntaxContext> for DecodeContext<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<SyntaxContext, Self::Error> {
let cdata = self.cdata();
let sess = self.sess.unwrap();
let cname = cdata.root.name;
rustc_span::hygiene::decode_syntax_context(self, &cdata.hygiene_context, |_, id| {
debug!("SpecializedDecoder<SyntaxContext>: decoding {}", id);
Ok(cdata
.root
.syntax_contexts
.get(&cdata, id)
.unwrap_or_else(|| panic!("Missing SyntaxContext {:?} for crate {:?}", id, cname))
.decode((&cdata, sess)))
})
}
}
impl<'a, 'tcx> SpecializedDecoder<ExpnId> for DecodeContext<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<ExpnId, Self::Error> {
let local_cdata = self.cdata();
let sess = self.sess.unwrap();
let expn_cnum = Cell::new(None);
let get_ctxt = |cnum| {
expn_cnum.set(Some(cnum));
if cnum == LOCAL_CRATE {
&local_cdata.hygiene_context
} else {
&local_cdata.cstore.get_crate_data(cnum).cdata.hygiene_context
}
};
rustc_span::hygiene::decode_expn_id(
self,
ExpnDataDecodeMode::Metadata(get_ctxt),
|_this, index| {
let cnum = expn_cnum.get().unwrap();
// Lookup local `ExpnData`s in our own crate data. Foreign `ExpnData`s
// are stored in the owning crate, to avoid duplication.
let crate_data = if cnum == LOCAL_CRATE {
local_cdata
} else {
local_cdata.cstore.get_crate_data(cnum)
};
Ok(crate_data
.root
.expn_data
.get(&crate_data, index)
.unwrap()
.decode((&crate_data, sess)))
},
)
}
}

View File

@ -21,9 +21,10 @@ use rustc_middle::ty::{self, TyCtxt};
use rustc_session::utils::NativeLibKind;
use rustc_session::{CrateDisambiguator, Session};
use rustc_span::source_map::{self, Span, Spanned};
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::symbol::Symbol;
use rustc_data_structures::sync::Lrc;
use rustc_span::ExpnId;
use smallvec::SmallVec;
use std::any::Any;
@ -417,13 +418,7 @@ impl CStore {
attr::mark_used(attr);
}
let ident = data
.def_key(id.index)
.disambiguated_data
.data
.get_opt_name()
.map(Ident::with_dummy_span) // FIXME: cross-crate hygiene
.expect("no name in load_macro");
let ident = data.item_ident(id.index, sess);
LoadedMacro::MacroDef(
ast::Item {
@ -454,6 +449,10 @@ impl CStore {
pub fn item_generics_num_lifetimes(&self, def_id: DefId, sess: &Session) -> usize {
self.get_crate_data(def_id.krate).get_generics(def_id.index, sess).own_counts().lifetimes
}
pub fn module_expansion_untracked(&self, def_id: DefId, sess: &Session) -> ExpnId {
self.get_crate_data(def_id.krate).module_expansion(def_id.index, sess)
}
}
impl CrateStore for CStore {

View File

@ -1,4 +1,4 @@
use crate::rmeta::table::FixedSizeEncoding;
use crate::rmeta::table::{FixedSizeEncoding, TableBuilder};
use crate::rmeta::*;
use log::{debug, trace};
@ -30,15 +30,16 @@ use rustc_middle::ty::codec::{self as ty_codec, TyEncoder};
use rustc_middle::ty::{self, SymbolName, Ty, TyCtxt};
use rustc_serialize::{opaque, Encodable, Encoder, SpecializedEncoder, UseSpecializedEncodable};
use rustc_session::config::CrateType;
use rustc_span::hygiene::{ExpnDataEncodeMode, HygieneEncodeContext};
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::{self, ExternalSource, FileName, SourceFile, Span};
use rustc_span::{self, ExternalSource, FileName, SourceFile, Span, SyntaxContext};
use rustc_target::abi::VariantIdx;
use std::hash::Hash;
use std::num::NonZeroUsize;
use std::path::Path;
struct EncodeContext<'tcx> {
struct EncodeContext<'a, 'tcx> {
opaque: opaque::Encoder,
tcx: TyCtxt<'tcx>,
@ -66,6 +67,7 @@ struct EncodeContext<'tcx> {
// with a result containing a foreign `Span`.
required_source_files: Option<GrowableBitSet<usize>>,
is_proc_macro: bool,
hygiene_ctxt: &'a HygieneEncodeContext,
}
macro_rules! encoder_methods {
@ -76,7 +78,7 @@ macro_rules! encoder_methods {
}
}
impl<'tcx> Encoder for EncodeContext<'tcx> {
impl<'a, 'tcx> Encoder for EncodeContext<'a, 'tcx> {
type Error = <opaque::Encoder as Encoder>::Error;
#[inline]
@ -107,13 +109,13 @@ impl<'tcx> Encoder for EncodeContext<'tcx> {
}
}
impl<'tcx, T> SpecializedEncoder<Lazy<T, ()>> for EncodeContext<'tcx> {
impl<'a, 'tcx, T> SpecializedEncoder<Lazy<T, ()>> for EncodeContext<'a, 'tcx> {
fn specialized_encode(&mut self, lazy: &Lazy<T>) -> Result<(), Self::Error> {
self.emit_lazy_distance(*lazy)
}
}
impl<'tcx, T> SpecializedEncoder<Lazy<[T], usize>> for EncodeContext<'tcx> {
impl<'a, 'tcx, T> SpecializedEncoder<Lazy<[T], usize>> for EncodeContext<'a, 'tcx> {
fn specialized_encode(&mut self, lazy: &Lazy<[T]>) -> Result<(), Self::Error> {
self.emit_usize(lazy.meta)?;
if lazy.meta == 0 {
@ -123,7 +125,7 @@ impl<'tcx, T> SpecializedEncoder<Lazy<[T], usize>> for EncodeContext<'tcx> {
}
}
impl<'tcx, I: Idx, T> SpecializedEncoder<Lazy<Table<I, T>, usize>> for EncodeContext<'tcx>
impl<'a, 'tcx, I: Idx, T> SpecializedEncoder<Lazy<Table<I, T>, usize>> for EncodeContext<'a, 'tcx>
where
Option<T>: FixedSizeEncoding,
{
@ -133,14 +135,14 @@ where
}
}
impl<'tcx> SpecializedEncoder<CrateNum> for EncodeContext<'tcx> {
impl<'a, 'tcx> SpecializedEncoder<CrateNum> for EncodeContext<'a, 'tcx> {
#[inline]
fn specialized_encode(&mut self, cnum: &CrateNum) -> Result<(), Self::Error> {
self.emit_u32(cnum.as_u32())
}
}
impl<'tcx> SpecializedEncoder<DefId> for EncodeContext<'tcx> {
impl<'a, 'tcx> SpecializedEncoder<DefId> for EncodeContext<'a, 'tcx> {
#[inline]
fn specialized_encode(&mut self, def_id: &DefId) -> Result<(), Self::Error> {
let DefId { krate, index } = *def_id;
@ -150,14 +152,31 @@ impl<'tcx> SpecializedEncoder<DefId> for EncodeContext<'tcx> {
}
}
impl<'tcx> SpecializedEncoder<DefIndex> for EncodeContext<'tcx> {
impl<'a, 'tcx> SpecializedEncoder<SyntaxContext> for EncodeContext<'a, 'tcx> {
fn specialized_encode(&mut self, ctxt: &SyntaxContext) -> Result<(), Self::Error> {
rustc_span::hygiene::raw_encode_syntax_context(*ctxt, &self.hygiene_ctxt, self)
}
}
impl<'a, 'tcx> SpecializedEncoder<ExpnId> for EncodeContext<'a, 'tcx> {
fn specialized_encode(&mut self, expn: &ExpnId) -> Result<(), Self::Error> {
rustc_span::hygiene::raw_encode_expn_id(
*expn,
&mut self.hygiene_ctxt,
ExpnDataEncodeMode::Metadata,
self,
)
}
}
impl<'a, 'tcx> SpecializedEncoder<DefIndex> for EncodeContext<'a, 'tcx> {
#[inline]
fn specialized_encode(&mut self, def_index: &DefIndex) -> Result<(), Self::Error> {
self.emit_u32(def_index.as_u32())
}
}
impl<'tcx> SpecializedEncoder<Span> for EncodeContext<'tcx> {
impl<'a, 'tcx> SpecializedEncoder<Span> for EncodeContext<'a, 'tcx> {
fn specialized_encode(&mut self, span: &Span) -> Result<(), Self::Error> {
if span.is_dummy() {
return TAG_INVALID_SPAN.encode(self);
@ -234,26 +253,58 @@ impl<'tcx> SpecializedEncoder<Span> for EncodeContext<'tcx> {
let len = hi - lo;
len.encode(self)?;
// Don't serialize any `SyntaxContext`s from a proc-macro crate,
// since we don't load proc-macro dependencies during serialization.
// This means that any hygiene information from macros used *within*
// a proc-macro crate (e.g. invoking a macro that expands to a proc-macro
// definition) will be lost.
//
// This can show up in two ways:
//
// 1. Any hygiene information associated with the identifier of
// a proc macro (e.g. `#[proc_macro] pub fn $name`) will be lost.
// Since proc-macros can only be invoked from a different crate,
// real code should never need to care about this.
//
// 2. Using `Span::def_site` or `Span::mixed_site` will not
// include any hygiene information associated with the definition
// site. This means that a proc-macro cannot emit a `$crate`
// identifier which resolves to one of its dependencies,
// which also should never come up in practice.
//
// Additionally, this affects `Span::parent`, and any other
// span inspection APIs that would otherwise allow traversing
// the `SyntaxContexts` associated with a span.
//
// None of these user-visible effects should result in any
// cross-crate inconsistencies (getting one behavior in the same
// crate, and a different behavior in another crate) due to the
// limited surface that proc-macros can expose.
if self.is_proc_macro {
SyntaxContext::root().encode(self)?;
} else {
span.ctxt.encode(self)?;
}
if tag == TAG_VALID_SPAN_FOREIGN {
// This needs to be two lines to avoid holding the `self.source_file_cache`
// while calling `cnum.encode(self)`
let cnum = self.source_file_cache.0.cnum;
cnum.encode(self)?;
}
Ok(())
// Don't encode the expansion context.
Ok(())
}
}
impl<'tcx> SpecializedEncoder<LocalDefId> for EncodeContext<'tcx> {
impl<'a, 'tcx> SpecializedEncoder<LocalDefId> for EncodeContext<'a, 'tcx> {
#[inline]
fn specialized_encode(&mut self, def_id: &LocalDefId) -> Result<(), Self::Error> {
self.specialized_encode(&def_id.to_def_id())
}
}
impl<'a, 'b, 'tcx> SpecializedEncoder<&'a ty::TyS<'b>> for EncodeContext<'tcx>
impl<'a, 'b, 'c, 'tcx> SpecializedEncoder<&'a ty::TyS<'b>> for EncodeContext<'c, 'tcx>
where
&'a ty::TyS<'b>: UseSpecializedEncodable,
{
@ -264,7 +315,7 @@ where
}
}
impl<'b, 'tcx> SpecializedEncoder<ty::Predicate<'b>> for EncodeContext<'tcx> {
impl<'a, 'b, 'tcx> SpecializedEncoder<ty::Predicate<'b>> for EncodeContext<'a, 'tcx> {
fn specialized_encode(&mut self, predicate: &ty::Predicate<'b>) -> Result<(), Self::Error> {
debug_assert!(self.tcx.lift(predicate).is_some());
let predicate =
@ -275,7 +326,7 @@ impl<'b, 'tcx> SpecializedEncoder<ty::Predicate<'b>> for EncodeContext<'tcx> {
}
}
impl<'tcx> SpecializedEncoder<interpret::AllocId> for EncodeContext<'tcx> {
impl<'a, 'tcx> SpecializedEncoder<interpret::AllocId> for EncodeContext<'a, 'tcx> {
fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
use std::collections::hash_map::Entry;
let index = match self.interpret_allocs.entry(*alloc_id) {
@ -292,13 +343,13 @@ impl<'tcx> SpecializedEncoder<interpret::AllocId> for EncodeContext<'tcx> {
}
}
impl<'tcx> SpecializedEncoder<Fingerprint> for EncodeContext<'tcx> {
impl<'a, 'tcx> SpecializedEncoder<Fingerprint> for EncodeContext<'a, 'tcx> {
fn specialized_encode(&mut self, f: &Fingerprint) -> Result<(), Self::Error> {
f.encode_opaque(&mut self.opaque)
}
}
impl<'tcx, T> SpecializedEncoder<mir::ClearCrossCrate<T>> for EncodeContext<'tcx>
impl<'a, 'tcx, T> SpecializedEncoder<mir::ClearCrossCrate<T>> for EncodeContext<'a, 'tcx>
where
mir::ClearCrossCrate<T>: UseSpecializedEncodable,
{
@ -307,7 +358,7 @@ where
}
}
impl<'tcx> TyEncoder for EncodeContext<'tcx> {
impl<'a, 'tcx> TyEncoder for EncodeContext<'a, 'tcx> {
fn position(&self) -> usize {
self.opaque.position()
}
@ -315,17 +366,17 @@ impl<'tcx> TyEncoder for EncodeContext<'tcx> {
/// Helper trait to allow overloading `EncodeContext::lazy` for iterators.
trait EncodeContentsForLazy<T: ?Sized + LazyMeta> {
fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'tcx>) -> T::Meta;
fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'a, 'tcx>) -> T::Meta;
}
impl<T: Encodable> EncodeContentsForLazy<T> for &T {
fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'tcx>) {
fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'a, 'tcx>) {
self.encode(ecx).unwrap()
}
}
impl<T: Encodable> EncodeContentsForLazy<T> for T {
fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'tcx>) {
fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'a, 'tcx>) {
self.encode(ecx).unwrap()
}
}
@ -335,7 +386,7 @@ where
I: IntoIterator,
I::Item: EncodeContentsForLazy<T>,
{
fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'tcx>) -> usize {
fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'a, 'tcx>) -> usize {
self.into_iter().map(|value| value.encode_contents_for_lazy(ecx)).count()
}
}
@ -352,7 +403,7 @@ macro_rules! record {
}};
}
impl<'tcx> EncodeContext<'tcx> {
impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
fn emit_lazy_distance<T: ?Sized + LazyMeta>(
&mut self,
lazy: Lazy<T>,
@ -478,6 +529,7 @@ impl<'tcx> EncodeContext<'tcx> {
let mut i = self.position();
// Encode the crate deps
let crate_deps = self.encode_crate_deps();
let dylib_dependency_formats = self.encode_dylib_dependency_formats();
let dep_bytes = self.position() - i;
@ -556,12 +608,23 @@ impl<'tcx> EncodeContext<'tcx> {
let proc_macro_data_bytes = self.position() - i;
// Encode exported symbols info. This is prefetched in `encode_metadata` so we encode
// this late to give the prefetching as much time as possible to complete.
// this as late as possible to give the prefetching as much time as possible to complete.
i = self.position();
let exported_symbols = tcx.exported_symbols(LOCAL_CRATE);
let exported_symbols = self.encode_exported_symbols(&exported_symbols);
let exported_symbols_bytes = self.position() - i;
// Encode the hygiene data,
// IMPORTANT: this *must* be the last thing that we encode (other than `SourceMap`). The process
// of encoding other items (e.g. `optimized_mir`) may cause us to load
// data from the incremental cache. If this causes us to deserialize a `Span`,
// then we may load additional `SyntaxContext`s into the global `HygieneData`.
// Therefore, we need to encode the hygiene data last to ensure that we encode
// any `SyntaxContext`s that might be used.
i = self.position();
let (syntax_contexts, expn_data) = self.encode_hygiene();
let hygiene_bytes = self.position() - i;
// Encode source_map. This needs to be done last,
// since encoding `Span`s tells us which `SourceFiles` we actually
// need to encode.
@ -618,6 +681,8 @@ impl<'tcx> EncodeContext<'tcx> {
exported_symbols,
interpret_alloc_index,
tables,
syntax_contexts,
expn_data,
});
let total_bytes = self.position();
@ -643,6 +708,7 @@ impl<'tcx> EncodeContext<'tcx> {
println!(" proc-macro-data-bytes: {}", proc_macro_data_bytes);
println!(" item bytes: {}", item_bytes);
println!(" table bytes: {}", tables_bytes);
println!(" hygiene bytes: {}", hygiene_bytes);
println!(" zero bytes: {}", zero_bytes);
println!(" total bytes: {}", total_bytes);
}
@ -651,7 +717,7 @@ impl<'tcx> EncodeContext<'tcx> {
}
}
impl EncodeContext<'tcx> {
impl EncodeContext<'a, 'tcx> {
fn encode_variances_of(&mut self, def_id: DefId) {
debug!("EncodeContext::encode_variances_of({:?})", def_id);
record!(self.tables.variances[def_id] <- &self.tcx.variances_of(def_id)[..]);
@ -752,11 +818,12 @@ impl EncodeContext<'tcx> {
vis: &hir::Visibility<'_>,
) {
let tcx = self.tcx;
let def_id = tcx.hir().local_def_id(id);
let local_def_id = tcx.hir().local_def_id(id);
let def_id = local_def_id.to_def_id();
debug!("EncodeContext::encode_info_for_mod({:?})", def_id);
let data = ModData {
reexports: match tcx.module_exports(def_id) {
reexports: match tcx.module_exports(local_def_id) {
Some(exports) => {
let hir_map = self.tcx.hir();
self.lazy(
@ -767,10 +834,9 @@ impl EncodeContext<'tcx> {
}
_ => Lazy::empty(),
},
expansion: tcx.hir().definitions().expansion_that_defined(local_def_id),
};
let def_id = def_id.to_def_id();
record!(self.tables.kind[def_id] <- EntryKind::Mod(self.lazy(data)));
record!(self.tables.visibility[def_id] <- ty::Visibility::from_hir(vis, id, self.tcx));
record!(self.tables.span[def_id] <- self.tcx.def_span(def_id));
@ -1425,6 +1491,25 @@ impl EncodeContext<'tcx> {
self.lazy(foreign_modules.iter().cloned())
}
fn encode_hygiene(&mut self) -> (SyntaxContextTable, ExpnDataTable) {
let mut syntax_contexts: TableBuilder<_, _> = Default::default();
let mut expn_data_table: TableBuilder<_, _> = Default::default();
let _: Result<(), !> = self.hygiene_ctxt.encode(
&mut (&mut *self, &mut syntax_contexts, &mut expn_data_table),
|(this, syntax_contexts, _), index, ctxt_data| {
syntax_contexts.set(index, this.lazy(ctxt_data));
Ok(())
},
|(this, _, expn_data_table), index, expn_data| {
expn_data_table.set(index, this.lazy(expn_data));
Ok(())
},
);
(syntax_contexts.encode(&mut self.opaque), expn_data_table.encode(&mut self.opaque))
}
fn encode_proc_macros(&mut self) -> Option<Lazy<[DefIndex]>> {
let is_proc_macro = self.tcx.sess.crate_types().contains(&CrateType::ProcMacro);
if is_proc_macro {
@ -1614,7 +1699,7 @@ impl EncodeContext<'tcx> {
}
// FIXME(eddyb) make metadata encoding walk over all definitions, instead of HIR.
impl Visitor<'tcx> for EncodeContext<'tcx> {
impl Visitor<'tcx> for EncodeContext<'a, 'tcx> {
type Map = Map<'tcx>;
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
@ -1652,7 +1737,7 @@ impl Visitor<'tcx> for EncodeContext<'tcx> {
}
}
impl EncodeContext<'tcx> {
impl EncodeContext<'a, 'tcx> {
fn encode_fields(&mut self, adt_def: &ty::AdtDef) {
for (variant_index, variant) in adt_def.variants.iter_enumerated() {
for (field_index, _field) in variant.fields.iter().enumerate() {
@ -1906,6 +1991,7 @@ fn encode_metadata_impl(tcx: TyCtxt<'_>) -> EncodedMetadata {
encoder.emit_raw_bytes(&[0, 0, 0, 0]);
let source_map_files = tcx.sess.source_map().files();
let hygiene_ctxt = HygieneEncodeContext::default();
let mut ecx = EncodeContext {
opaque: encoder,
@ -1919,6 +2005,7 @@ fn encode_metadata_impl(tcx: TyCtxt<'_>) -> EncodedMetadata {
interpret_allocs_inverse: Default::default(),
required_source_files: Some(GrowableBitSet::with_capacity(source_map_files.len())),
is_proc_macro: tcx.sess.crate_types().contains(&CrateType::ProcMacro),
hygiene_ctxt: &hygiene_ctxt,
};
drop(source_map_files);

View File

@ -20,7 +20,7 @@ use rustc_session::config::SymbolManglingVersion;
use rustc_session::CrateDisambiguator;
use rustc_span::edition::Edition;
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::{self, Span};
use rustc_span::{self, ExpnData, ExpnId, Span};
use rustc_target::spec::{PanicStrategy, TargetTriple};
use std::marker::PhantomData;
@ -28,6 +28,7 @@ use std::num::NonZeroUsize;
pub use decoder::{provide, provide_extern};
crate use decoder::{CrateMetadata, CrateNumMap, MetadataBlob};
use rustc_span::hygiene::SyntaxContextData;
mod decoder;
mod encoder;
@ -168,6 +169,9 @@ macro_rules! Lazy {
($T:ty) => {Lazy<$T, ()>};
}
type SyntaxContextTable = Lazy<Table<u32, Lazy<SyntaxContextData>>>;
type ExpnDataTable = Lazy<Table<u32, Lazy<ExpnData>>>;
#[derive(RustcEncodable, RustcDecodable)]
crate struct CrateRoot<'tcx> {
name: Symbol,
@ -202,6 +206,10 @@ crate struct CrateRoot<'tcx> {
proc_macro_data: Option<Lazy<[DefIndex]>>,
exported_symbols: Lazy!([(ExportedSymbol<'tcx>, SymbolExportLevel)]),
syntax_contexts: SyntaxContextTable,
expn_data: ExpnDataTable,
source_map: Lazy<[rustc_span::SourceFile]>,
compiler_builtins: bool,
@ -322,6 +330,7 @@ struct RenderedConst(String);
#[derive(RustcEncodable, RustcDecodable)]
struct ModData {
reexports: Lazy<[Export<hir::HirId>]>,
expansion: ExpnId,
}
#[derive(RustcEncodable, RustcDecodable)]

View File

@ -155,7 +155,7 @@ impl<I: Idx, T> TableBuilder<I, T>
where
Option<T>: FixedSizeEncoding,
{
pub(super) fn set(&mut self, i: I, value: T) {
pub(crate) fn set(&mut self, i: I, value: T) {
// FIXME(eddyb) investigate more compact encodings for sparse tables.
// On the PR @michaelwoerister mentioned:
// > Space requirements could perhaps be optimized by using the HAMT `popcnt`
@ -170,7 +170,7 @@ where
Some(value).write_to_bytes_at(&mut self.bytes, i);
}
pub(super) fn encode(&self, buf: &mut Encoder) -> Lazy<Table<I, T>> {
pub(crate) fn encode(&self, buf: &mut Encoder) -> Lazy<Table<I, T>> {
let pos = buf.position();
buf.emit_raw_bytes(&self.bytes);
Lazy::from_position_and_meta(NonZeroUsize::new(pos as usize).unwrap(), self.bytes.len())

View File

@ -14,6 +14,7 @@ use rustc_span::source_map::SourceMap;
use rustc_span::symbol::Symbol;
use rustc_span::{BytePos, CachingSourceMapView, SourceFile};
use rustc_span::def_id::{CrateNum, CRATE_DEF_INDEX};
use smallvec::SmallVec;
use std::cmp::Ord;
@ -229,6 +230,12 @@ impl<'a> rustc_span::HashStableContext for StableHashingContext<'a> {
self.hash_spans
}
#[inline]
fn hash_crate_num(&mut self, cnum: CrateNum, hasher: &mut StableHasher) {
let hcx = self;
hcx.def_path_hash(DefId { krate: cnum, index: CRATE_DEF_INDEX }).hash_stable(hcx, hasher);
}
#[inline]
fn hash_def_id(&mut self, def_id: DefId, hasher: &mut StableHasher) {
let hcx = self;

View File

@ -147,13 +147,6 @@ impl<'a> ToStableHashKey<StableHashingContext<'a>> for LocalDefId {
}
}
impl<'a> HashStable<StableHashingContext<'a>> for CrateNum {
#[inline]
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
hcx.def_path_hash(DefId { krate: *self, index: CRATE_DEF_INDEX }).hash_stable(hcx, hasher);
}
}
impl<'a> ToStableHashKey<StableHashingContext<'a>> for CrateNum {
type KeyType = DefPathHash;

View File

@ -346,6 +346,6 @@ pub fn in_external_macro(sess: &Session, span: Span) -> bool {
// Dummy span for the `def_site` means it's an external macro.
expn_data.def_site.is_dummy() || sess.source_map().is_imported(expn_data.def_site)
}
ExpnKind::Macro(..) => true, // definitely a plugin
ExpnKind::Macro { .. } => true, // definitely a plugin
}
}

View File

@ -17,22 +17,24 @@ use rustc_serialize::{
UseSpecializedDecodable, UseSpecializedEncodable,
};
use rustc_session::{CrateDisambiguator, Session};
use rustc_span::hygiene::{ExpnId, SyntaxContext};
use rustc_span::hygiene::{
ExpnDataDecodeMode, ExpnDataEncodeMode, ExpnId, HygieneDecodeContext, HygieneEncodeContext,
SyntaxContext, SyntaxContextData,
};
use rustc_span::source_map::{SourceMap, StableSourceFileId};
use rustc_span::symbol::Ident;
use rustc_span::CachingSourceMapView;
use rustc_span::{BytePos, SourceFile, Span, DUMMY_SP};
use rustc_span::{BytePos, ExpnData, SourceFile, Span, DUMMY_SP};
use std::mem;
const TAG_FILE_FOOTER: u128 = 0xC0FFEE_C0FFEE_C0FFEE_C0FFEE_C0FFEE;
const TAG_NO_EXPN_DATA: u8 = 0;
const TAG_EXPN_DATA_SHORTHAND: u8 = 1;
const TAG_EXPN_DATA_INLINE: u8 = 2;
const TAG_VALID_SPAN: u8 = 0;
const TAG_INVALID_SPAN: u8 = 1;
const TAG_SYNTAX_CONTEXT: u8 = 0;
const TAG_EXPN_DATA: u8 = 1;
/// Provides an interface to incremental compilation data cached from the
/// previous compilation session. This data will eventually include the results
/// of a few selected queries (like `typeck` and `mir_optimized`) and
@ -53,7 +55,6 @@ pub struct OnDiskCache<'sess> {
// Caches that are populated lazily during decoding.
file_index_to_file: Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
synthetic_syntax_contexts: Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
// A map from dep-node to the position of the cached query result in
// `serialized_data`.
@ -64,9 +65,28 @@ pub struct OnDiskCache<'sess> {
prev_diagnostics_index: FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,
alloc_decoding_state: AllocDecodingState,
// A map from syntax context ids to the position of their associated
// `SyntaxContextData`. We use a `u32` instead of a `SyntaxContext`
// to represent the fact that we are storing *encoded* ids. When we decode
// a `SyntaxContext`, a new id will be allocated from the global `HygieneData`,
// which will almost certainly be different than the serialized id.
syntax_contexts: FxHashMap<u32, AbsoluteBytePos>,
// A map from the `DefPathHash` of an `ExpnId` to the position
// of their associated `ExpnData`. Ideally, we would store a `DefId`,
// but we need to decode this before we've constructed a `TyCtxt` (which
// makes it difficult to decode a `DefId`).
// Note that these `DefPathHashes` correspond to both local and foreign
// `ExpnData` (e.g. `ExpnData.krate` may not be `LOCAL_CRATE`). Alternatively,
// we could look up the `ExpnData` from the metadata of foreign crates,
// but it seemed easier to have `OnDiskCache` be independent of the `CStore`.
expn_data: FxHashMap<u32, AbsoluteBytePos>,
// Additional information used when decoding hygiene data.
hygiene_context: HygieneDecodeContext,
}
// This type is used only for (de-)serialization.
// This type is used only for serialization and deserialization.
#[derive(RustcEncodable, RustcDecodable)]
struct Footer {
file_index_to_stable_id: FxHashMap<SourceFileIndex, StableSourceFileId>,
@ -75,6 +95,10 @@ struct Footer {
diagnostics_index: EncodedQueryResultIndex,
// The location of all allocations.
interpret_alloc_index: Vec<u32>,
// See `OnDiskCache.syntax_contexts`
syntax_contexts: FxHashMap<u32, AbsoluteBytePos>,
// See `OnDiskCache.expn_data`
expn_data: FxHashMap<u32, AbsoluteBytePos>,
}
type EncodedQueryResultIndex = Vec<(SerializedDepNodeIndex, AbsoluteBytePos)>;
@ -116,6 +140,7 @@ impl<'sess> OnDiskCache<'sess> {
// Decode the file footer, which contains all the lookup tables, etc.
decoder.set_position(footer_pos);
decode_tagged(&mut decoder, TAG_FILE_FOOTER)
.expect("error while trying to decode footer position")
};
@ -130,8 +155,10 @@ impl<'sess> OnDiskCache<'sess> {
current_diagnostics: Default::default(),
query_result_index: footer.query_result_index.into_iter().collect(),
prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(),
synthetic_syntax_contexts: Default::default(),
alloc_decoding_state: AllocDecodingState::new(footer.interpret_alloc_index),
syntax_contexts: footer.syntax_contexts,
expn_data: footer.expn_data,
hygiene_context: Default::default(),
}
}
@ -146,8 +173,10 @@ impl<'sess> OnDiskCache<'sess> {
current_diagnostics: Default::default(),
query_result_index: Default::default(),
prev_diagnostics_index: Default::default(),
synthetic_syntax_contexts: Default::default(),
alloc_decoding_state: AllocDecodingState::new(Vec::new()),
syntax_contexts: FxHashMap::default(),
expn_data: FxHashMap::default(),
hygiene_context: Default::default(),
}
}
@ -175,16 +204,18 @@ impl<'sess> OnDiskCache<'sess> {
(file_to_file_index, file_index_to_stable_id)
};
let hygiene_encode_context = HygieneEncodeContext::default();
let mut encoder = CacheEncoder {
tcx,
encoder,
type_shorthands: Default::default(),
predicate_shorthands: Default::default(),
expn_data_shorthands: Default::default(),
interpret_allocs: Default::default(),
interpret_allocs_inverse: Vec::new(),
source_map: CachingSourceMapView::new(tcx.sess.source_map()),
file_to_file_index,
hygiene_context: &hygiene_encode_context,
};
// Load everything into memory so we can write it out to the on-disk
@ -264,7 +295,29 @@ impl<'sess> OnDiskCache<'sess> {
})
.collect();
// Encode the file footer.
let mut syntax_contexts = FxHashMap::default();
let mut expn_ids = FxHashMap::default();
// Encode all hygiene data (`SyntaxContextData` and `ExpnData`) from the current
// session.
hygiene_encode_context.encode(
&mut encoder,
|encoder, index, ctxt_data| {
let pos = AbsoluteBytePos::new(encoder.position());
encoder.encode_tagged(TAG_SYNTAX_CONTEXT, ctxt_data)?;
syntax_contexts.insert(index, pos);
Ok(())
},
|encoder, index, expn_data| {
let pos = AbsoluteBytePos::new(encoder.position());
encoder.encode_tagged(TAG_EXPN_DATA, expn_data)?;
expn_ids.insert(index, pos);
Ok(())
},
)?;
// Encode the file footer.
let footer_pos = encoder.position() as u64;
encoder.encode_tagged(
TAG_FILE_FOOTER,
@ -274,6 +327,8 @@ impl<'sess> OnDiskCache<'sess> {
query_result_index,
diagnostics_index,
interpret_alloc_index,
syntax_contexts,
expn_data: expn_ids,
},
)?;
@ -367,6 +422,21 @@ impl<'sess> OnDiskCache<'sess> {
{
let pos = index.get(&dep_node_index).cloned()?;
self.with_decoder(tcx, pos, |decoder| match decode_tagged(decoder, dep_node_index) {
Ok(v) => Some(v),
Err(e) => bug!("could not decode cached {}: {}", debug_tag, e),
})
}
fn with_decoder<'tcx, T, F: FnOnce(&mut CacheDecoder<'sess, 'tcx>) -> T>(
&'sess self,
tcx: TyCtxt<'tcx>,
pos: AbsoluteBytePos,
f: F,
) -> T
where
T: Decodable,
{
let cnum_map =
self.cnum_map.get_or_init(|| Self::compute_cnum_map(tcx, &self.prev_cnums[..]));
@ -375,16 +445,14 @@ impl<'sess> OnDiskCache<'sess> {
opaque: opaque::Decoder::new(&self.serialized_data[..], pos.to_usize()),
source_map: self.source_map,
cnum_map,
synthetic_syntax_contexts: &self.synthetic_syntax_contexts,
file_index_to_file: &self.file_index_to_file,
file_index_to_stable_id: &self.file_index_to_stable_id,
alloc_decoding_session: self.alloc_decoding_state.new_decoding_session(),
syntax_contexts: &self.syntax_contexts,
expn_data: &self.expn_data,
hygiene_context: &self.hygiene_context,
};
match decode_tagged(&mut decoder, dep_node_index) {
Ok(v) => Some(v),
Err(e) => bug!("could not decode cached {}: {}", debug_tag, e),
}
f(&mut decoder)
}
// This function builds mapping from previous-session-`CrateNum` to
@ -430,10 +498,12 @@ struct CacheDecoder<'a, 'tcx> {
opaque: opaque::Decoder<'a>,
source_map: &'a SourceMap,
cnum_map: &'a IndexVec<CrateNum, Option<CrateNum>>,
synthetic_syntax_contexts: &'a Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
file_index_to_file: &'a Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
file_index_to_stable_id: &'a FxHashMap<SourceFileIndex, StableSourceFileId>,
alloc_decoding_session: AllocDecodingSession<'a>,
syntax_contexts: &'a FxHashMap<u32, AbsoluteBytePos>,
expn_data: &'a FxHashMap<u32, AbsoluteBytePos>,
hygiene_context: &'a HygieneDecodeContext,
}
impl<'a, 'tcx> CacheDecoder<'a, 'tcx> {
@ -577,6 +647,43 @@ impl<'a, 'tcx> TyDecoder<'tcx> for CacheDecoder<'a, 'tcx> {
implement_ty_decoder!(CacheDecoder<'a, 'tcx>);
impl<'a, 'tcx> SpecializedDecoder<SyntaxContext> for CacheDecoder<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<SyntaxContext, Self::Error> {
let syntax_contexts = self.syntax_contexts;
rustc_span::hygiene::decode_syntax_context(self, self.hygiene_context, |this, id| {
// This closure is invoked if we haven't already decoded the data for the `SyntaxContext` we are deserializing.
// We look up the position of the associated `SyntaxContextData` and decode it.
let pos = syntax_contexts.get(&id).unwrap();
this.with_position(pos.to_usize(), |decoder| {
let data: SyntaxContextData = decode_tagged(decoder, TAG_SYNTAX_CONTEXT)?;
Ok(data)
})
})
}
}
impl<'a, 'tcx> SpecializedDecoder<ExpnId> for CacheDecoder<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<ExpnId, Self::Error> {
let expn_data = self.expn_data;
rustc_span::hygiene::decode_expn_id(
self,
ExpnDataDecodeMode::incr_comp(self.hygiene_context),
|this, index| {
// This closure is invoked if we haven't already decoded the data for the `ExpnId` we are deserializing.
// We look up the position of the associated `ExpnData` and decode it.
let pos = expn_data
.get(&index)
.unwrap_or_else(|| panic!("Bad index {:?} (map {:?})", index, expn_data));
this.with_position(pos.to_usize(), |decoder| {
let data: ExpnData = decode_tagged(decoder, TAG_EXPN_DATA)?;
Ok(data)
})
},
)
}
}
impl<'a, 'tcx> SpecializedDecoder<interpret::AllocId> for CacheDecoder<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
let alloc_decoding_session = self.alloc_decoding_session;
@ -598,48 +705,13 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx> {
let line_lo = usize::decode(self)?;
let col_lo = BytePos::decode(self)?;
let len = BytePos::decode(self)?;
let ctxt = SyntaxContext::decode(self)?;
let file_lo = self.file_index_to_file(file_lo_index);
let lo = file_lo.lines[line_lo - 1] + col_lo;
let hi = lo + len;
let expn_data_tag = u8::decode(self)?;
// FIXME(mw): This method does not restore `ExpnData::parent` or
// `SyntaxContextData::prev_ctxt` or `SyntaxContextData::opaque`. These things
// don't seem to be used after HIR lowering, so everything should be fine
// until we want incremental compilation to serialize Spans that we need
// full hygiene information for.
let location = || Span::with_root_ctxt(lo, hi);
let recover_from_expn_data = |this: &Self, expn_data, transparency, pos| {
let span = location().fresh_expansion_with_transparency(expn_data, transparency);
this.synthetic_syntax_contexts.borrow_mut().insert(pos, span.ctxt());
span
};
Ok(match expn_data_tag {
TAG_NO_EXPN_DATA => location(),
TAG_EXPN_DATA_INLINE => {
let (expn_data, transparency) = Decodable::decode(self)?;
recover_from_expn_data(
self,
expn_data,
transparency,
AbsoluteBytePos::new(self.opaque.position()),
)
}
TAG_EXPN_DATA_SHORTHAND => {
let pos = AbsoluteBytePos::decode(self)?;
let cached_ctxt = self.synthetic_syntax_contexts.borrow().get(&pos).cloned();
if let Some(ctxt) = cached_ctxt {
Span::new(lo, hi, ctxt)
} else {
let (expn_data, transparency) =
self.with_position(pos.to_usize(), |this| Decodable::decode(this))?;
recover_from_expn_data(self, expn_data, transparency, pos)
}
}
_ => unreachable!(),
})
Ok(Span::new(lo, hi, ctxt))
}
}
@ -695,11 +767,11 @@ struct CacheEncoder<'a, 'tcx, E: ty_codec::TyEncoder> {
encoder: &'a mut E,
type_shorthands: FxHashMap<Ty<'tcx>, usize>,
predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
expn_data_shorthands: FxHashMap<ExpnId, AbsoluteBytePos>,
interpret_allocs: FxHashMap<interpret::AllocId, usize>,
interpret_allocs_inverse: Vec<interpret::AllocId>,
source_map: CachingSourceMapView<'tcx>,
file_to_file_index: FxHashMap<*const SourceFile, SourceFileIndex>,
hygiene_context: &'a HygieneEncodeContext,
}
impl<'a, 'tcx, E> CacheEncoder<'a, 'tcx, E>
@ -750,6 +822,29 @@ where
}
}
impl<'a, 'tcx, E> SpecializedEncoder<SyntaxContext> for CacheEncoder<'a, 'tcx, E>
where
E: 'a + TyEncoder,
{
fn specialized_encode(&mut self, ctxt: &SyntaxContext) -> Result<(), Self::Error> {
rustc_span::hygiene::raw_encode_syntax_context(*ctxt, self.hygiene_context, self)
}
}
impl<'a, 'tcx, E> SpecializedEncoder<ExpnId> for CacheEncoder<'a, 'tcx, E>
where
E: 'a + TyEncoder,
{
fn specialized_encode(&mut self, expn: &ExpnId) -> Result<(), Self::Error> {
rustc_span::hygiene::raw_encode_expn_id(
*expn,
self.hygiene_context,
ExpnDataEncodeMode::IncrComp,
self,
)
}
}
impl<'a, 'tcx, E> SpecializedEncoder<Span> for CacheEncoder<'a, 'tcx, E>
where
E: 'a + TyEncoder,
@ -779,21 +874,8 @@ where
line_lo.encode(self)?;
col_lo.encode(self)?;
len.encode(self)?;
if span_data.ctxt == SyntaxContext::root() {
TAG_NO_EXPN_DATA.encode(self)
} else {
let (expn_id, transparency, expn_data) = span_data.ctxt.outer_mark_with_data();
if let Some(pos) = self.expn_data_shorthands.get(&expn_id).cloned() {
TAG_EXPN_DATA_SHORTHAND.encode(self)?;
pos.encode(self)
} else {
TAG_EXPN_DATA_INLINE.encode(self)?;
let pos = AbsoluteBytePos::new(self.position());
self.expn_data_shorthands.insert(expn_id, pos);
(expn_data, transparency).encode(self)
}
}
span_data.ctxt.encode(self)?;
Ok(())
}
}

View File

@ -35,7 +35,7 @@ use rustc_middle::ty;
use rustc_span::hygiene::{ExpnId, MacroKind};
use rustc_span::source_map::{respan, Spanned};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
use rustc_span::Span;
use log::debug;
use std::cell::Cell;
@ -130,8 +130,8 @@ impl<'a> Resolver<'a> {
parent,
kind,
def_id,
ExpnId::root(),
DUMMY_SP,
self.cstore().module_expansion_untracked(def_id, &self.session),
self.cstore().get_span_untracked(def_id, &self.session),
));
self.extern_module_map.insert(def_id, module);
module
@ -888,7 +888,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
fn build_reduced_graph_for_external_crate_res(&mut self, child: Export<NodeId>) {
let parent = self.parent_scope.module;
let Export { ident, res, vis, span } = child;
let expansion = ExpnId::root(); // FIXME(jseyfried) intercrate hygiene
let expansion = self.parent_scope.expansion;
// Record primary definitions.
match res {
Res::Def(kind @ (DefKind::Mod | DefKind::Enum | DefKind::Trait), def_id) => {

View File

@ -434,7 +434,7 @@ impl ModuleKind {
///
/// Multiple bindings in the same module can have the same key (in a valid
/// program) if all but one of them come from glob imports.
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct BindingKey {
/// The identifier for the binding, aways the `normalize_to_macros_2_0` version of the
/// identifier.
@ -1988,6 +1988,7 @@ impl<'a> Resolver<'a> {
}
fn resolve_crate_root(&mut self, ident: Ident) -> Module<'a> {
debug!("resolve_crate_root({:?})", ident);
let mut ctxt = ident.span.ctxt();
let mark = if ident.name == kw::DollarCrate {
// When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
@ -1997,6 +1998,10 @@ impl<'a> Resolver<'a> {
// definitions actually produced by `macro` and `macro` definitions produced by
// `macro_rules!`, but at least such configurations are not stable yet.
ctxt = ctxt.normalize_to_macro_rules();
debug!(
"resolve_crate_root: marks={:?}",
ctxt.marks().into_iter().map(|(i, t)| (i.expn_data(), t)).collect::<Vec<_>>()
);
let mut iter = ctxt.marks().into_iter().rev().peekable();
let mut result = None;
// Find the last opaque mark from the end if it exists.
@ -2008,6 +2013,11 @@ impl<'a> Resolver<'a> {
break;
}
}
debug!(
"resolve_crate_root: found opaque mark {:?} {:?}",
result,
result.map(|r| r.expn_data())
);
// Then find the last semi-transparent mark from the end if it exists.
for (mark, transparency) in iter {
if transparency == Transparency::SemiTransparent {
@ -2016,16 +2026,36 @@ impl<'a> Resolver<'a> {
break;
}
}
debug!(
"resolve_crate_root: found semi-transparent mark {:?} {:?}",
result,
result.map(|r| r.expn_data())
);
result
} else {
debug!("resolve_crate_root: not DollarCrate");
ctxt = ctxt.normalize_to_macros_2_0();
ctxt.adjust(ExpnId::root())
};
let module = match mark {
Some(def) => self.macro_def_scope(def),
None => return self.graph_root,
None => {
debug!(
"resolve_crate_root({:?}): found no mark (ident.span = {:?})",
ident, ident.span
);
return self.graph_root;
}
};
self.get_module(DefId { index: CRATE_DEF_INDEX, ..module.normal_ancestor_id })
let module = self.get_module(DefId { index: CRATE_DEF_INDEX, ..module.normal_ancestor_id });
debug!(
"resolve_crate_root({:?}): got module {:?} ({:?}) (ident.span = {:?})",
ident,
module,
module.kind.name(),
ident.span
);
module
}
fn resolve_self(&mut self, ctxt: &mut SyntaxContext, module: Module<'a>) -> Module<'a> {

View File

@ -789,7 +789,7 @@ impl<'tcx> SaveContext<'tcx> {
let callee = span.source_callee()?;
let mac_name = match callee.kind {
ExpnKind::Macro(mac_kind, name) => match mac_kind {
ExpnKind::Macro(kind, name) => match kind {
MacroKind::Bang => name,
// Ignore attribute macros, their spans are usually mangled

View File

@ -247,3 +247,9 @@ impl<CTX: HashStableContext> HashStable<CTX> for DefId {
hcx.hash_def_id(*self, hasher)
}
}
impl<CTX: HashStableContext> HashStable<CTX> for CrateNum {
fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
hcx.hash_crate_num(*self, hasher)
}
}

View File

@ -24,24 +24,27 @@
// because getting it wrong can lead to nested `HygieneData::with` calls that
// trigger runtime aborts. (Fortunately these are obvious and easy to fix.)
use crate::def_id::{DefId, CRATE_DEF_INDEX};
use crate::edition::Edition;
use crate::symbol::{kw, sym, Symbol};
use crate::SESSION_GLOBALS;
use crate::{Span, DUMMY_SP};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use crate::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use log::*;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::sync::{Lock, Lrc};
use rustc_macros::HashStable_Generic;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use rustc_serialize::{
Decodable, Decoder, Encodable, Encoder, UseSpecializedDecodable, UseSpecializedEncodable,
};
use std::fmt;
/// A `SyntaxContext` represents a chain of pairs `(ExpnId, Transparency)` named "marks".
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SyntaxContext(u32);
#[derive(Debug)]
struct SyntaxContextData {
#[derive(Debug, RustcEncodable, RustcDecodable, Clone)]
pub struct SyntaxContextData {
outer_expn: ExpnId,
outer_transparency: Transparency,
parent: SyntaxContext,
@ -77,6 +80,8 @@ pub enum Transparency {
Opaque,
}
pub(crate) const NUM_TRANSPARENCIES: usize = 3;
impl ExpnId {
pub fn fresh(expn_data: Option<ExpnData>) -> Self {
HygieneData::with(|data| data.fresh_expn(expn_data))
@ -104,10 +109,11 @@ impl ExpnId {
}
#[inline]
pub fn set_expn_data(self, expn_data: ExpnData) {
pub fn set_expn_data(self, mut expn_data: ExpnData) {
HygieneData::with(|data| {
let old_expn_data = &mut data.expn_data[self.0 as usize];
assert!(old_expn_data.is_none(), "expansion data is reset for an expansion ID");
expn_data.orig_id.replace(self.as_u32()).expect_none("orig_id should be None");
*old_expn_data = Some(expn_data);
})
}
@ -143,7 +149,7 @@ impl ExpnId {
}
#[derive(Debug)]
crate struct HygieneData {
pub struct HygieneData {
/// Each expansion should have an associated expansion data, but sometimes there's a delay
/// between creation of an expansion ID and obtaining its data (e.g. macros are collected
/// first and then resolved later), so we use an `Option` here.
@ -154,13 +160,16 @@ crate struct HygieneData {
impl HygieneData {
crate fn new(edition: Edition) -> Self {
let mut root_data = ExpnData::default(
ExpnKind::Root,
DUMMY_SP,
edition,
Some(DefId::local(CRATE_DEF_INDEX)),
);
root_data.orig_id = Some(0);
HygieneData {
expn_data: vec![Some(ExpnData::default(
ExpnKind::Root,
DUMMY_SP,
edition,
Some(DefId::local(CRATE_DEF_INDEX)),
))],
expn_data: vec![Some(root_data)],
syntax_context_data: vec![SyntaxContextData {
outer_expn: ExpnId::root(),
outer_transparency: Transparency::Opaque,
@ -173,13 +182,17 @@ impl HygieneData {
}
}
fn with<T, F: FnOnce(&mut HygieneData) -> T>(f: F) -> T {
pub fn with<T, F: FnOnce(&mut HygieneData) -> T>(f: F) -> T {
SESSION_GLOBALS.with(|session_globals| f(&mut *session_globals.hygiene_data.borrow_mut()))
}
fn fresh_expn(&mut self, expn_data: Option<ExpnData>) -> ExpnId {
fn fresh_expn(&mut self, mut expn_data: Option<ExpnData>) -> ExpnId {
let raw_id = self.expn_data.len() as u32;
if let Some(data) = expn_data.as_mut() {
data.orig_id.replace(raw_id).expect_none("orig_id should be None");
}
self.expn_data.push(expn_data);
ExpnId(self.expn_data.len() as u32 - 1)
ExpnId(raw_id)
}
fn expn_data(&self, expn_id: ExpnId) -> &ExpnData {
@ -226,6 +239,7 @@ impl HygieneData {
fn marks(&self, mut ctxt: SyntaxContext) -> Vec<(ExpnId, Transparency)> {
let mut marks = Vec::new();
while ctxt != SyntaxContext::root() {
debug!("marks: getting parent of {:?}", ctxt);
marks.push(self.outer_mark(ctxt));
ctxt = self.parent_ctxt(ctxt);
}
@ -234,8 +248,14 @@ impl HygieneData {
}
fn walk_chain(&self, mut span: Span, to: SyntaxContext) -> Span {
debug!("walk_chain({:?}, {:?})", span, to);
debug!("walk_chain: span ctxt = {:?}", span.ctxt());
while span.from_expansion() && span.ctxt() != to {
span = self.expn_data(self.outer_expn(span.ctxt())).call_site;
let outer_expn = self.outer_expn(span.ctxt());
debug!("walk_chain({:?}): outer_expn={:?}", span, outer_expn);
let expn_data = self.expn_data(outer_expn);
debug!("walk_chain({:?}): expn_data={:?}", span, expn_data);
span = expn_data.call_site;
}
span
}
@ -682,8 +702,25 @@ pub struct ExpnData {
/// The `DefId` of the macro being invoked,
/// if this `ExpnData` corresponds to a macro invocation
pub macro_def_id: Option<DefId>,
/// The crate that originally created this `ExpnData`. During
/// metadata serialization, we only encode `ExpnData`s that were
/// created locally - when our serialized metadata is decoded,
/// foreign `ExpnId`s will have their `ExpnData` looked up
/// from the crate specified by `krate`.
pub krate: CrateNum,
/// The raw id that this `ExpnData` had in its original crate.
/// An `ExpnData` can be created before being assigned an `ExpnId`,
/// so this might be `None` until `set_expn_data` is called
// This is used only for serialization/deserialization purposes:
// two `ExpnData`s that differ only in their `orig_id` should
// be considered equivalent.
#[stable_hasher(ignore)]
pub orig_id: Option<u32>,
}
// This would require special handling of `orig_id` and `parent`
impl !PartialEq for ExpnData {}
impl ExpnData {
/// Constructs expansion data with default properties.
pub fn default(
@ -702,6 +739,8 @@ impl ExpnData {
local_inner_macros: false,
edition,
macro_def_id,
krate: LOCAL_CRATE,
orig_id: None,
}
}
@ -789,7 +828,7 @@ impl MacroKind {
}
/// The kind of AST transform.
#[derive(Clone, Copy, PartialEq, Debug, RustcEncodable, RustcDecodable, HashStable_Generic)]
#[derive(Clone, Copy, Debug, PartialEq, RustcEncodable, RustcDecodable, HashStable_Generic)]
pub enum AstPass {
StdImports,
TestHarness,
@ -847,14 +886,318 @@ impl DesugaringKind {
}
}
impl Encodable for ExpnId {
fn encode<E: Encoder>(&self, _: &mut E) -> Result<(), E::Error> {
Ok(()) // FIXME(jseyfried) intercrate hygiene
impl UseSpecializedEncodable for ExpnId {}
impl UseSpecializedDecodable for ExpnId {}
#[derive(Default)]
pub struct HygieneEncodeContext {
/// All `SyntaxContexts` for which we have written `SyntaxContextData` into crate metadata.
/// This is `None` after we finish encoding `SyntaxContexts`, to ensure
/// that we don't accidentally try to encode any more `SyntaxContexts`
serialized_ctxts: Lock<FxHashSet<SyntaxContext>>,
/// The `SyntaxContexts` that we have serialized (e.g. as a result of encoding `Spans`)
/// in the most recent 'round' of serializing. Serializing `SyntaxContextData`
/// may cause us to serialize more `SyntaxContext`s, so serialize in a loop
/// until we reach a fixed point.
latest_ctxts: Lock<FxHashSet<SyntaxContext>>,
serialized_expns: Lock<FxHashSet<ExpnId>>,
latest_expns: Lock<FxHashSet<ExpnId>>,
}
impl HygieneEncodeContext {
pub fn encode<
T,
R,
F: FnMut(&mut T, u32, &SyntaxContextData) -> Result<(), R>,
G: FnMut(&mut T, u32, &ExpnData) -> Result<(), R>,
>(
&self,
encoder: &mut T,
mut encode_ctxt: F,
mut encode_expn: G,
) -> Result<(), R> {
// When we serialize a `SyntaxContextData`, we may end up serializing
// a `SyntaxContext` that we haven't seen before
while !self.latest_ctxts.lock().is_empty() || !self.latest_expns.lock().is_empty() {
debug!(
"encode_hygiene: Serializing a round of {:?} SyntaxContextDatas: {:?}",
self.latest_ctxts.lock().len(),
self.latest_ctxts
);
// Consume the current round of SyntaxContexts.
// Drop the lock() temporary early
let latest_ctxts = { std::mem::take(&mut *self.latest_ctxts.lock()) };
// It's fine to iterate over a HashMap, because the serialization
// of the table that we insert data into doesn't depend on insertion
// order
for_all_ctxts_in(latest_ctxts.into_iter(), |(index, ctxt, data)| {
if self.serialized_ctxts.lock().insert(ctxt) {
encode_ctxt(encoder, index, data)?;
}
Ok(())
})?;
let latest_expns = { std::mem::take(&mut *self.latest_expns.lock()) };
for_all_expns_in(latest_expns.into_iter(), |index, expn, data| {
if self.serialized_expns.lock().insert(expn) {
encode_expn(encoder, index, data)?;
}
Ok(())
})?;
}
debug!("encode_hygiene: Done serializing SyntaxContextData");
Ok(())
}
}
impl Decodable for ExpnId {
fn decode<D: Decoder>(_: &mut D) -> Result<Self, D::Error> {
Ok(ExpnId::root()) // FIXME(jseyfried) intercrate hygiene
#[derive(Default)]
/// Additional information used to assist in decoding hygiene data
pub struct HygieneDecodeContext {
// Maps serialized `SyntaxContext` ids to a `SyntaxContext` in the current
// global `HygieneData`. When we deserialize a `SyntaxContext`, we need to create
// a new id in the global `HygieneData`. This map tracks the ID we end up picking,
// so that multiple occurrences of the same serialized id are decoded to the same
// `SyntaxContext`
remapped_ctxts: Lock<Vec<Option<SyntaxContext>>>,
// The same as `remapped_ctxts`, but for `ExpnId`s
remapped_expns: Lock<Vec<Option<ExpnId>>>,
}
pub fn decode_expn_id<
'a,
D: Decoder,
F: FnOnce(&mut D, u32) -> Result<ExpnData, D::Error>,
G: FnOnce(CrateNum) -> &'a HygieneDecodeContext,
>(
d: &mut D,
mode: ExpnDataDecodeMode<'a, G>,
decode_data: F,
) -> Result<ExpnId, D::Error> {
let index = u32::decode(d)?;
let context = match mode {
ExpnDataDecodeMode::IncrComp(context) => context,
ExpnDataDecodeMode::Metadata(get_context) => {
let krate = CrateNum::decode(d)?;
get_context(krate)
}
};
// Do this after decoding, so that we decode a `CrateNum`
// if necessary
if index == ExpnId::root().as_u32() {
debug!("decode_expn_id: deserialized root");
return Ok(ExpnId::root());
}
let outer_expns = &context.remapped_expns;
// Ensure that the lock() temporary is dropped early
{
if let Some(expn_id) = outer_expns.lock().get(index as usize).copied().flatten() {
return Ok(expn_id);
}
}
// Don't decode the data inside `HygieneData::with`, since we need to recursively decode
// other ExpnIds
let mut expn_data = decode_data(d, index)?;
let expn_id = HygieneData::with(|hygiene_data| {
let expn_id = ExpnId(hygiene_data.expn_data.len() as u32);
// If we just deserialized an `ExpnData` owned by
// the local crate, its `orig_id` will be stale,
// so we need to update it to its own value.
// This only happens when we deserialize the incremental cache,
// since a crate will never decode its own metadata.
if expn_data.krate == LOCAL_CRATE {
expn_data.orig_id = Some(expn_id.0);
}
hygiene_data.expn_data.push(Some(expn_data));
let mut expns = outer_expns.lock();
let new_len = index as usize + 1;
if expns.len() < new_len {
expns.resize(new_len, None);
}
expns[index as usize] = Some(expn_id);
drop(expns);
expn_id
});
Ok(expn_id)
}
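One detail worth spelling out: in `Metadata` mode the stream holds both the index and a `CrateNum`, so the root check has to come after both reads or the decoder would fall out of sync with the stream. A tiny sketch with a hypothetical stream decoder (not the real `Decoder` trait):

struct StreamDecoder<'a> {
    data: &'a [u32],
    pos: usize,
}

impl<'a> StreamDecoder<'a> {
    fn read(&mut self) -> u32 {
        let v = self.data[self.pos];
        self.pos += 1;
        v
    }
}

// Returns None for the root expansion, Some((index, krate)) otherwise.
fn decode_expn(d: &mut StreamDecoder<'_>, metadata_mode: bool) -> Option<(u32, u32)> {
    let index = d.read();
    // Always consume the crate num when present, even for the root,
    // so the next read starts at the right position.
    let krate = if metadata_mode { d.read() } else { 0 };
    if index == 0 {
        return None;
    }
    Some((index, krate))
}

fn main() {
    // Stream: root expansion (index 0, krate 2), then expansion 5 from krate 1.
    let mut d = StreamDecoder { data: &[0, 2, 5, 1], pos: 0 };
    assert_eq!(decode_expn(&mut d, true), None);
    assert_eq!(decode_expn(&mut d, true), Some((5, 1)));
}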
// Decodes `SyntaxContext`, using the provided `HygieneDecodeContext`
// to track which `SyntaxContext`s we have already decoded.
// The provided closure will be invoked to deserialize a `SyntaxContextData`
// if we haven't already seen the id of the `SyntaxContext` we are deserializing.
pub fn decode_syntax_context<
D: Decoder,
F: FnOnce(&mut D, u32) -> Result<SyntaxContextData, D::Error>,
>(
d: &mut D,
context: &HygieneDecodeContext,
decode_data: F,
) -> Result<SyntaxContext, D::Error> {
let raw_id: u32 = Decodable::decode(d)?;
if raw_id == 0 {
debug!("decode_syntax_context: deserialized root");
// The root is special
return Ok(SyntaxContext::root());
}
let outer_ctxts = &context.remapped_ctxts;
// Ensure that the lock() temporary is dropped early
{
if let Some(ctxt) = outer_ctxts.lock().get(raw_id as usize).copied().flatten() {
return Ok(ctxt);
}
}
// Allocate and store SyntaxContext id *before* calling the decoder function,
// as the SyntaxContextData may reference itself.
let new_ctxt = HygieneData::with(|hygiene_data| {
let new_ctxt = SyntaxContext(hygiene_data.syntax_context_data.len() as u32);
// Push a dummy SyntaxContextData to ensure that nobody else can get the
// same ID as us. This will be overwritten after `decode_data` is called
hygiene_data.syntax_context_data.push(SyntaxContextData {
outer_expn: ExpnId::root(),
outer_transparency: Transparency::Transparent,
parent: SyntaxContext::root(),
opaque: SyntaxContext::root(),
opaque_and_semitransparent: SyntaxContext::root(),
dollar_crate_name: kw::Invalid,
});
let mut ctxts = outer_ctxts.lock();
let new_len = raw_id as usize + 1;
if ctxts.len() < new_len {
ctxts.resize(new_len, None);
}
ctxts[raw_id as usize] = Some(new_ctxt);
drop(ctxts);
new_ctxt
});
// Don't try to decode data while holding the lock, since we need to
// be able to recursively decode a SyntaxContext
let mut ctxt_data = decode_data(d, raw_id)?;
// Reset `dollar_crate_name` so that it will be updated by `update_dollar_crate_names`
// We don't care what the encoding crate set this to - we want to resolve it
// from the perspective of the current compilation session
ctxt_data.dollar_crate_name = kw::DollarCrate;
// Overwrite the dummy data with our decoded SyntaxContextData
HygieneData::with(|hygiene_data| {
let dummy = std::mem::replace(
&mut hygiene_data.syntax_context_data[new_ctxt.as_u32() as usize],
ctxt_data,
);
// Make sure nothing weird happened while `decode_data` was running
assert_eq!(dummy.dollar_crate_name, kw::Invalid);
});
Ok(new_ctxt)
}
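The reserve-then-overwrite dance in `decode_syntax_context` generalizes to any table where decoding one entry may recursively decode others: claim an index with placeholder data first, so recursive decodes see a stable id, then overwrite the placeholder once the real data exists. A self-contained sketch, assuming a simple string table in place of real `SyntaxContextData`:

use std::cell::RefCell;

thread_local! {
    static TABLE: RefCell<Vec<String>> = RefCell::new(Vec::new());
}

fn decode_entry(decode_data: impl FnOnce(usize) -> String) -> usize {
    // Reserve an index with dummy data, so no other entry can claim it.
    let id = TABLE.with(|t| {
        let mut t = t.borrow_mut();
        t.push(String::from("<placeholder>"));
        t.len() - 1
    });
    // Decode outside the borrow: `decode_data` may recursively call
    // `decode_entry` without hitting a double borrow.
    let data = decode_data(id);
    // Overwrite the placeholder with the real data.
    TABLE.with(|t| t.borrow_mut()[id] = data);
    id
}

fn main() {
    let id = decode_entry(|my_id| {
        let child = decode_entry(|_| String::from("child"));
        format!("entry {} with child {}", my_id, child)
    });
    TABLE.with(|t| assert_eq!(t.borrow()[id], "entry 0 with child 1"));
}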
pub fn num_syntax_ctxts() -> usize {
HygieneData::with(|data| data.syntax_context_data.len())
}
pub fn for_all_ctxts_in<E, F: FnMut((u32, SyntaxContext, &SyntaxContextData)) -> Result<(), E>>(
ctxts: impl Iterator<Item = SyntaxContext>,
mut f: F,
) -> Result<(), E> {
let all_data: Vec<_> = HygieneData::with(|data| {
ctxts.map(|ctxt| (ctxt, data.syntax_context_data[ctxt.0 as usize].clone())).collect()
});
for (ctxt, data) in all_data.into_iter() {
f((ctxt.0, ctxt, &data))?;
}
Ok(())
}
pub fn for_all_expns_in<E, F: FnMut(u32, ExpnId, &ExpnData) -> Result<(), E>>(
expns: impl Iterator<Item = ExpnId>,
mut f: F,
) -> Result<(), E> {
let all_data: Vec<_> = HygieneData::with(|data| {
expns.map(|expn| (expn, data.expn_data[expn.0 as usize].clone())).collect()
});
for (expn, data) in all_data.into_iter() {
f(expn.0, expn, &data.unwrap_or_else(|| panic!("Missing data for {:?}", expn)))?;
}
Ok(())
}
pub fn for_all_data<E, F: FnMut((u32, SyntaxContext, &SyntaxContextData)) -> Result<(), E>>(
mut f: F,
) -> Result<(), E> {
let all_data = HygieneData::with(|data| data.syntax_context_data.clone());
for (i, data) in all_data.into_iter().enumerate() {
f((i as u32, SyntaxContext(i as u32), &data))?;
}
Ok(())
}
pub fn for_all_expn_data<E, F: FnMut(u32, &ExpnData) -> Result<(), E>>(mut f: F) -> Result<(), E> {
let all_data = HygieneData::with(|data| data.expn_data.clone());
for (i, data) in all_data.into_iter().enumerate() {
f(i as u32, &data.unwrap_or_else(|| panic!("Missing ExpnData!")))?;
}
Ok(())
}
pub fn raw_encode_syntax_context<E: Encoder>(
ctxt: SyntaxContext,
context: &HygieneEncodeContext,
e: &mut E,
) -> Result<(), E::Error> {
if !context.serialized_ctxts.lock().contains(&ctxt) {
context.latest_ctxts.lock().insert(ctxt);
}
ctxt.0.encode(e)
}
pub fn raw_encode_expn_id<E: Encoder>(
expn: ExpnId,
context: &HygieneEncodeContext,
mode: ExpnDataEncodeMode,
e: &mut E,
) -> Result<(), E::Error> {
if !context.serialized_expns.lock().contains(&expn) {
context.latest_expns.lock().insert(expn);
}
match mode {
ExpnDataEncodeMode::IncrComp => expn.0.encode(e),
ExpnDataEncodeMode::Metadata => {
let data = expn.expn_data();
data.orig_id.expect("Missing orig_id").encode(e)?;
data.krate.encode(e)
}
}
}
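The two modes encode an `ExpnId` differently: the incremental cache can store the raw session-local id (the same session reads it back), while crate metadata must store a (crate, id-within-crate) pair so downstream crates can locate the owning crate's `ExpnData` without duplicating it. A hedged sketch with simplified stand-in types, not the real encoder:

enum EncodeMode {
    IncrComp,
    Metadata,
}

#[derive(Debug, PartialEq)]
enum EncodedExpnId {
    Raw(u32),             // incr-comp: the same session reads it back
    CrossCrate(u32, u32), // metadata: (orig_id in the owning crate, crate num)
}

fn encode_expn_id(raw_id: u32, orig_id: Option<u32>, krate: u32, mode: EncodeMode) -> EncodedExpnId {
    match mode {
        EncodeMode::IncrComp => EncodedExpnId::Raw(raw_id),
        // Metadata encoding refers into the owning crate, which avoids
        // duplicating `ExpnData` in downstream crates.
        EncodeMode::Metadata => EncodedExpnId::CrossCrate(orig_id.expect("Missing orig_id"), krate),
    }
}

fn main() {
    assert_eq!(encode_expn_id(7, Some(7), 0, EncodeMode::IncrComp), EncodedExpnId::Raw(7));
    assert_eq!(encode_expn_id(7, Some(3), 5, EncodeMode::Metadata), EncodedExpnId::CrossCrate(3, 5));
}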
pub enum ExpnDataEncodeMode {
IncrComp,
Metadata,
}
pub enum ExpnDataDecodeMode<'a, F: FnOnce(CrateNum) -> &'a HygieneDecodeContext> {
IncrComp(&'a HygieneDecodeContext),
Metadata(F),
}
impl<'a> ExpnDataDecodeMode<'a, Box<dyn FnOnce(CrateNum) -> &'a HygieneDecodeContext>> {
pub fn incr_comp(ctxt: &'a HygieneDecodeContext) -> Self {
ExpnDataDecodeMode::IncrComp(ctxt)
}
}
impl UseSpecializedEncodable for SyntaxContext {}
impl UseSpecializedDecodable for SyntaxContext {}

View File

@ -12,6 +12,8 @@
#![feature(nll)]
#![feature(optin_builtin_traits)]
#![feature(min_specialization)]
#![feature(option_expect_none)]
#![feature(refcell_take)]
// FIXME(#56935): Work around ICEs during cross-compilation.
#[allow(unused)]
@ -30,8 +32,8 @@ pub mod edition;
use edition::Edition;
pub mod hygiene;
pub use hygiene::SyntaxContext;
use hygiene::Transparency;
pub use hygiene::{DesugaringKind, ExpnData, ExpnId, ExpnKind, ForLoopLoc, MacroKind};
use hygiene::{Transparency, NUM_TRANSPARENCIES};
pub mod def_id;
use def_id::{CrateNum, DefId, LOCAL_CRATE};
mod span_encoding;
@ -44,7 +46,6 @@ mod analyze_source_file;
pub mod fatal_error;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{Lock, Lrc};
@ -86,6 +87,9 @@ impl SessionGlobals {
}
}
// If this ever becomes non-thread-local, `decode_syntax_context`
// and `decode_expn_id` will need to be updated to handle concurrent
// deserialization.
scoped_tls::scoped_thread_local!(pub static SESSION_GLOBALS: SessionGlobals);
// FIXME: Perhaps this should not implement Rustc{Decodable, Encodable}
@ -1733,8 +1737,9 @@ fn lookup_line(lines: &[BytePos], pos: BytePos) -> isize {
/// This is a hack to allow using the `HashStable_Generic` derive macro
/// instead of implementing everything in librustc_middle.
pub trait HashStableContext {
fn hash_spans(&self) -> bool;
fn hash_def_id(&mut self, _: DefId, hasher: &mut StableHasher);
fn hash_crate_num(&mut self, _: CrateNum, hasher: &mut StableHasher);
fn hash_spans(&self) -> bool;
fn byte_pos_to_line_and_col(
&mut self,
byte: BytePos,
@ -1757,15 +1762,14 @@ where
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
const TAG_VALID_SPAN: u8 = 0;
const TAG_INVALID_SPAN: u8 = 1;
const TAG_EXPANSION: u8 = 0;
const TAG_NO_EXPANSION: u8 = 1;
if !ctx.hash_spans() {
return;
}
if *self == DUMMY_SP {
return std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
return;
}
// If this is not an empty or invalid span, we want to hash the last
@ -1775,12 +1779,16 @@ where
let (file_lo, line_lo, col_lo) = match ctx.byte_pos_to_line_and_col(span.lo) {
Some(pos) => pos,
None => {
return std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
span.ctxt.hash_stable(ctx, hasher);
return;
}
};
if !file_lo.contains(span.hi) {
return std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
span.ctxt.hash_stable(ctx, hasher);
return;
}
std::hash::Hash::hash(&TAG_VALID_SPAN, hasher);
@ -1793,8 +1801,16 @@ where
let len = ((span.hi - span.lo).0 as u64) << 32;
let line_col_len = col | line | len;
std::hash::Hash::hash(&line_col_len, hasher);
span.ctxt.hash_stable(ctx, hasher);
}
}
if span.ctxt == SyntaxContext::root() {
impl<CTX: HashStableContext> HashStable<CTX> for SyntaxContext {
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
const TAG_EXPANSION: u8 = 0;
const TAG_NO_EXPANSION: u8 = 1;
if *self == SyntaxContext::root() {
TAG_NO_EXPANSION.hash_stable(ctx, hasher);
} else {
TAG_EXPANSION.hash_stable(ctx, hasher);
@ -1803,21 +1819,39 @@ where
// times, we cache a stable hash of it and hash that instead of
// recursing every time.
thread_local! {
static CACHE: RefCell<FxHashMap<hygiene::ExpnId, u64>> = Default::default();
static CACHE: RefCell<Vec<Option<[Option<u64>; NUM_TRANSPARENCIES]>>> = Default::default();
}
let sub_hash: u64 = CACHE.with(|cache| {
let expn_id = span.ctxt.outer_expn();
let (expn_id, transparency, _) = self.outer_mark_with_data();
let index = expn_id.as_u32() as usize;
if let Some(&sub_hash) = cache.borrow().get(&expn_id) {
return sub_hash;
if let Some(sub_hash_cache) = cache.borrow().get(index).copied().flatten() {
if let Some(sub_hash) = sub_hash_cache[transparency as usize] {
return sub_hash;
}
}
let new_len = index + 1;
let mut hasher = StableHasher::new();
expn_id.expn_data().hash_stable(ctx, &mut hasher);
transparency.hash_stable(ctx, &mut hasher);
let sub_hash: Fingerprint = hasher.finish();
let sub_hash = sub_hash.to_smaller_hash();
cache.borrow_mut().insert(expn_id, sub_hash);
let mut cache = cache.borrow_mut();
if cache.len() < new_len {
cache.resize(new_len, None);
}
if let Some(mut sub_hash_cache) = cache[index] {
sub_hash_cache[transparency as usize] = Some(sub_hash);
} else {
let mut sub_hash_cache = [None; NUM_TRANSPARENCIES];
sub_hash_cache[transparency as usize] = Some(sub_hash);
cache[index] = Some(sub_hash_cache);
}
sub_hash
});
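The cache structure replacing the old `FxHashMap<hygiene::ExpnId, u64>` is a two-level lookup: a `Vec` indexed by the expansion's raw id, each slot holding a small fixed array indexed by the transparency discriminant. A standalone sketch of that shape, assuming `NUM_TRANSPARENCIES == 3` (Transparent / SemiTransparent / Opaque) and plain `usize`/`u64` stand-ins for the real types:

const NUM_TRANSPARENCIES: usize = 3;

struct SubHashCache {
    cache: Vec<Option<[Option<u64>; NUM_TRANSPARENCIES]>>,
}

impl SubHashCache {
    fn get_or_insert_with(&mut self, index: usize, transparency: usize, compute: impl FnOnce() -> u64) -> u64 {
        if let Some(slot) = self.cache.get(index).copied().flatten() {
            if let Some(hash) = slot[transparency] {
                return hash; // both levels hit: reuse the cached hash
            }
        }
        let hash = compute();
        if self.cache.len() <= index {
            self.cache.resize(index + 1, None);
        }
        let slot = self.cache[index].get_or_insert([None; NUM_TRANSPARENCIES]);
        slot[transparency] = Some(hash);
        hash
    }
}

fn main() {
    let mut c = SubHashCache { cache: Vec::new() };
    let h1 = c.get_or_insert_with(4, 1, || 0xDEAD_BEEF);
    let h2 = c.get_or_insert_with(4, 1, || unreachable!("cached"));
    assert_eq!(h1, h2);
}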

View File

@ -0,0 +1,36 @@
// revisions:rpass1 rpass2
// compile-flags: -Z query-dep-graph
// We use #[inline(always)] to ensure that the downstream crate
// will always load the MIR for these functions
#![feature(rustc_attrs)]
#[allow(unused)]
macro_rules! first_macro {
() => {
println!("New call!");
}
}
#[rustc_dirty(label="typeck", cfg="rpass2")]
#[inline(always)]
pub fn changed_fn() {
// This will cause additional hygiene to be generated,
// which will cause the SyntaxContext/ExpnId raw ids to be
// different when we write out these functions to the crate metadata.
#[cfg(rpass2)]
first_macro!();
}
macro_rules! print_loc {
() => {
println!("Caller loc: {}", std::panic::Location::caller());
}
}
#[rustc_clean(cfg="rpass2")]
#[inline(always)]
pub fn unchanged_fn() {
print_loc!();
}

View File

@ -0,0 +1,48 @@
// revisions:rpass1 rpass2
// compile-flags: -Z query-dep-graph
// aux-build:cached_hygiene.rs
// This tests the following scenario:
// 1. A foreign crate is compiled with incremental compilation.
// This causes hygiene information to be saved to the incr cache.
// 2. One function in the foreign crate is modified. This causes the
// optimized MIR for an unmodified function to be loaded from the
// incremental cache and written out to the crate metadata.
// 3. In the process of loading and writing out this function's MIR,
// we load hygiene information from the incremental cache and
// write it to our metadata.
// 4. This hygiene information is loaded by another crate (this file).
// Previously, this situation would cause hygiene identifiers
// (SyntaxContexts and ExpnIds) to get corrupted when we tried to
// serialize the hygiene information loaded from the incr cache into
// the metadata. Specifically, we were not resetting `orig_id`
// for an `ExpnData` generated in the current crate, which would cause
// us to serialize the `ExpnId` pointing to a garbage location in
// the metadata.
#![feature(rustc_attrs)]
#![rustc_partition_reused(module="load_cached_hygiene-call_unchanged_function", cfg="rpass2")]
#![rustc_partition_codegened(module="load_cached_hygiene-call_changed_function", cfg="rpass2")]
extern crate cached_hygiene;
pub mod call_unchanged_function {
pub fn unchanged() {
cached_hygiene::unchanged_fn();
}
}
pub mod call_changed_function {
pub fn changed() {
cached_hygiene::changed_fn();
}
}
pub fn main() {
call_unchanged_function::unchanged();
call_changed_function::changed();
}

View File

@ -0,0 +1,5 @@
#![feature(decl_macro)]
macro x() { struct MyStruct; }
x!();
x!();

View File

@ -0,0 +1,14 @@
pub const IN_DEF_CRATE: &str = "In def crate!";
macro_rules! make_it {
() => {
#[macro_export]
macro_rules! inner {
() => {
$crate::IN_DEF_CRATE
}
}
}
}
make_it!();

View File

@ -0,0 +1,8 @@
// check-pass
// aux-build:needs_hygiene.rs
extern crate needs_hygiene;
use needs_hygiene::*;
fn main() {}

View File

@ -0,0 +1,9 @@
// aux-build:nested-dollar-crate.rs
// edition:2018
// run-pass
extern crate nested_dollar_crate;
fn main() {
assert_eq!(nested_dollar_crate::inner!(), "In def crate!");
}

View File

@ -0,0 +1,10 @@
// run-fail
// check-run-results
// exec-env:RUST_BACKTRACE=0
//
// Regression test for issue #70963
// The captured stderr from this test reports a location
// inside `VecDeque::with_capacity`, instead of `<::core::macros::panic macros>`
fn main() {
std::collections::VecDeque::<String>::with_capacity(!0);
}

View File

@ -0,0 +1,2 @@
thread 'main' panicked at 'capacity overflow', $SRC_DIR/liballoc/collections/vec_deque.rs:LL:COL
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace

View File

@ -6,7 +6,13 @@ macro_rules! make_it {
#[proc_macro]
pub fn $name(input: TokenStream) -> TokenStream {
println!("Def site: {:?}", Span::def_site());
input
println!("Input: {:?}", input);
let new: TokenStream = input.into_iter().map(|mut t| {
t.set_span(Span::def_site());
t
}).collect();
println!("Respanned: {:?}", new);
new
}
};
}

View File

@ -10,3 +10,6 @@ extern crate make_macro;
use proc_macro::{TokenStream, Span};
make_macro::make_it!(print_def_site);
#[proc_macro]
pub fn dummy(input: TokenStream) -> TokenStream { input }

View File

@ -5,6 +5,7 @@
// Anonymize unstable non-dummy spans while still showing dummy spans `0..0`.
// normalize-stdout-test "bytes\([^0]\w*\.\.(\w+)\)" -> "bytes(LO..$1)"
// normalize-stdout-test "bytes\((\w+)\.\.[^0]\w*\)" -> "bytes($1..HI)"
// normalize-stdout-test "#\d+" -> "#CTXT"
#[macro_use]
extern crate test_macros;

View File

@ -2,79 +2,79 @@ PRINT-BANG INPUT (DISPLAY): struct M($crate :: S) ;
PRINT-BANG INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "M",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "$crate",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Joint,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Alone,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "S",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
],
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ';',
spacing: Alone,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
]
PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "A",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "$crate",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Joint,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Alone,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "S",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
],
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ';',
spacing: Alone,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
]

View File

@ -6,6 +6,7 @@
// Anonymize unstable non-dummy spans while still showing dummy spans `0..0`.
// normalize-stdout-test "bytes\([^0]\w*\.\.(\w+)\)" -> "bytes(LO..$1)"
// normalize-stdout-test "bytes\((\w+)\.\.[^0]\w*\)" -> "bytes($1..HI)"
// normalize-stdout-test "#\d+" -> "#CTXT"
#[macro_use]
extern crate test_macros;

View File

@ -2,109 +2,109 @@ PRINT-ATTR INPUT (DISPLAY): struct A(identity ! ($crate :: S)) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "A",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "identity",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: '!',
spacing: Alone,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "$crate",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Joint,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Alone,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "S",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
],
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
],
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ';',
spacing: Alone,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
]
PRINT-ATTR INPUT (DISPLAY): struct B(identity ! ($crate :: S)) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: #10 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "B",
span: #10 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "identity",
span: #10 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: '!',
spacing: Alone,
span: #10 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "$crate",
span: #10 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Joint,
span: #10 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Alone,
span: #10 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "S",
span: #10 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
],
span: #10 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
],
span: #10 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ';',
spacing: Alone,
span: #10 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
]

View File

@ -6,6 +6,7 @@
// Anonymize unstable non-dummy spans while still showing dummy spans `0..0`.
// normalize-stdout-test "bytes\([^0]\w*\.\.(\w+)\)" -> "bytes(LO..$1)"
// normalize-stdout-test "bytes\((\w+)\.\.[^0]\w*\)" -> "bytes($1..HI)"
// normalize-stdout-test "#\d+" -> "#CTXT"
#[macro_use]
extern crate test_macros;

View File

@ -2,239 +2,239 @@ PRINT-BANG INPUT (DISPLAY): struct M($crate :: S) ;
PRINT-BANG INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "M",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "$crate",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Joint,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Alone,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "S",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
],
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ';',
spacing: Alone,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
]
PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "A",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "$crate",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Joint,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Alone,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "S",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
],
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ';',
spacing: Alone,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
]
PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S) ;
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "D",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "$crate",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Joint,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Alone,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "S",
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
],
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ';',
spacing: Alone,
span: #3 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
]
PRINT-BANG INPUT (DISPLAY): struct M($crate :: S) ;
PRINT-BANG INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "M",
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "$crate",
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Joint,
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Alone,
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "S",
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
],
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ';',
spacing: Alone,
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
]
PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "A",
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "$crate",
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Joint,
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Alone,
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "S",
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
],
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ';',
spacing: Alone,
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
]
PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S) ;
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "D",
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "$crate",
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Joint,
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ':',
spacing: Alone,
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Ident {
ident: "S",
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
],
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
Punct {
ch: ';',
spacing: Alone,
span: #13 bytes(LO..HI),
span: #CTXT bytes(LO..HI),
},
]

View File

@ -1,6 +1,7 @@
// Check what token streams proc macros see when interpolated tokens are passed to them as input.
// check-pass
// normalize-stdout-test "#\d+" -> "#CTXT"
// aux-build:test-macros.rs
#[macro_use]

View File

@ -5,61 +5,61 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
stream: TokenStream [
Ident {
ident: "A",
span: #0 bytes(402..403),
span: #CTXT bytes(445..446),
},
],
span: #3 bytes(269..271),
span: #CTXT bytes(312..314),
},
]
PRINT-ATTR INPUT (DISPLAY): const A : u8 = 0 ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "const",
span: #0 bytes(0..0),
span: #CTXT bytes(0..0),
},
Ident {
ident: "A",
span: #0 bytes(0..0),
span: #CTXT bytes(0..0),
},
Punct {
ch: ':',
spacing: Alone,
span: #0 bytes(0..0),
span: #CTXT bytes(0..0),
},
Ident {
ident: "u8",
span: #0 bytes(0..0),
span: #CTXT bytes(0..0),
},
Punct {
ch: '=',
spacing: Alone,
span: #0 bytes(0..0),
span: #CTXT bytes(0..0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
span: #0 bytes(0..0),
span: #CTXT bytes(0..0),
},
Punct {
ch: ';',
spacing: Alone,
span: #0 bytes(0..0),
span: #CTXT bytes(0..0),
},
]
PRINT-DERIVE INPUT (DISPLAY): struct A { }
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: #0 bytes(0..0),
span: #CTXT bytes(0..0),
},
Ident {
ident: "A",
span: #0 bytes(0..0),
span: #CTXT bytes(0..0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: #0 bytes(0..0),
span: #CTXT bytes(0..0),
},
]

View File

@ -1,13 +1,26 @@
// aux-build:make-macro.rs
// aux-build:meta-macro.rs
// edition:2018
// compile-flags: -Z span-debug -Z unpretty=expanded,hygiene
// compile-flags: -Z span-debug -Z macro-backtrace
// check-pass
// normalize-stdout-test "#\d+" -> "#CTXT"
// normalize-stdout-test "\d+#" -> "0#"
// ^ We don't care about symbol ids, so set them all to 0
//
// We don't care about symbol ids, so we set them all to 0
// in the stdout
extern crate meta_macro;
fn main() {
meta_macro::print_def_site!();
macro_rules! produce_it {
() => {
// `print_def_site!` will respan the `$crate` identifier
// with `Span::def_site()`. This should cause it to resolve
// relative to `meta_macro`, *not* `make_macro` (despite
// the fact that `print_def_site` is produced by
// a `macro_rules!` macro in `make_macro`).
meta_macro::print_def_site!($crate::dummy!());
}
}
fn main() {
produce_it!();
}

View File

@ -1,32 +1,3 @@
Def site: $DIR/auxiliary/make-macro.rs:7:9: 10:10 (#3)
#![feature /* 0#0 */(prelude_import)]
#[prelude_import /* 0#1 */]
use std /* 0#1 */::prelude /* 0#1 */::v1 /* 0#1 */::*;
#[macro_use /* 0#1 */]
extern crate std /* 0#1 */;
// aux-build:make-macro.rs
// aux-build:meta-macro.rs
// edition:2018
// compile-flags: -Z span-debug -Z unpretty=expanded,hygiene
// check-pass
// normalize-stdout-test "\d+#" -> "0#"
// ^ We don't care about symbol ids, so set them all to 0
// in the stdout
extern crate meta_macro /* 0#0 */;
fn main /* 0#0 */() { }
/*
Expansions:
0: parent: ExpnId(0), call_site_ctxt: #0, def_site_ctxt: #0, kind: Root
1: parent: ExpnId(0), call_site_ctxt: #0, def_site_ctxt: #0, kind: AstPass(StdImports)
2: parent: ExpnId(0), call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Bang, "meta_macro::print_def_site")
SyntaxContexts:
#0: parent: #0, outer_mark: (ExpnId(0), Opaque)
#1: parent: #0, outer_mark: (ExpnId(1), Opaque)
#2: parent: #0, outer_mark: (ExpnId(1), Transparent)
#3: parent: #0, outer_mark: (ExpnId(2), Opaque)
#4: parent: #0, outer_mark: (ExpnId(2), Transparent)
#5: parent: #0, outer_mark: (ExpnId(2), SemiTransparent)
*/
Def site: $DIR/auxiliary/make-macro.rs:7:9: 16:10 (#CTXT)
Input: TokenStream [Ident { ident: "$crate", span: $DIR/meta-macro-hygiene.rs:20:37: 20:43 (#CTXT) }, Punct { ch: ':', spacing: Joint, span: $DIR/meta-macro-hygiene.rs:20:43: 20:45 (#CTXT) }, Punct { ch: ':', spacing: Alone, span: $DIR/meta-macro-hygiene.rs:20:43: 20:45 (#CTXT) }, Ident { ident: "dummy", span: $DIR/meta-macro-hygiene.rs:20:45: 20:50 (#CTXT) }, Punct { ch: '!', spacing: Alone, span: $DIR/meta-macro-hygiene.rs:20:50: 20:51 (#CTXT) }, Group { delimiter: Parenthesis, stream: TokenStream [], span: $DIR/meta-macro-hygiene.rs:20:51: 20:53 (#CTXT) }]
Respanned: TokenStream [Ident { ident: "$crate", span: $DIR/auxiliary/make-macro.rs:7:9: 16:10 (#CTXT) }, Punct { ch: ':', spacing: Joint, span: $DIR/auxiliary/make-macro.rs:7:9: 16:10 (#CTXT) }, Punct { ch: ':', spacing: Alone, span: $DIR/auxiliary/make-macro.rs:7:9: 16:10 (#CTXT) }, Ident { ident: "dummy", span: $DIR/auxiliary/make-macro.rs:7:9: 16:10 (#CTXT) }, Punct { ch: '!', spacing: Alone, span: $DIR/auxiliary/make-macro.rs:7:9: 16:10 (#CTXT) }, Group { delimiter: Parenthesis, stream: TokenStream [], span: $DIR/auxiliary/make-macro.rs:7:9: 16:10 (#CTXT) }]

View File

@ -2,6 +2,7 @@
// aux-build:meta-macro.rs
// edition:2018
// compile-flags: -Z span-debug
// normalize-stdout-test "#\d+" -> "#CTXT"
// run-pass
extern crate meta_macro;

View File

@ -1 +1,3 @@
Def site: $DIR/auxiliary/make-macro.rs:7:9: 10:10 (#3)
Def site: $DIR/auxiliary/make-macro.rs:7:9: 16:10 (#CTXT)
Input: TokenStream []
Respanned: TokenStream []

View File

@ -2,6 +2,7 @@
// aux-build:nested-macro-rules.rs
// aux-build:test-macros.rs
// compile-flags: -Z span-debug
// normalize-stdout-test "#\d+" -> "#CTXT"
// edition:2018
extern crate nested_macro_rules;

View File

@ -5,10 +5,10 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
stream: TokenStream [
Ident {
ident: "FirstStruct",
span: $DIR/auxiliary/nested-macro-rules.rs:15:14: 15:25 (#3),
span: $DIR/auxiliary/nested-macro-rules.rs:15:14: 15:25 (#CTXT),
},
],
span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#3),
span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#CTXT),
},
]
PRINT-BANG INPUT (DISPLAY): SecondStruct
@ -18,9 +18,9 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
stream: TokenStream [
Ident {
ident: "SecondStruct",
span: $DIR/nested-macro-rules.rs:18:38: 18:50 (#9),
span: $DIR/nested-macro-rules.rs:19:38: 19:50 (#CTXT),
},
],
span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#8),
span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#CTXT),
},
]

View File

@ -1,6 +1,7 @@
// run-pass
// aux-build:test-macros.rs
// compile-flags: -Z span-debug
// normalize-stdout-test "#\d+" -> "#CTXT"
// edition:2018
//
// Tests the pretty-printing behavior of inserting `NoDelim` groups

View File

@ -4,7 +4,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
kind: Str,
symbol: "hi",
suffix: None,
span: $DIR/nodelim-groups.rs:13:42: 13:46 (#3),
span: $DIR/nodelim-groups.rs:14:42: 14:46 (#CTXT),
},
Group {
delimiter: None,
@ -13,12 +13,12 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
kind: Integer,
symbol: "1",
suffix: None,
span: $DIR/nodelim-groups.rs:17:16: 17:17 (#0),
span: $DIR/nodelim-groups.rs:18:16: 18:17 (#CTXT),
},
Punct {
ch: '+',
spacing: Alone,
span: $DIR/nodelim-groups.rs:17:18: 17:19 (#0),
span: $DIR/nodelim-groups.rs:18:18: 18:19 (#CTXT),
},
Group {
delimiter: Parenthesis,
@ -27,24 +27,24 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
kind: Integer,
symbol: "25",
suffix: None,
span: $DIR/nodelim-groups.rs:17:21: 17:23 (#0),
span: $DIR/nodelim-groups.rs:18:21: 18:23 (#CTXT),
},
],
span: $DIR/nodelim-groups.rs:17:20: 17:24 (#0),
span: $DIR/nodelim-groups.rs:18:20: 18:24 (#CTXT),
},
Punct {
ch: '+',
spacing: Alone,
span: $DIR/nodelim-groups.rs:17:25: 17:26 (#0),
span: $DIR/nodelim-groups.rs:18:25: 18:26 (#CTXT),
},
Literal {
kind: Integer,
symbol: "1",
suffix: None,
span: $DIR/nodelim-groups.rs:17:27: 17:28 (#0),
span: $DIR/nodelim-groups.rs:18:27: 18:28 (#CTXT),
},
],
span: $DIR/nodelim-groups.rs:13:47: 13:51 (#3),
span: $DIR/nodelim-groups.rs:14:47: 14:51 (#CTXT),
},
Group {
delimiter: Parenthesis,
@ -53,21 +53,21 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
kind: Integer,
symbol: "1",
suffix: None,
span: $DIR/nodelim-groups.rs:13:53: 13:54 (#3),
span: $DIR/nodelim-groups.rs:14:53: 14:54 (#CTXT),
},
Punct {
ch: '+',
spacing: Alone,
span: $DIR/nodelim-groups.rs:13:55: 13:56 (#3),
span: $DIR/nodelim-groups.rs:14:55: 14:56 (#CTXT),
},
Literal {
kind: Integer,
symbol: "1",
suffix: None,
span: $DIR/nodelim-groups.rs:13:57: 13:58 (#3),
span: $DIR/nodelim-groups.rs:14:57: 14:58 (#CTXT),
},
],
span: $DIR/nodelim-groups.rs:13:52: 13:59 (#3),
span: $DIR/nodelim-groups.rs:14:52: 14:59 (#CTXT),
},
]
PRINT-BANG INPUT (DISPLAY): "hi" "hello".len() + "world".len() (1 + 1)
@ -77,7 +77,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
kind: Str,
symbol: "hi",
suffix: None,
span: $DIR/nodelim-groups.rs:13:42: 13:46 (#8),
span: $DIR/nodelim-groups.rs:14:42: 14:46 (#CTXT),
},
Group {
delimiter: None,
@ -86,49 +86,49 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
kind: Str,
symbol: "hello",
suffix: None,
span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
span: $DIR/nodelim-groups.rs:14:47: 14:51 (#CTXT),
},
Punct {
ch: '.',
spacing: Alone,
span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
span: $DIR/nodelim-groups.rs:14:47: 14:51 (#CTXT),
},
Ident {
ident: "len",
span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
span: $DIR/nodelim-groups.rs:14:47: 14:51 (#CTXT),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
span: $DIR/nodelim-groups.rs:14:47: 14:51 (#CTXT),
},
Punct {
ch: '+',
spacing: Alone,
span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
span: $DIR/nodelim-groups.rs:14:47: 14:51 (#CTXT),
},
Literal {
kind: Str,
symbol: "world",
suffix: None,
span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
span: $DIR/nodelim-groups.rs:14:47: 14:51 (#CTXT),
},
Punct {
ch: '.',
spacing: Alone,
span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
span: $DIR/nodelim-groups.rs:14:47: 14:51 (#CTXT),
},
Ident {
ident: "len",
span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
span: $DIR/nodelim-groups.rs:14:47: 14:51 (#CTXT),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
span: $DIR/nodelim-groups.rs:14:47: 14:51 (#CTXT),
},
],
span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
span: $DIR/nodelim-groups.rs:14:47: 14:51 (#CTXT),
},
Group {
delimiter: Parenthesis,
@ -137,20 +137,20 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
kind: Integer,
symbol: "1",
suffix: None,
span: $DIR/nodelim-groups.rs:13:53: 13:54 (#8),
span: $DIR/nodelim-groups.rs:14:53: 14:54 (#CTXT),
},
Punct {
ch: '+',
spacing: Alone,
span: $DIR/nodelim-groups.rs:13:55: 13:56 (#8),
span: $DIR/nodelim-groups.rs:14:55: 14:56 (#CTXT),
},
Literal {
kind: Integer,
symbol: "1",
suffix: None,
span: $DIR/nodelim-groups.rs:13:57: 13:58 (#8),
span: $DIR/nodelim-groups.rs:14:57: 14:58 (#CTXT),
},
],
span: $DIR/nodelim-groups.rs:13:52: 13:59 (#8),
span: $DIR/nodelim-groups.rs:14:52: 14:59 (#CTXT),
},
]