Use Span instead of SyntaxContext in Ident

Vadim Petrochenkov 2018-03-18 02:57:23 +03:00
parent 48fa6f9631
commit baae274fb7
23 changed files with 146 additions and 93 deletions
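
The core of the change appears in the syntax_pos hunks near the end of this diff: ast::Ident's ctxt: SyntaxContext field becomes span: Span, and hygiene information is reached through the span. The sketch below is not part of the diff; it illustrates the before/after construction idiom using only the API introduced here, with use paths assuming the compiler-internal syntax_pos crate, so it is not buildable outside the rustc tree.

use syntax_pos::Span;
use syntax_pos::symbol::{Ident, Symbol};

// Before: Ident { name: Symbol, ctxt: SyntaxContext }, built as
//     Ident { name, ctxt: span.ctxt() }
// After: Ident { name: Symbol, span: Span }, built with the new constructor.
fn ident_at(name: Symbol, span: Span) -> Ident {
    let ident = Ident::new(name, span);
    // Hygiene queries now go through the span instead of a stored ctxt.
    debug_assert_eq!(ident.span.ctxt(), span.ctxt());
    ident
}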

View File

@ -1097,7 +1097,7 @@ impl TokenTree {
}).into();
},
self::TokenTree::Term(tt) => {
let ident = ast::Ident { name: tt.sym, ctxt: tt.span.0.ctxt() };
let ident = ast::Ident::new(tt.sym, tt.span.0);
let sym_str = tt.sym.as_str();
let token =
if sym_str.starts_with("'") { Lifetime(ident) }

View File

@ -909,7 +909,7 @@ impl<'a> LoweringContext<'a> {
fn lower_ident(&mut self, ident: Ident) -> Name {
let ident = ident.modern();
if ident.ctxt == SyntaxContext::empty() {
if ident.span.ctxt() == SyntaxContext::empty() {
return ident.name;
}
*self.name_map
@ -2089,10 +2089,7 @@ impl<'a> LoweringContext<'a> {
name: self.lower_ident(match f.ident {
Some(ident) => ident,
// FIXME(jseyfried) positional field hygiene
None => Ident {
name: Symbol::intern(&index.to_string()),
ctxt: f.span.ctxt(),
},
None => Ident::new(Symbol::intern(&index.to_string()), f.span),
}),
vis: self.lower_visibility(&f.vis, None),
ty: self.lower_ty(&f.ty, ImplTraitContext::Disallowed),

View File

@ -655,7 +655,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for ast::Ident {
hasher: &mut StableHasher<W>) {
let ast::Ident {
ref name,
ctxt: _ // Ignore this
span: _ // Ignore this
} = *self;
name.hash_stable(hcx, hasher);

View File

@ -2088,8 +2088,8 @@ impl<'a, 'gcx, 'tcx> VariantDef {
return Some(index);
}
let mut ident = name.to_ident();
while ident.ctxt != SyntaxContext::empty() {
ident.ctxt.remove_mark();
while ident.span.ctxt() != SyntaxContext::empty() {
ident.span.remove_mark();
if let Some(field) = self.fields.iter().position(|f| f.name.to_ident() == ident) {
return Some(field);
}
@ -2558,7 +2558,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
LOCAL_CRATE => self.hir.definitions().expansion(scope.index),
_ => Mark::root(),
};
let scope = match ident.ctxt.adjust(expansion) {
let scope = match ident.span.adjust(expansion) {
Some(macro_def) => self.hir.definitions().macro_def_scope(macro_def),
None if block == DUMMY_NODE_ID => DefId::local(CRATE_DEF_INDEX), // Dummy DefId
None => self.hir.get_module_parent(block),
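
The field-lookup loop above is the recurring migration pattern in this commit: code that used to strip expansion marks from ident.ctxt now strips them from ident.span through the Span::remove_mark wrapper added later in this diff. A standalone sketch of the pattern follows; the lookup closure and function name are illustrative, not compiler API.

use syntax_pos::hygiene::SyntaxContext;
use syntax_pos::symbol::Ident;

// Illustrative: probe a lookup, peeling one expansion mark off the
// identifier's span per iteration until the context is empty.
fn lookup_with_hygiene<F>(mut ident: Ident, lookup: F) -> Option<usize>
    where F: Fn(Ident) -> Option<usize>
{
    loop {
        if let Some(index) = lookup(ident) {
            return Some(index);
        }
        if ident.span.ctxt() == SyntaxContext::empty() {
            return None;
        }
        // Span::remove_mark pops the outermost mark from the span's
        // SyntaxContext in place, mirroring SyntaxContext::remove_mark.
        ident.span.remove_mark();
    }
}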

View File

@ -466,11 +466,11 @@ impl<'a> PrinterSupport for HygieneAnnotation<'a> {
impl<'a> pprust::PpAnn for HygieneAnnotation<'a> {
fn post(&self, s: &mut pprust::State, node: pprust::AnnNode) -> io::Result<()> {
match node {
pprust::NodeIdent(&ast::Ident { name, ctxt }) => {
pprust::NodeIdent(&ast::Ident { name, span }) => {
s.s.space()?;
// FIXME #16420: this doesn't display the connections
// between syntax contexts
s.synth_comment(format!("{}{:?}", name.as_u32(), ctxt))
s.synth_comment(format!("{}{:?}", name.as_u32(), span.ctxt()))
}
pprust::NodeName(&name) => {
s.s.space()?;

View File

@ -35,7 +35,6 @@ use rustc::util::nodemap::NodeSet;
use syntax::ast::{self, CRATE_NODE_ID, Ident};
use syntax::symbol::keywords;
use syntax_pos::Span;
use syntax_pos::hygiene::SyntaxContext;
use std::cmp;
use std::mem::replace;
@ -495,11 +494,11 @@ struct NamePrivacyVisitor<'a, 'tcx: 'a> {
impl<'a, 'tcx> NamePrivacyVisitor<'a, 'tcx> {
// Checks that a field in a struct constructor (expression or pattern) is accessible.
fn check_field(&mut self,
use_ctxt: SyntaxContext, // Syntax context of the field name at the use site
use_ctxt: Span, // Syntax context of the field name at the use site
span: Span, // Span of the field pattern, e.g. `x: 0`
def: &'tcx ty::AdtDef, // Definition of the struct or enum
field: &'tcx ty::FieldDef) { // Definition of the field
let ident = Ident { ctxt: use_ctxt.modern(), ..keywords::Invalid.ident() };
let ident = Ident::new(keywords::Invalid.name(), use_ctxt.modern());
let def_id = self.tcx.adjust_ident(ident, def.did, self.current_item).1;
if !def.is_enum() && !field.vis.is_accessible_from(def_id, self.tcx) {
struct_span_err!(self.tcx.sess, span, E0451, "field `{}` of {} `{}` is private",
@ -573,14 +572,14 @@ impl<'a, 'tcx> Visitor<'tcx> for NamePrivacyVisitor<'a, 'tcx> {
for variant_field in &variant.fields {
let field = fields.iter().find(|f| f.name.node == variant_field.name);
let (use_ctxt, span) = match field {
Some(field) => (field.name.node.to_ident().ctxt, field.span),
None => (base.span.ctxt(), base.span),
Some(field) => (field.name.node.to_ident().span, field.span),
None => (base.span, base.span),
};
self.check_field(use_ctxt, span, adt, variant_field);
}
} else {
for field in fields {
let use_ctxt = field.name.node.to_ident().ctxt;
let use_ctxt = field.name.node.to_ident().span;
let field_def = variant.field_named(field.name.node);
self.check_field(use_ctxt, field.span, adt, field_def);
}
@ -599,7 +598,7 @@ impl<'a, 'tcx> Visitor<'tcx> for NamePrivacyVisitor<'a, 'tcx> {
let adt = self.tables.pat_ty(pat).ty_adt_def().unwrap();
let variant = adt.variant_of_def(def);
for field in fields {
let use_ctxt = field.node.name.to_ident().ctxt;
let use_ctxt = field.node.name.to_ident().span;
let field_def = variant.field_named(field.node.name);
self.check_field(use_ctxt, field.span, adt, field_def);
}
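
check_field now receives the use-site Span rather than a bare SyntaxContext; the name of the probe identifier it builds is irrelevant, only the hygiene carried by the span matters. A hedged sketch of that idiom (the helper name is illustrative):

use syntax_pos::Span;
use syntax_pos::symbol::{keywords, Ident};

// Illustrative: build a throwaway identifier whose only useful content is
// the hygiene context of the use site, as NamePrivacyVisitor does above.
fn hygiene_probe(use_site: Span) -> Ident {
    // Span::modern normalizes the syntax context, as SyntaxContext::modern
    // did before this change.
    Ident::new(keywords::Invalid.name(), use_site.modern())
}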

View File

@ -157,7 +157,7 @@ impl<'a> Resolver<'a> {
// Disallow `use $crate;`
if source.name == keywords::DollarCrate.name() && path.segments.len() == 1 {
let crate_root = self.resolve_crate_root(source.ctxt, true);
let crate_root = self.resolve_crate_root(source.span.ctxt(), true);
let crate_name = match crate_root.kind {
ModuleKind::Def(_, name) => name,
ModuleKind::Block(..) => unreachable!(),

View File

@ -1912,10 +1912,11 @@ impl<'a> Resolver<'a> {
path_span: Span)
-> Option<LexicalScopeBinding<'a>> {
if ns == TypeNS {
ident.ctxt = if ident.name == keywords::SelfType.name() {
SyntaxContext::empty() // FIXME(jseyfried) improve `Self` hygiene
ident.span = if ident.name == keywords::SelfType.name() {
// FIXME(jseyfried) improve `Self` hygiene
ident.span.with_ctxt(SyntaxContext::empty())
} else {
ident.ctxt.modern()
ident.span.modern()
}
}
@ -1931,10 +1932,10 @@ impl<'a> Resolver<'a> {
module = match self.ribs[ns][i].kind {
ModuleRibKind(module) => module,
MacroDefinition(def) if def == self.macro_def(ident.ctxt) => {
MacroDefinition(def) if def == self.macro_def(ident.span.ctxt()) => {
// If an invocation of this macro created `ident`, give up on `ident`
// and switch to `ident`'s source from the macro definition.
ident.ctxt.remove_mark();
ident.span.remove_mark();
continue
}
_ => continue,
@ -1954,9 +1955,9 @@ impl<'a> Resolver<'a> {
}
}
ident.ctxt = ident.ctxt.modern();
ident.span = ident.span.modern();
loop {
module = unwrap_or!(self.hygienic_lexical_parent(module, &mut ident.ctxt), break);
module = unwrap_or!(self.hygienic_lexical_parent(module, &mut ident.span), break);
let orig_current_module = self.current_module;
self.current_module = module; // Lexical resolutions can never be a privacy error.
let result = self.resolve_ident_in_module_unadjusted(
@ -1980,10 +1981,10 @@ impl<'a> Resolver<'a> {
}
}
fn hygienic_lexical_parent(&mut self, mut module: Module<'a>, ctxt: &mut SyntaxContext)
fn hygienic_lexical_parent(&mut self, mut module: Module<'a>, span: &mut Span)
-> Option<Module<'a>> {
if !module.expansion.is_descendant_of(ctxt.outer()) {
return Some(self.macro_def_scope(ctxt.remove_mark()));
if !module.expansion.is_descendant_of(span.ctxt().outer()) {
return Some(self.macro_def_scope(span.remove_mark()));
}
if let ModuleKind::Block(..) = module.kind {
@ -1995,7 +1996,7 @@ impl<'a> Resolver<'a> {
let parent_expansion = parent.expansion.modern();
if module_expansion.is_descendant_of(parent_expansion) &&
parent_expansion != module_expansion {
return if parent_expansion.is_descendant_of(ctxt.outer()) {
return if parent_expansion.is_descendant_of(span.ctxt().outer()) {
Some(parent)
} else {
None
@ -2016,9 +2017,9 @@ impl<'a> Resolver<'a> {
record_used: bool,
span: Span)
-> Result<&'a NameBinding<'a>, Determinacy> {
ident.ctxt = ident.ctxt.modern();
ident.span = ident.span.modern();
let orig_current_module = self.current_module;
if let Some(def) = ident.ctxt.adjust(module.expansion) {
if let Some(def) = ident.span.adjust(module.expansion) {
self.current_module = self.macro_def_scope(def);
}
let result = self.resolve_ident_in_module_unadjusted(
@ -2108,8 +2109,8 @@ impl<'a> Resolver<'a> {
// If an invocation of this macro created `ident`, give up on `ident`
// and switch to `ident`'s source from the macro definition.
MacroDefinition(def) => {
if def == self.macro_def(ident.ctxt) {
ident.ctxt.remove_mark();
if def == self.macro_def(ident.span.ctxt()) {
ident.span.remove_mark();
}
}
_ => {
@ -2873,7 +2874,7 @@ impl<'a> Resolver<'a> {
}
if path.len() == 1 && this.self_type_is_available(span) {
if let Some(candidate) = this.lookup_assoc_candidate(ident.node, ns, is_expected) {
let self_is_available = this.self_value_is_available(path[0].node.ctxt, span);
let self_is_available = this.self_value_is_available(path[0].node.span, span);
match candidate {
AssocSuggestion::Field => {
err.span_suggestion(span, "try",
@ -3084,9 +3085,9 @@ impl<'a> Resolver<'a> {
if let Some(LexicalScopeBinding::Def(def)) = binding { def != Def::Err } else { false }
}
fn self_value_is_available(&mut self, ctxt: SyntaxContext, span: Span) -> bool {
let ident = Ident { name: keywords::SelfValue.name(), ctxt: ctxt };
let binding = self.resolve_ident_in_lexical_scope(ident, ValueNS, false, span);
fn self_value_is_available(&mut self, self_span: Span, path_span: Span) -> bool {
let ident = Ident::new(keywords::SelfValue.name(), self_span);
let binding = self.resolve_ident_in_lexical_scope(ident, ValueNS, false, path_span);
if let Some(LexicalScopeBinding::Def(def)) = binding { def != Def::Err } else { false }
}
@ -3219,11 +3220,11 @@ impl<'a> Resolver<'a> {
let name = ident.node.name;
if i == 0 && ns == TypeNS && name == keywords::SelfValue.name() {
let mut ctxt = ident.node.ctxt.modern();
let mut ctxt = ident.node.span.ctxt().modern();
module = Some(self.resolve_self(&mut ctxt, self.current_module));
continue
} else if allow_super && ns == TypeNS && name == keywords::Super.name() {
let mut ctxt = ident.node.ctxt.modern();
let mut ctxt = ident.node.span.ctxt().modern();
let self_module = match i {
0 => self.resolve_self(&mut ctxt, self.current_module),
_ => module.unwrap(),
@ -3245,11 +3246,11 @@ impl<'a> Resolver<'a> {
(i == 1 && name == keywords::Crate.name() &&
path[0].node.name == keywords::CrateRoot.name()) {
// `::a::b` or `::crate::a::b`
module = Some(self.resolve_crate_root(ident.node.ctxt, false));
module = Some(self.resolve_crate_root(ident.node.span.ctxt(), false));
continue
} else if i == 0 && name == keywords::DollarCrate.name() {
// `$crate::a::b`
module = Some(self.resolve_crate_root(ident.node.ctxt, true));
module = Some(self.resolve_crate_root(ident.node.span.ctxt(), true));
continue
} else if i == 1 && !token::is_path_segment_keyword(ident.node) {
let prev_name = path[0].node.name;
@ -3771,12 +3772,12 @@ impl<'a> Resolver<'a> {
}
}
ident.ctxt = ident.ctxt.modern();
ident.span = ident.span.modern();
let mut search_module = self.current_module;
loop {
self.get_traits_in_module_containing_item(ident, ns, search_module, &mut found_traits);
search_module =
unwrap_or!(self.hygienic_lexical_parent(search_module, &mut ident.ctxt), break);
unwrap_or!(self.hygienic_lexical_parent(search_module, &mut ident.span), break);
}
if let Some(prelude) = self.prelude {
@ -3808,7 +3809,7 @@ impl<'a> Resolver<'a> {
for &(trait_name, binding) in traits.as_ref().unwrap().iter() {
let module = binding.module().unwrap();
let mut ident = ident;
if ident.ctxt.glob_adjust(module.expansion, binding.span.ctxt().modern()).is_none() {
if ident.span.glob_adjust(module.expansion, binding.span.ctxt().modern()).is_none() {
continue
}
if self.resolve_ident_in_module_unadjusted(module, ident, ns, false, false, module.span)

View File

@ -140,7 +140,7 @@ impl<'a> base::Resolver for Resolver<'a> {
let ident = path.segments[0].identifier;
if ident.name == keywords::DollarCrate.name() {
path.segments[0].identifier.name = keywords::CrateRoot.name();
let module = self.0.resolve_crate_root(ident.ctxt, true);
let module = self.0.resolve_crate_root(ident.span.ctxt(), true);
if !module.is_local() {
let span = path.segments[0].span;
path.segments.insert(1, match module.kind {
@ -534,7 +534,7 @@ impl<'a> Resolver<'a> {
}
module = match module {
Some(module) => self.hygienic_lexical_parent(module, &mut ident.ctxt),
Some(module) => self.hygienic_lexical_parent(module, &mut ident.span),
None => return potential_illegal_shadower,
}
}

View File

@ -238,7 +238,7 @@ impl<'a> Resolver<'a> {
}
let module = unwrap_or!(directive.imported_module.get(), return Err(Undetermined));
let (orig_current_module, mut ident) = (self.current_module, ident.modern());
match ident.ctxt.glob_adjust(module.expansion, directive.span.ctxt().modern()) {
match ident.span.glob_adjust(module.expansion, directive.span.ctxt().modern()) {
Some(Some(def)) => self.current_module = self.macro_def_scope(def),
Some(None) => {}
None => continue,
@ -398,7 +398,7 @@ impl<'a> Resolver<'a> {
// Define `binding` in `module`s glob importers.
for directive in module.glob_importers.borrow_mut().iter() {
let mut ident = ident.modern();
let scope = match ident.ctxt.reverse_glob_adjust(module.expansion,
let scope = match ident.span.reverse_glob_adjust(module.expansion,
directive.span.ctxt().modern()) {
Some(Some(def)) => self.macro_def_scope(def),
Some(None) => directive.parent,
@ -623,7 +623,7 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> {
"crate root imports need to be explicitly named: \
`use crate as name;`".to_string()));
} else {
Some(self.resolve_crate_root(source.ctxt.modern(), false))
Some(self.resolve_crate_root(source.span.ctxt().modern(), false))
}
} else if is_extern && !token::is_path_segment_keyword(source) {
let crate_id =
@ -860,7 +860,7 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> {
resolution.borrow().binding().map(|binding| (ident, binding))
}).collect::<Vec<_>>();
for ((mut ident, ns), binding) in bindings {
let scope = match ident.ctxt.reverse_glob_adjust(module.expansion,
let scope = match ident.span.reverse_glob_adjust(module.expansion,
directive.span.ctxt().modern()) {
Some(Some(def)) => self.macro_def_scope(def),
Some(None) => self.current_module,
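
Both glob-import call sites above rely on the new Span::glob_adjust and Span::reverse_glob_adjust wrappers added later in this diff, which update the identifier's span in place. A sketch of the forward direction; the function and parameter names are illustrative:

use syntax_pos::hygiene::{Mark, SyntaxContext};
use syntax_pos::symbol::Ident;

// Illustrative: None means the identifier is not visible through this glob
// (hygiene mismatch); Some(Some(mark)) means resolution should continue in
// that macro definition's scope; Some(None) means no scope switch is needed.
fn adjust_through_glob(ident: &mut Ident,
                       module_expansion: Mark,
                       glob_ctxt: SyntaxContext) -> Option<Option<Mark>> {
    ident.span.glob_adjust(module_expansion, glob_ctxt)
}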

View File

@ -3213,10 +3213,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
if !tuple_like { continue }
debug!("tuple struct named {:?}", base_t);
let ident = ast::Ident {
name: Symbol::intern(&idx.node.to_string()),
ctxt: idx.span.ctxt().modern(),
};
let ident =
ast::Ident::new(Symbol::intern(&idx.node.to_string()), idx.span.modern());
let (ident, def_scope) =
self.tcx.adjust_ident(ident, base_def.did, self.body_id);
let fields = &base_def.non_enum_variant().fields;

View File

@ -150,7 +150,7 @@ impl PathSegment {
}
pub fn crate_root(span: Span) -> Self {
PathSegment {
identifier: Ident { ctxt: span.ctxt(), ..keywords::CrateRoot.ident() },
identifier: Ident::new(keywords::CrateRoot.name(), span),
span,
parameters: None,
}

View File

@ -876,8 +876,8 @@ impl<'a> ExtCtxt<'a> {
ast::Ident::from_str(st)
}
pub fn std_path(&self, components: &[&str]) -> Vec<ast::Ident> {
let def_site = SyntaxContext::empty().apply_mark(self.current_expansion.mark);
iter::once(Ident { ctxt: def_site, ..keywords::DollarCrate.ident() })
let def_site = DUMMY_SP.apply_mark(self.current_expansion.mark);
iter::once(Ident::new(keywords::DollarCrate.name(), def_site))
.chain(components.iter().map(|s| self.ident_of(s)))
.collect()
}
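
With no separate ctxt field, a pure definition-site hygiene context is now represented as DUMMY_SP with the current expansion mark applied. A sketch of the idiom used above; the parameter stands in for cx.current_expansion.mark:

use syntax_pos::DUMMY_SP;
use syntax_pos::hygiene::Mark;
use syntax_pos::symbol::{keywords, Ident};

// Illustrative: build the leading $crate segment for a definition-site path.
// DUMMY_SP carries an empty SyntaxContext, so applying the mark yields the
// same hygiene as the old SyntaxContext::empty().apply_mark(..), now on a Span.
fn dollar_crate_at_def_site(expansion: Mark) -> Ident {
    let def_site = DUMMY_SP.apply_mark(expansion);
    Ident::new(keywords::DollarCrate.name(), def_site)
}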

View File

@ -1378,7 +1378,7 @@ pub struct Marker(pub Mark);
impl Folder for Marker {
fn fold_ident(&mut self, mut ident: Ident) -> Ident {
ident.ctxt = ident.ctxt.apply_mark(self.0);
ident.span = ident.span.apply_mark(self.0);
ident
}

View File

@ -289,14 +289,11 @@ where
// `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
// metavariable that names the crate of the invocation.
Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
let (ident, _) = token.ident().unwrap();
let (ident, is_raw) = token.ident().unwrap();
let span = ident_span.with_lo(span.lo());
if ident.name == keywords::Crate.name() {
let ident = ast::Ident {
name: keywords::DollarCrate.name(),
..ident
};
TokenTree::Token(span, token::Ident(ident, false))
if ident.name == keywords::Crate.name() && !is_raw {
let ident = ast::Ident::new(keywords::DollarCrate.name(), ident.span);
TokenTree::Token(span, token::Ident(ident, is_raw))
} else {
TokenTree::MetaVar(span, ident)
}

View File

@ -166,7 +166,7 @@ pub fn transcribe(cx: &ExtCtxt,
}
} else {
let ident =
Ident { ctxt: ident.ctxt.apply_mark(cx.current_expansion.mark), ..ident };
Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark));
result.push(TokenTree::Token(sp, token::Dollar).into());
result.push(TokenTree::Token(sp, token::Token::from_ast_ident(ident)).into());

View File

@ -76,7 +76,7 @@ impl<'a> StringReader<'a> {
fn mk_ident(&self, string: &str) -> Ident {
let mut ident = Ident::from_str(string);
if let Some(span) = self.override_span {
ident.ctxt = span.ctxt();
ident.span = span;
}
ident
}

View File

@ -743,7 +743,7 @@ pub trait PrintState<'a> {
segment.identifier.name != keywords::DollarCrate.name() {
self.writer().word(&segment.identifier.name.as_str())?;
} else if segment.identifier.name == keywords::DollarCrate.name() {
self.print_dollar_crate(segment.identifier.ctxt)?;
self.print_dollar_crate(segment.identifier.span.ctxt())?;
}
}
self.writer().space()?;
@ -2424,7 +2424,7 @@ impl<'a> State<'a> {
self.print_path_parameters(parameters, colons_before_params)?;
}
} else if segment.identifier.name == keywords::DollarCrate.name() {
self.print_dollar_crate(segment.identifier.ctxt)?;
self.print_dollar_crate(segment.identifier.span.ctxt())?;
}
Ok(())
}

View File

@ -9,6 +9,7 @@
// except according to those terms.
use syntax::ast;
use syntax::codemap::DUMMY_SP;
use syntax::ext::base::*;
use syntax::ext::base;
use syntax::feature_gate;
@ -16,7 +17,6 @@ use syntax::parse::token;
use syntax::ptr::P;
use syntax_pos::Span;
use syntax_pos::symbol::Symbol;
use syntax_pos::hygiene::SyntaxContext;
use syntax::tokenstream::TokenTree;
pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
@ -53,11 +53,8 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
}
}
}
let res = ast::Ident {
name: Symbol::intern(&res_str),
ctxt: SyntaxContext::empty().apply_mark(cx.current_expansion.mark),
};
let res = ast::Ident::new(Symbol::intern(&res_str),
DUMMY_SP.apply_mark(cx.current_expansion.mark));
struct Result {
ident: ast::Ident,
span: Span,

View File

@ -18,10 +18,9 @@ use syntax::ast;
use syntax::ast::{Expr, GenericParam, Generics, Ident, SelfKind};
use syntax::ext::base::ExtCtxt;
use syntax::ext::build::AstBuilder;
use syntax::codemap::respan;
use syntax::codemap::{respan, DUMMY_SP};
use syntax::ptr::P;
use syntax_pos::Span;
use syntax_pos::hygiene::SyntaxContext;
use syntax_pos::symbol::keywords;
/// The types of pointers
@ -93,8 +92,8 @@ impl<'a> Path<'a> {
PathKind::Global => cx.path_all(span, true, idents, lt, tys, Vec::new()),
PathKind::Local => cx.path_all(span, false, idents, lt, tys, Vec::new()),
PathKind::Std => {
let def_site = SyntaxContext::empty().apply_mark(cx.current_expansion.mark);
idents.insert(0, Ident { ctxt: def_site, ..keywords::DollarCrate.ident() });
let def_site = DUMMY_SP.apply_mark(cx.current_expansion.mark);
idents.insert(0, Ident::new(keywords::DollarCrate.name(), def_site));
cx.path_all(span, false, idents, lt, tys, Vec::new())
}
}

View File

@ -123,7 +123,7 @@ pub struct HygieneData {
marks: Vec<MarkData>,
syntax_contexts: Vec<SyntaxContextData>,
markings: HashMap<(SyntaxContext, Mark), SyntaxContext>,
gensym_to_ctxt: HashMap<Symbol, SyntaxContext>,
gensym_to_ctxt: HashMap<Symbol, Span>,
}
impl HygieneData {
@ -461,7 +461,7 @@ impl Symbol {
pub fn from_ident(ident: Ident) -> Symbol {
HygieneData::with(|data| {
let gensym = ident.name.gensymed();
data.gensym_to_ctxt.insert(gensym, ident.ctxt);
data.gensym_to_ctxt.insert(gensym, ident.span);
gensym
})
}
@ -469,7 +469,7 @@ impl Symbol {
pub fn to_ident(self) -> Ident {
HygieneData::with(|data| {
match data.gensym_to_ctxt.get(&self) {
Some(&ctxt) => Ident { name: self.interned(), ctxt: ctxt },
Some(&span) => Ident::new(self.interned(), span),
None => Ident::with_empty_ctxt(self),
}
})
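
The gensym table now records the originating identifier's whole Span instead of only its SyntaxContext, so the Symbol::from_ident / to_ident pair round-trips the span. A minimal sketch of that round trip:

use syntax_pos::symbol::{Ident, Symbol};

// Illustrative: to_ident recovers the exact span that from_ident stored,
// not just its hygiene context.
fn gensym_roundtrip(ident: Ident) -> Ident {
    let gensym: Symbol = Symbol::from_ident(ident);
    let recovered = gensym.to_ident();
    debug_assert_eq!(recovered.span, ident.span);
    recovered
}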

View File

@ -50,7 +50,7 @@ extern crate serialize as rustc_serialize; // used by deriving
extern crate unicode_width;
pub mod hygiene;
pub use hygiene::{SyntaxContext, ExpnInfo, ExpnFormat, NameAndSpan, CompilerDesugaringKind};
pub use hygiene::{Mark, SyntaxContext, ExpnInfo, ExpnFormat, NameAndSpan, CompilerDesugaringKind};
mod span_encoding;
pub use span_encoding::{Span, DUMMY_SP};
@ -422,6 +422,52 @@ impl Span {
if end.ctxt == SyntaxContext::empty() { end.ctxt } else { span.ctxt },
)
}
#[inline]
pub fn apply_mark(self, mark: Mark) -> Span {
let span = self.data();
span.with_ctxt(span.ctxt.apply_mark(mark))
}
#[inline]
pub fn remove_mark(&mut self) -> Mark {
let mut span = self.data();
let mark = span.ctxt.remove_mark();
*self = Span::new(span.lo, span.hi, span.ctxt);
mark
}
#[inline]
pub fn adjust(&mut self, expansion: Mark) -> Option<Mark> {
let mut span = self.data();
let mark = span.ctxt.adjust(expansion);
*self = Span::new(span.lo, span.hi, span.ctxt);
mark
}
#[inline]
pub fn glob_adjust(&mut self, expansion: Mark, glob_ctxt: SyntaxContext)
-> Option<Option<Mark>> {
let mut span = self.data();
let mark = span.ctxt.glob_adjust(expansion, glob_ctxt);
*self = Span::new(span.lo, span.hi, span.ctxt);
mark
}
#[inline]
pub fn reverse_glob_adjust(&mut self, expansion: Mark, glob_ctxt: SyntaxContext)
-> Option<Option<Mark>> {
let mut span = self.data();
let mark = span.ctxt.reverse_glob_adjust(expansion, glob_ctxt);
*self = Span::new(span.lo, span.hi, span.ctxt);
mark
}
#[inline]
pub fn modern(self) -> Span {
let span = self.data();
span.with_ctxt(span.ctxt.modern())
}
}
#[derive(Clone, Debug)]
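
Each of these inline methods forwards to the matching SyntaxContext operation and, for the &mut self variants, writes the adjusted context back into the span, so call sites can work on ident.span directly. A sketch of the typical resolver-side call pattern seen earlier in this diff; the function and parameter names are illustrative:

use syntax_pos::hygiene::Mark;
use syntax_pos::symbol::Ident;

// Illustrative: normalize the identifier's hygiene, then adjust it relative
// to the enclosing expansion before module-level resolution.
fn adjust_for_resolution(mut ident: Ident, expansion: Mark) -> (Ident, Option<Mark>) {
    // Span::modern returns a new span with a normalized SyntaxContext.
    ident.span = ident.span.modern();
    // Span::adjust mutates the span's context in place and reports which
    // macro definition's scope, if any, resolution should switch to.
    let macro_def = ident.span.adjust(expansion);
    (ident, macro_def)
}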

View File

@ -13,21 +13,27 @@
//! type, and vice versa.
use hygiene::SyntaxContext;
use GLOBALS;
use {Span, DUMMY_SP, GLOBALS};
use serialize::{Decodable, Decoder, Encodable, Encoder};
use std::collections::HashMap;
use std::fmt;
use std::hash::{Hash, Hasher};
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Copy, Clone, Eq)]
pub struct Ident {
pub name: Symbol,
pub ctxt: SyntaxContext,
pub span: Span,
}
impl Ident {
#[inline]
pub const fn new(name: Symbol, span: Span) -> Ident {
Ident { name, span }
}
#[inline]
pub const fn with_empty_ctxt(name: Symbol) -> Ident {
Ident { name: name, ctxt: SyntaxContext::empty() }
Ident::new(name, DUMMY_SP)
}
/// Maps a string to an identifier with an empty syntax context.
@ -36,17 +42,30 @@ impl Ident {
}
pub fn without_first_quote(&self) -> Ident {
Ident { name: Symbol::from(self.name.as_str().trim_left_matches('\'')), ctxt: self.ctxt }
Ident::new(Symbol::from(self.name.as_str().trim_left_matches('\'')), self.span)
}
pub fn modern(self) -> Ident {
Ident { name: self.name, ctxt: self.ctxt.modern() }
Ident::new(self.name, self.span.modern())
}
}
impl PartialEq for Ident {
fn eq(&self, rhs: &Self) -> bool {
self.name == rhs.name && self.span.ctxt() == rhs.span.ctxt()
}
}
impl Hash for Ident {
fn hash<H: Hasher>(&self, state: &mut H) {
self.name.hash(state);
self.span.ctxt().hash(state);
}
}
impl fmt::Debug for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}{:?}", self.name, self.ctxt)
write!(f, "{}{:?}", self.name, self.span.ctxt())
}
}
@ -58,7 +77,7 @@ impl fmt::Display for Ident {
impl Encodable for Ident {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
if self.ctxt.modern() == SyntaxContext::empty() {
if self.span.ctxt().modern() == SyntaxContext::empty() {
s.emit_str(&self.name.as_str())
} else { // FIXME(jseyfried) intercrate hygiene
let mut string = "#".to_owned();
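
Because the span now includes source location, Ident can no longer simply derive PartialEq and Hash: that would make otherwise-identical identifiers from different locations unequal. The manual impls above therefore compare and hash only the name together with the span's SyntaxContext. A sketch of the resulting semantics, assuming the two spans share a hygiene context:

use syntax_pos::Span;
use syntax_pos::symbol::{Ident, Symbol};

// Illustrative: equality and hashing ignore the span's lo/hi positions and
// look only at name and span.ctxt(), so the same name occurring at two
// different locations in the same hygiene context compares equal.
fn same_name_same_hygiene(span_a: Span, span_b: Span) -> bool {
    let name = Symbol::intern("x");
    let a = Ident::new(name, span_a);
    let b = Ident::new(name, span_b);
    a == b // true whenever span_a.ctxt() == span_b.ctxt()
}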