mv codemap() source_map()

Donato Sciarra 2018-08-18 12:14:09 +02:00
parent 82607d2cf3
commit d3fe97f3d3
78 changed files with 209 additions and 209 deletions
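The rename is mechanical: every accessor that was spelled `codemap()` is now spelled `source_map()`, on `Session`, `ParseSess`, `ExtCtxt`, and `StableHashingContext` alike, with no change in behavior. As a rough illustration of the call-site pattern only (the `SourceMap`, `ParseSess`, and `Session` types below are toy stand-ins defined in the sketch, not the compiler's), the before/after looks like this:

// Illustrative sketch of the accessor rename; toy stand-ins, not rustc's real types.
struct SourceMap;

impl SourceMap {
    fn span_to_snippet(&self, _span: std::ops::Range<usize>) -> Result<String, ()> {
        Ok("x + 1".to_owned()) // placeholder snippet for the example
    }
}

struct ParseSess {
    source_map: SourceMap,
}

impl ParseSess {
    // Formerly spelled `codemap()`.
    fn source_map(&self) -> &SourceMap {
        &self.source_map
    }
}

struct Session {
    parse_sess: ParseSess,
}

impl Session {
    // Formerly spelled `codemap()`; delegates to the parse session,
    // as in the `Session::source_map` hunk further down.
    fn source_map(&self) -> &SourceMap {
        self.parse_sess.source_map()
    }
}

fn main() {
    let sess = Session {
        parse_sess: ParseSess { source_map: SourceMap },
    };
    // Call sites change from `sess.codemap()` to `sess.source_map()`:
    if let Ok(snippet) = sess.source_map().span_to_snippet(0..5) {
        println!("snippet: {}", snippet);
    }
}

Note that the diff already refers to the renamed `SourceMap`/`SourceFile` types; this commit only touches method names, while locals and fields that hold the map (`codemap`, `caching_codemap`, `raw_codemap`) keep their old spelling in the hunks shown here.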


@ -1186,7 +1186,7 @@ pub mod __internal {
use super::{TokenStream, LexError, Span};
pub fn lookup_char_pos(pos: BytePos) -> Loc {
with_sess(|sess, _| sess.codemap().lookup_char_pos(pos))
with_sess(|sess, _| sess.source_map().lookup_char_pos(pos))
}
pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {


@ -3621,7 +3621,7 @@ impl<'a> LoweringContext<'a> {
let tail = block.expr.take().map_or_else(
|| {
let LoweredNodeId { node_id, hir_id } = this.next_id();
let span = this.sess.codemap().end_point(unstable_span);
let span = this.sess.source_map().end_point(unstable_span);
hir::Expr {
id: node_id,
span,


@ -1202,7 +1202,7 @@ pub fn map_crate<'hir>(sess: &::session::Session,
let cmdline_args = sess.opts.dep_tracking_hash();
collector.finalize_and_compute_crate_hash(crate_disambiguator,
cstore,
sess.codemap(),
sess.source_map(),
cmdline_args)
};


@ -101,7 +101,7 @@ impl<'a> StableHashingContext<'a> {
definitions,
cstore,
caching_codemap: None,
raw_codemap: sess.codemap(),
raw_codemap: sess.source_map(),
hash_spans: hash_spans_initial,
hash_bodies: true,
node_id_hashing_mode: NodeIdHashingMode::HashDefPath,
@ -169,7 +169,7 @@ impl<'a> StableHashingContext<'a> {
}
#[inline]
pub fn codemap(&mut self) -> &mut CachingCodemapView<'a> {
pub fn source_map(&mut self) -> &mut CachingCodemapView<'a> {
match self.caching_codemap {
Some(ref mut cm) => {
cm
@ -340,7 +340,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for Span {
return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
}
let (file_lo, line_lo, col_lo) = match hcx.codemap()
let (file_lo, line_lo, col_lo) = match hcx.source_map()
.byte_pos_to_line_and_col(span.lo) {
Some(pos) => pos,
None => {


@ -189,7 +189,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
self,
region: ty::Region<'tcx>,
) -> (String, Option<Span>) {
let cm = self.sess.codemap();
let cm = self.sess.source_map();
let scope = region.free_region_binding_scope(self);
let node = self.hir.as_local_node_id(scope).unwrap_or(DUMMY_NODE_ID);
@ -286,7 +286,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
}
fn explain_span(self, heading: &str, span: Span) -> (String, Option<Span>) {
let lo = self.sess.codemap().lookup_char_pos_adj(span.lo());
let lo = self.sess.source_map().lookup_char_pos_adj(span.lo());
(
format!("the {} at {}:{}", heading, lo.line, lo.col.to_usize() + 1),
Some(span),
@ -502,14 +502,14 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
ObligationCauseCode::MatchExpressionArm { arm_span, source } => match source {
hir::MatchSource::IfLetDesugar { .. } => {
let msg = "`if let` arm with an incompatible type";
if self.tcx.sess.codemap().is_multiline(arm_span) {
if self.tcx.sess.source_map().is_multiline(arm_span) {
err.span_note(arm_span, msg);
} else {
err.span_label(arm_span, msg);
}
},
hir::MatchSource::TryDesugar => { // Issue #51632
if let Ok(try_snippet) = self.tcx.sess.codemap().span_to_snippet(arm_span) {
if let Ok(try_snippet) = self.tcx.sess.source_map().span_to_snippet(arm_span) {
err.span_suggestion_with_applicability(
arm_span,
"try wrapping with a success variant",
@ -520,7 +520,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
},
_ => {
let msg = "match arm with an incompatible type";
if self.tcx.sess.codemap().is_multiline(arm_span) {
if self.tcx.sess.source_map().is_multiline(arm_span) {
err.span_note(arm_span, msg);
} else {
err.span_label(arm_span, msg);
@ -1136,8 +1136,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
let sp = if has_bounds {
sp.to(self.tcx
.sess
.codemap()
.next_point(self.tcx.sess.codemap().next_point(sp)))
.source_map()
.next_point(self.tcx.sess.source_map().next_point(sp)))
} else {
sp
};


@ -60,7 +60,7 @@ impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
}) => name.to_string(),
_ => "'_".to_owned(),
};
if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(return_sp) {
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(return_sp) {
err.span_suggestion(
return_sp,
&format!(


@ -420,7 +420,7 @@ impl BuiltinLintDiagnostics {
match self {
BuiltinLintDiagnostics::Normal => (),
BuiltinLintDiagnostics::BareTraitObject(span, is_global) => {
let (sugg, app) = match sess.codemap().span_to_snippet(span) {
let (sugg, app) = match sess.source_map().span_to_snippet(span) {
Ok(ref s) if is_global => (format!("dyn ({})", s),
Applicability::MachineApplicable),
Ok(s) => (format!("dyn {}", s), Applicability::MachineApplicable),
@ -429,7 +429,7 @@ impl BuiltinLintDiagnostics {
db.span_suggestion_with_applicability(span, "use `dyn`", sugg, app);
}
BuiltinLintDiagnostics::AbsPathWithModule(span) => {
let (sugg, app) = match sess.codemap().span_to_snippet(span) {
let (sugg, app) = match sess.source_map().span_to_snippet(span) {
Ok(ref s) => {
// FIXME(Manishearth) ideally the emitting code
// can tell us whether or not this is global
@ -462,7 +462,7 @@ impl BuiltinLintDiagnostics {
// When possible, prefer a suggestion that replaces the whole
// `Path<T>` expression with `Path<'_, T>`, rather than inserting `'_, `
// at a point (which makes for an ugly/confusing label)
if let Ok(snippet) = sess.codemap().span_to_snippet(path_span) {
if let Ok(snippet) = sess.source_map().span_to_snippet(path_span) {
// But our spans can get out of whack due to macros; if the place we think
// we want to insert `'_` isn't even within the path expression's span, we
// should bail out of making any suggestion rather than panicking on a


@ -754,7 +754,7 @@ pub fn in_external_macro(sess: &Session, span: Span) -> bool {
None => return true,
};
match sess.codemap().span_to_snippet(def_site) {
match sess.source_map().span_to_snippet(def_site) {
Ok(code) => !code.starts_with("macro_rules"),
// no snippet = external macro or compiler-builtin expansion
Err(_) => true,


@ -551,7 +551,7 @@ impl<'a, 'tcx> Visitor<'tcx> for DeadVisitor<'a, 'tcx> {
hir::ItemKind::Struct(..) |
hir::ItemKind::Union(..) |
hir::ItemKind::Trait(..) |
hir::ItemKind::Impl(..) => self.tcx.sess.codemap().def_span(item.span),
hir::ItemKind::Impl(..) => self.tcx.sess.source_map().def_span(item.span),
_ => item.span,
};
let participle = match item.node {
@ -612,7 +612,7 @@ impl<'a, 'tcx> Visitor<'tcx> for DeadVisitor<'a, 'tcx> {
}
hir::ImplItemKind::Method(_, body_id) => {
if !self.symbol_is_live(impl_item.id, None) {
let span = self.tcx.sess.codemap().def_span(impl_item.span);
let span = self.tcx.sess.source_map().def_span(impl_item.span);
self.warn_dead_code(impl_item.id, span, impl_item.ident.name, "method", "used");
}
self.visit_nested_body(body_id)


@ -157,7 +157,7 @@ enum LiveNodeKind {
}
fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt) -> String {
let cm = tcx.sess.codemap();
let cm = tcx.sess.source_map();
match lnk {
FreeVarNode(s) => {
format!("Free var node [{}]", cm.span_to_string(s))


@ -1314,7 +1314,7 @@ impl<'a, 'tcx> Visitor<'tcx> for RegionResolutionVisitor<'a, 'tcx> {
debug!("visit_body(id={:?}, span={:?}, body.id={:?}, cx.parent={:?})",
owner_id,
self.tcx.sess.codemap().span_to_string(body.value.span),
self.tcx.sess.source_map().span_to_string(body.value.span),
body_id,
self.cx.parent);


@ -2457,7 +2457,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
"insert_lifetime: {} resolved to {:?} span={:?}",
self.tcx.hir.node_to_string(lifetime_ref.id),
def,
self.tcx.sess.codemap().span_to_string(lifetime_ref.span)
self.tcx.sess.source_map().span_to_string(lifetime_ref.span)
);
self.map.defs.insert(lifetime_ref.id, def);


@ -685,7 +685,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
};
let msp: MultiSpan = span.into();
let cm = &self.sess.parse_sess.codemap();
let cm = &self.sess.parse_sess.source_map();
let span_key = msp.primary_span().and_then(|sp: Span|
if !sp.is_dummy() {
let file = cm.lookup_char_pos(sp.lo()).file;


@ -484,8 +484,8 @@ impl Session {
);
}
pub fn codemap<'a>(&'a self) -> &'a source_map::SourceMap {
self.parse_sess.codemap()
pub fn source_map<'a>(&'a self) -> &'a source_map::SourceMap {
self.parse_sess.source_map()
}
pub fn verbose(&self) -> bool {
self.opts.debugging_opts.verbose


@ -528,12 +528,12 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
-> DiagnosticBuilder<'tcx>
{
let msg = "impl has stricter requirements than trait";
let sp = self.tcx.sess.codemap().def_span(error_span);
let sp = self.tcx.sess.source_map().def_span(error_span);
let mut err = struct_span_err!(self.tcx.sess, sp, E0276, "{}", msg);
if let Some(trait_item_span) = self.tcx.hir.span_if_local(trait_item_def_id) {
let span = self.tcx.sess.codemap().def_span(trait_item_span);
let span = self.tcx.sess.source_map().def_span(trait_item_span);
err.span_label(span, format!("definition of `{}` from trait", item_name));
}
@ -715,7 +715,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
ty::Predicate::ClosureKind(closure_def_id, closure_substs, kind) => {
let found_kind = self.closure_kind(closure_def_id, closure_substs).unwrap();
let closure_span = self.tcx.sess.codemap()
let closure_span = self.tcx.sess.source_map()
.def_span(self.tcx.hir.span_if_local(closure_def_id).unwrap());
let node_id = self.tcx.hir.as_local_node_id(closure_def_id).unwrap();
let mut err = struct_span_err!(
@ -792,7 +792,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
};
let found_span = found_did.and_then(|did| {
self.tcx.hir.span_if_local(did)
}).map(|sp| self.tcx.sess.codemap().def_span(sp)); // the sp could be an fn def
}).map(|sp| self.tcx.sess.source_map().def_span(sp)); // the sp could be an fn def
let found = match found_trait_ref.skip_binder().substs.type_at(1).sty {
ty::TyTuple(ref tys) => tys.iter()
@ -867,7 +867,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
if let Some(hir::map::NodeLocal(ref local)) = self.tcx.hir.find(parent_node) {
if let Some(ref expr) = local.init {
if let hir::ExprKind::Index(_, _) = expr.node {
if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(expr.span) {
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(expr.span) {
err.span_suggestion_with_applicability(
expr.span,
"consider borrowing here",
@ -890,7 +890,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
let trait_ref = trait_ref.skip_binder();
let span = obligation.cause.span;
if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(span) {
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
let refs_number = snippet.chars()
.filter(|c| !c.is_whitespace())
.take_while(|c| *c == '&')
@ -909,7 +909,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
new_trait_ref.to_predicate());
if self.predicate_may_hold(&new_obligation) {
let sp = self.tcx.sess.codemap()
let sp = self.tcx.sess.source_map()
.span_take_while(span, |c| c.is_whitespace() || *c == '&');
let remove_refs = refs_remaining + 1;
@ -938,7 +938,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
node: hir::ExprKind::Closure(_, ref _decl, id, span, _),
..
}) => {
(self.tcx.sess.codemap().def_span(span), self.tcx.hir.body(id).arguments.iter()
(self.tcx.sess.source_map().def_span(span), self.tcx.hir.body(id).arguments.iter()
.map(|arg| {
if let hir::Pat {
node: hir::PatKind::Tuple(args, _),
@ -948,13 +948,13 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
ArgKind::Tuple(
Some(span),
args.iter().map(|pat| {
let snippet = self.tcx.sess.codemap()
let snippet = self.tcx.sess.source_map()
.span_to_snippet(pat.span).unwrap();
(snippet, "_".to_owned())
}).collect::<Vec<_>>(),
)
} else {
let name = self.tcx.sess.codemap()
let name = self.tcx.sess.source_map()
.span_to_snippet(arg.pat.span).unwrap();
ArgKind::Arg(name, "_".to_owned())
}
@ -976,7 +976,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
node: hir::TraitItemKind::Method(hir::MethodSig { ref decl, .. }, _),
..
}) => {
(self.tcx.sess.codemap().def_span(span), decl.inputs.iter()
(self.tcx.sess.source_map().def_span(span), decl.inputs.iter()
.map(|arg| match arg.clone().node {
hir::TyKind::Tup(ref tys) => ArgKind::Tuple(
Some(arg.span),
@ -995,13 +995,13 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
},
..
}) => {
(self.tcx.sess.codemap().def_span(span),
(self.tcx.sess.source_map().def_span(span),
fields.iter().map(|field| {
ArgKind::Arg(field.ident.to_string(), "_".to_string())
}).collect::<Vec<_>>())
}
hir::map::NodeStructCtor(ref variant_data) => {
(self.tcx.sess.codemap().def_span(self.tcx.hir.span(variant_data.id())),
(self.tcx.sess.source_map().def_span(self.tcx.hir.span(variant_data.id())),
variant_data.fields()
.iter().map(|_| ArgKind::Arg("_".to_owned(), "_".to_owned()))
.collect())
@ -1192,7 +1192,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
{
assert!(type_def_id.is_local());
let span = self.hir.span_if_local(type_def_id).unwrap();
let span = self.sess.codemap().def_span(span);
let span = self.sess.source_map().def_span(span);
let mut err = struct_span_err!(self.sess, span, E0072,
"recursive type `{}` has infinite size",
self.item_path_str(type_def_id));
@ -1210,7 +1210,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
-> DiagnosticBuilder<'tcx>
{
let trait_str = self.item_path_str(trait_def_id);
let span = self.sess.codemap().def_span(span);
let span = self.sess.source_map().def_span(span);
let mut err = struct_span_err!(
self.sess, span, E0038,
"the trait `{}` cannot be made into an object",
@ -1438,7 +1438,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
let item_name = tcx.item_path_str(item_def_id);
let msg = format!("required by `{}`", item_name);
if let Some(sp) = tcx.hir.span_if_local(item_def_id) {
let sp = tcx.sess.codemap().def_span(sp);
let sp = tcx.sess.source_map().def_span(sp);
err.span_note(sp, &msg);
} else {
err.note(&msg);


@ -142,7 +142,7 @@ impl<'tcx> ObligationCause<'tcx> {
ObligationCauseCode::CompareImplMethodObligation { .. } |
ObligationCauseCode::MainFunctionType |
ObligationCauseCode::StartFunctionType => {
tcx.sess.codemap().def_span(self.span)
tcx.sess.source_map().def_span(self.span)
}
_ => self.span,
}


@ -344,7 +344,7 @@ pub(super) fn specialization_graph_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx
}),
if used_to_be_allowed { " (E0119)" } else { "" }
);
let impl_span = tcx.sess.codemap().def_span(
let impl_span = tcx.sess.source_map().def_span(
tcx.span_of_impl(impl_def_id).unwrap()
);
let mut err = if used_to_be_allowed {
@ -363,7 +363,7 @@ pub(super) fn specialization_graph_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx
match tcx.span_of_impl(overlap.with_impl) {
Ok(span) => {
err.span_label(tcx.sess.codemap().def_span(span),
err.span_label(tcx.sess.source_map().def_span(span),
"first implementation here".to_string());
err.span_label(impl_span,
format!("conflicting implementation{}",


@ -1818,7 +1818,7 @@ pub mod tls {
/// in librustc otherwise
fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter) -> fmt::Result {
with(|tcx| {
write!(f, "{}", tcx.sess.codemap().span_to_string(span))
write!(f, "{}", tcx.sess.source_map().span_to_string(span))
})
}


@ -252,7 +252,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
}
match (&values.found.sty, &values.expected.sty) { // Issue #53280
(ty::TyInfer(ty::IntVar(_)), ty::TyFloat(_)) => {
if let Ok(snippet) = self.sess.codemap().span_to_snippet(sp) {
if let Ok(snippet) = self.sess.source_map().span_to_snippet(sp) {
if snippet.chars().all(|c| c.is_digit(10) || c == '-' || c == '_') {
db.span_suggestion_with_applicability(
sp,


@ -336,7 +336,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
self.push_item_path(buffer, parent_def_id);
let node_id = self.hir.as_local_node_id(impl_def_id).unwrap();
let item = self.hir.expect_item(node_id);
let span_str = self.sess.codemap().span_to_string(item.span);
let span_str = self.sess.source_map().span_to_string(item.span);
buffer.push(&format!("<impl at {}>", span_str));
}


@ -140,7 +140,7 @@ impl<'sess> OnDiskCache<'sess> {
file_index_to_file: Lock::new(FxHashMap()),
prev_cnums: footer.prev_cnums,
cnum_map: Once::new(),
codemap: sess.codemap(),
codemap: sess.source_map(),
current_diagnostics: Lock::new(FxHashMap()),
query_result_index: footer.query_result_index.into_iter().collect(),
prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(),
@ -178,7 +178,7 @@ impl<'sess> OnDiskCache<'sess> {
let mut file_to_file_index = FxHashMap();
let mut file_index_to_stable_id = FxHashMap();
for (index, file) in tcx.sess.codemap().files().iter().enumerate() {
for (index, file) in tcx.sess.source_map().files().iter().enumerate() {
let index = SourceFileIndex(index as u32);
let file_ptr: *const SourceFile = &**file as *const _;
file_to_file_index.insert(file_ptr, index);
@ -196,7 +196,7 @@ impl<'sess> OnDiskCache<'sess> {
expn_info_shorthands: FxHashMap(),
interpret_allocs: FxHashMap(),
interpret_allocs_inverse: Vec::new(),
codemap: CachingCodemapView::new(tcx.sess.codemap()),
codemap: CachingCodemapView::new(tcx.sess.source_map()),
file_to_file_index,
};


@ -251,7 +251,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
assert!(!stack.is_empty());
let fix_span = |span: Span, query: &Query<'gcx>| {
self.sess.codemap().def_span(query.default_span(self, span))
self.sess.source_map().def_span(query.default_span(self, span))
};
// Disable naming impls with types in this path, since that
@ -299,7 +299,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
i,
query.info.query.name(),
query.info.query.describe(icx.tcx)));
db.set_span(icx.tcx.sess.codemap().def_span(query.info.span));
db.set_span(icx.tcx.sess.source_map().def_span(query.info.span));
icx.tcx.sess.diagnostic().force_print_db(db);
current_query = query.parent.clone();


@ -594,7 +594,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
// 3. Where does old loan expire.
let previous_end_span =
Some(self.tcx().sess.codemap().end_point(
Some(self.tcx().sess.source_map().end_point(
old_loan.kill_scope.span(self.tcx(), &self.bccx.region_scope_tree)));
let mut err = match (new_loan.kind, old_loan.kind) {


@ -79,7 +79,7 @@ fn report_move_errors<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, errors: &[MoveErr
// see `get_pattern_source()` for details
let initializer =
e.init.as_ref().expect("should have an initializer to get an error");
if let Ok(snippet) = bccx.tcx.sess.codemap().span_to_snippet(initializer.span) {
if let Ok(snippet) = bccx.tcx.sess.source_map().span_to_snippet(initializer.span) {
err.span_suggestion(initializer.span,
"consider using a reference instead",
format!("&{}", snippet));


@ -848,7 +848,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
let sp = self.tcx.hir.span(node_id);
let fn_closure_msg = "`Fn` closures cannot capture their enclosing \
environment for modifications";
match (self.tcx.sess.codemap().span_to_snippet(sp), &err.cmt.cat) {
match (self.tcx.sess.source_map().span_to_snippet(sp), &err.cmt.cat) {
(_, &Categorization::Upvar(mc::Upvar {
kind: ty::ClosureKind::Fn, ..
})) => {
@ -1160,13 +1160,13 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
ref ty
}) = pty.node {
// Account for existing lifetimes when generating the message
let pointee_snippet = match self.tcx.sess.codemap().span_to_snippet(ty.span) {
let pointee_snippet = match self.tcx.sess.source_map().span_to_snippet(ty.span) {
Ok(snippet) => snippet,
_ => return None
};
let lifetime_snippet = if !lifetime.is_elided() {
format!("{} ", match self.tcx.sess.codemap().span_to_snippet(lifetime.span) {
format!("{} ", match self.tcx.sess.source_map().span_to_snippet(lifetime.span) {
Ok(lifetime_snippet) => lifetime_snippet,
_ => return None
})
@ -1277,7 +1277,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
binding_node_id: ast::NodeId) {
let let_span = self.tcx.hir.span(binding_node_id);
if let ty::BindByValue(..) = self.local_binding_mode(binding_node_id) {
if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(let_span) {
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(let_span) {
let (ty, is_implicit_self) = self.local_ty(binding_node_id);
if is_implicit_self && snippet != "self" {
// avoid suggesting `mut &self`.
@ -1315,7 +1315,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
let cmt_path_or_string = self.cmt_to_path_or_string(&err.cmt);
let suggestion =
match self.tcx.sess.codemap().span_to_snippet(err.span) {
match self.tcx.sess.source_map().span_to_snippet(err.span) {
Ok(string) => format!("move {}", string),
Err(_) => "move |<args>| <body>".to_string()
};
@ -1337,7 +1337,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
fn region_end_span(&self, region: ty::Region<'tcx>) -> Option<Span> {
match *region {
ty::ReScope(scope) => {
Some(self.tcx.sess.codemap().end_point(
Some(self.tcx.sess.source_map().end_point(
scope.span(self.tcx, &self.region_scope_tree)))
}
_ => None
@ -1368,7 +1368,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
db.span_label(*error_span, "cannot borrow as mutable");
} else if let Categorization::Local(local_id) = err.cmt.cat {
let span = self.tcx.hir.span(local_id);
if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(span) {
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
if snippet.starts_with("ref mut ") || snippet.starts_with("&mut ") {
db.span_label(*error_span, "cannot reborrow mutably");
db.span_label(*error_span, "try removing `&mut` here");


@ -75,7 +75,7 @@ impl<'a, 'tcx> UnusedMutCx<'a, 'tcx> {
}
let (hir_id, span) = ids[0];
let mut_span = tcx.sess.codemap().span_until_non_whitespace(span);
let mut_span = tcx.sess.source_map().span_until_non_whitespace(span);
// Ok, every name wasn't used mutably, so issue a warning that this
// didn't need to be mutable.


@ -40,7 +40,7 @@ pub fn set_source_location(
};
let dbg_loc = if function_debug_context.source_locations_enabled.get() {
debug!("set_source_location: {}", bx.sess().codemap().span_to_string(span));
debug!("set_source_location: {}", bx.sess().source_map().span_to_string(span));
let loc = span_start(bx.cx, span);
InternalDebugLocation::new(scope.unwrap(), loc.line, loc.col.to_usize())
} else {


@ -47,7 +47,7 @@ pub fn create_DIArray(
/// Return syntax_pos::Loc corresponding to the beginning of the span
pub fn span_start(cx: &CodegenCx, span: Span) -> syntax_pos::Loc {
cx.sess().codemap().lookup_char_pos(span.lo())
cx.sess().source_map().lookup_char_pos(span.lo())
}
#[inline]


@ -359,7 +359,7 @@ impl FunctionCx<'a, 'll, 'tcx> {
self.set_debug_loc(&bx, terminator.source_info);
// Get the location information.
let loc = bx.sess().codemap().lookup_char_pos(span.lo());
let loc = bx.sess().source_map().lookup_char_pos(span.lo());
let filename = Symbol::intern(&loc.file.name.to_string()).as_str();
let filename = C_str_slice(bx.cx, filename);
let line = C_u32(bx.cx, loc.line as u32);


@ -166,7 +166,7 @@ impl FunctionCx<'a, 'll, 'tcx> {
let scope_metadata = self.scopes[scope_id].scope_metadata;
if pos < self.scopes[scope_id].file_start_pos ||
pos >= self.scopes[scope_id].file_end_pos {
let cm = self.cx.sess().codemap();
let cm = self.cx.sess().source_map();
let defining_crate = self.debug_context.get_ref(DUMMY_SP).defining_crate;
Some(debuginfo::extend_scope_to_file(self.cx,
scope_metadata.unwrap(),


@ -695,7 +695,7 @@ pub fn phase_1_parse_input<'a>(
if sess.opts.debugging_opts.input_stats {
println!(
"Lines of code: {}",
sess.codemap().count_lines()
sess.source_map().count_lines()
);
println!("Pre-expansion node count: {}", count_nodes(&krate));
}
@ -1462,7 +1462,7 @@ fn write_out_deps(sess: &Session, outputs: &OutputFilenames, out_filenames: &[Pa
let result = (|| -> io::Result<()> {
// Build a list of files used to compile the output and
// write Makefile-compatible dependency rules
let files: Vec<String> = sess.codemap()
let files: Vec<String> = sess.source_map()
.files()
.iter()
.filter(|fmap| fmap.is_real_file())


@ -915,7 +915,7 @@ pub fn fold_crate(sess: &Session, krate: ast::Crate, ppm: PpMode) -> ast::Crate
fn get_source(input: &Input, sess: &Session) -> (Vec<u8>, FileName) {
let src_name = driver::source_name(input);
let src = sess.codemap()
let src = sess.source_map()
.get_source_file(&src_name)
.unwrap()
.src
@ -954,7 +954,7 @@ pub fn print_after_parsing(sess: &Session,
s.call_with_pp_support(sess, None, move |annotation| {
debug!("pretty printing source code {:?}", s);
let sess = annotation.sess();
pprust::print_crate(sess.codemap(),
pprust::print_crate(sess.source_map(),
&sess.parse_sess,
krate,
src_name,
@ -1011,7 +1011,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,
s.call_with_pp_support(sess, Some(hir_map), move |annotation| {
debug!("pretty printing source code {:?}", s);
let sess = annotation.sess();
pprust::print_crate(sess.codemap(),
pprust::print_crate(sess.source_map(),
&sess.parse_sess,
krate,
src_name,
@ -1035,7 +1035,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,
move |annotation, krate| {
debug!("pretty printing source code {:?}", s);
let sess = annotation.sess();
pprust_hir::print_crate(sess.codemap(),
pprust_hir::print_crate(sess.source_map(),
&sess.parse_sess,
krate,
src_name,
@ -1076,7 +1076,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,
debug!("pretty printing source code {:?}", s);
let sess = annotation.sess();
let hir_map = annotation.hir_map().expect("-Z unpretty missing HIR map");
let mut pp_state = pprust_hir::State::new_from_input(sess.codemap(),
let mut pp_state = pprust_hir::State::new_from_input(sess.source_map(),
&sess.parse_sess,
src_name,
&mut rdr,


@ -162,7 +162,7 @@ fn test_env_with_pool<F>(
&arenas,
resolutions,
hir_map,
OnDiskCache::new_empty(sess.codemap()),
OnDiskCache::new_empty(sess.source_map()),
"test_crate",
tx,
&outputs,


@ -207,11 +207,11 @@ pub fn load_dep_graph(sess: &Session) ->
pub fn load_query_result_cache<'sess>(sess: &'sess Session) -> OnDiskCache<'sess> {
if sess.opts.incremental.is_none() ||
!sess.opts.debugging_opts.incremental_queries {
return OnDiskCache::new_empty(sess.codemap());
return OnDiskCache::new_empty(sess.source_map());
}
match load_data(sess.opts.debugging_opts.incremental_info, &query_cache_path(sess)) {
LoadResult::Ok{ data: (bytes, start_pos) } => OnDiskCache::new(sess, bytes, start_pos),
_ => OnDiskCache::new_empty(sess.codemap())
_ => OnDiskCache::new_empty(sess.source_map())
}
}


@ -82,7 +82,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for WhileTrue {
if let ast::LitKind::Bool(true) = lit.node {
if lit.span.ctxt() == SyntaxContext::empty() {
let msg = "denote infinite loops with `loop { ... }`";
let condition_span = cx.tcx.sess.codemap().def_span(e.span);
let condition_span = cx.tcx.sess.source_map().def_span(e.span);
let mut err = cx.struct_span_lint(WHILE_TRUE, condition_span, msg);
err.span_suggestion_short_with_applicability(
condition_span,
@ -195,7 +195,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonShorthandFieldPatterns {
let mut err = cx.struct_span_lint(NON_SHORTHAND_FIELD_PATTERNS,
fieldpat.span,
&format!("the `{}:` in this pattern is redundant", ident));
let subspan = cx.tcx.sess.codemap().span_through_char(fieldpat.span, ':');
let subspan = cx.tcx.sess.source_map().span_through_char(fieldpat.span, ':');
err.span_suggestion_short_with_applicability(
subspan,
"remove this",
@ -367,7 +367,7 @@ impl MissingDoc {
let has_doc = attrs.iter().any(|a| has_doc(a));
if !has_doc {
cx.span_lint(MISSING_DOCS,
cx.tcx.sess.codemap().def_span(sp),
cx.tcx.sess.source_map().def_span(sp),
&format!("missing documentation for {}", desc));
}
}
@ -651,7 +651,7 @@ impl EarlyLintPass for AnonymousParameters {
if ident.name == keywords::Invalid.name() {
let ty_snip = cx
.sess
.codemap()
.source_map()
.span_to_snippet(arg.ty.span);
let (ty_snip, appl) = if let Ok(snip) = ty_snip {
@ -958,7 +958,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnconditionalRecursion {
// no break */ }`) shouldn't be linted unless it actually
// recurs.
if !reached_exit_without_self_call && !self_call_spans.is_empty() {
let sp = cx.tcx.sess.codemap().def_span(sp);
let sp = cx.tcx.sess.source_map().def_span(sp);
let mut db = cx.struct_span_lint(UNCONDITIONAL_RECURSION,
sp,
"function cannot return without recurring");
@ -1278,7 +1278,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for InvalidNoMangleItems {
let mut err = cx.struct_span_lint(NO_MANGLE_CONST_ITEMS, it.span, msg);
// account for "pub const" (#45562)
let start = cx.tcx.sess.codemap().span_to_snippet(it.span)
let start = cx.tcx.sess.source_map().span_to_snippet(it.span)
.map(|snippet| snippet.find("const").unwrap_or(0))
.unwrap_or(0) as u32;
// `const` is 5 chars
@ -1440,7 +1440,7 @@ impl UnreachablePub {
if span.ctxt().outer().expn_info().is_some() {
applicability = Applicability::MaybeIncorrect;
}
let def_span = cx.tcx.sess.codemap().def_span(span);
let def_span = cx.tcx.sess.source_map().def_span(span);
let mut err = cx.struct_span_lint(UNREACHABLE_PUB, def_span,
&format!("unreachable `pub` {}", what));
let replacement = if cx.tcx.features().crate_visibility_modifier {


@ -300,7 +300,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TypeLimits {
}
fn get_bin_hex_repr(cx: &LateContext, lit: &ast::Lit) -> Option<String> {
let src = cx.sess().codemap().span_to_snippet(lit.span).ok()?;
let src = cx.sess().source_map().span_to_snippet(lit.span).ok()?;
let firstch = src.chars().next()?;
if firstch == '0' {


@ -463,7 +463,7 @@ impl cstore::CStore {
let (name, def) = data.get_macro(id.index);
let source_name = FileName::Macros(name.to_string());
let source_file = sess.parse_sess.codemap().new_source_file(source_name, def.body);
let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body);
let local_span = Span::new(source_file.start_pos, source_file.end_pos, NO_EXPANSION);
let body = source_file_to_stream(&sess.parse_sess, source_file, None);


@ -314,7 +314,7 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for DecodeContext<'a, 'tcx> {
bug!("Cannot decode Span without Session.")
};
let imported_source_files = self.cdata().imported_source_files(&sess.codemap());
let imported_source_files = self.cdata().imported_source_files(&sess.source_map());
let source_file = {
// Optimize for the case that most spans within a translated item
// originate from the same source_file.


@ -158,7 +158,7 @@ impl<'a, 'tcx> SpecializedEncoder<Span> for EncodeContext<'a, 'tcx> {
debug_assert!(span.lo <= span.hi);
if !self.source_file_cache.contains(span.lo) {
let codemap = self.tcx.sess.codemap();
let codemap = self.tcx.sess.source_map();
let source_file_index = codemap.lookup_source_file_idx(span.lo);
self.source_file_cache = codemap.files()[source_file_index].clone();
}
@ -337,8 +337,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
self.lazy(definitions.def_path_table())
}
fn encode_codemap(&mut self) -> LazySeq<syntax_pos::SourceFile> {
let codemap = self.tcx.sess.codemap();
fn encode_source_map(&mut self) -> LazySeq<syntax_pos::SourceFile> {
let codemap = self.tcx.sess.source_map();
let all_source_files = codemap.files();
let (working_dir, working_dir_was_remapped) = self.tcx.sess.working_dir.clone();
@ -420,7 +420,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
// Encode codemap
i = self.position();
let codemap = self.encode_codemap();
let codemap = self.encode_source_map();
let codemap_bytes = self.position() - i;
// Encode DefPathTable
@ -1842,7 +1842,7 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
lazy_state: LazyState::NoNode,
type_shorthands: Default::default(),
predicate_shorthands: Default::default(),
source_file_cache: tcx.sess.codemap().files()[0].clone(),
source_file_cache: tcx.sess.source_map().files()[0].clone(),
interpret_allocs: Default::default(),
interpret_allocs_inverse: Default::default(),
};


@ -316,7 +316,7 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>(
}
let span = local_decl.source_info.span;
let mut_span = tcx.sess.codemap().span_until_non_whitespace(span);
let mut_span = tcx.sess.source_map().span_until_non_whitespace(span);
let mut err = tcx.struct_span_lint_node(
UNUSED_MUT,
@ -1509,7 +1509,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
debug!("check_for_invalidation_at_exit({:?}): INVALID", place);
// FIXME: should be talking about the region lifetime instead
// of just a span here.
let span = self.tcx.sess.codemap().end_point(span);
let span = self.tcx.sess.source_map().end_point(span);
self.report_borrowed_value_does_not_live_long_enough(
context,
borrow,


@ -332,7 +332,7 @@ impl<'a, 'gcx, 'tcx> MirBorrowckCtxt<'a, 'gcx, 'tcx> {
err: &mut DiagnosticBuilder<'a>,
span: Span,
) {
let snippet = self.tcx.sess.codemap().span_to_snippet(span).unwrap();
let snippet = self.tcx.sess.source_map().span_to_snippet(span).unwrap();
match error {
GroupedMoveError::MovesFromPlace {
mut binds_to,
@ -394,7 +394,7 @@ impl<'a, 'gcx, 'tcx> MirBorrowckCtxt<'a, 'gcx, 'tcx> {
}))
) = bind_to.is_user_variable {
let pat_snippet = self
.tcx.sess.codemap()
.tcx.sess.source_map()
.span_to_snippet(pat_span)
.unwrap();
if pat_snippet.starts_with('&') {


@ -268,7 +268,7 @@ impl<'a, 'gcx, 'tcx> MirBorrowckCtxt<'a, 'gcx, 'tcx> {
// a local variable, then just suggest the user remove it.
Place::Local(_)
if {
if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(span) {
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
snippet.starts_with("&mut ")
} else {
false
@ -406,7 +406,7 @@ fn suggest_ampmut_self<'cx, 'gcx, 'tcx>(
local_decl: &mir::LocalDecl<'tcx>,
) -> (Span, String) {
let sp = local_decl.source_info.span;
(sp, match tcx.sess.codemap().span_to_snippet(sp) {
(sp, match tcx.sess.source_map().span_to_snippet(sp) {
Ok(snippet) => {
let lt_pos = snippet.find('\'');
if let Some(lt_pos) = lt_pos {
@ -444,7 +444,7 @@ fn suggest_ampmut<'cx, 'gcx, 'tcx>(
let locations = mir.find_assignments(local);
if locations.len() > 0 {
let assignment_rhs_span = mir.source_info(locations[0]).span;
if let Ok(src) = tcx.sess.codemap().span_to_snippet(assignment_rhs_span) {
if let Ok(src) = tcx.sess.source_map().span_to_snippet(assignment_rhs_span) {
if let (true, Some(ws_pos)) = (
src.starts_with("&'"),
src.find(|c: char| -> bool { c.is_whitespace() }),
@ -469,7 +469,7 @@ fn suggest_ampmut<'cx, 'gcx, 'tcx>(
None => local_decl.source_info.span,
};
if let Ok(src) = tcx.sess.codemap().span_to_snippet(highlight_span) {
if let Ok(src) = tcx.sess.source_map().span_to_snippet(highlight_span) {
if let (true, Some(ws_pos)) = (
src.starts_with("&'"),
src.find(|c: char| -> bool { c.is_whitespace() }),


@ -189,7 +189,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
name: &InternedString,
diag: &mut DiagnosticBuilder<'_>,
) {
let cm = tcx.sess.codemap();
let cm = tcx.sess.source_map();
let scope = error_region.free_region_binding_scope(tcx);
let node = tcx.hir.as_local_node_id(scope).unwrap_or(DUMMY_NODE_ID);
@ -383,7 +383,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
let region_name = self.synthesize_region_name(counter);
// Just grab the first character, the `&`.
let codemap = tcx.sess.codemap();
let codemap = tcx.sess.source_map();
let ampersand_span = codemap.start_point(hir_ty.span);
diag.span_label(
@ -593,7 +593,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
= tcx.hir.expect_expr(mir_node_id).node
{
(
tcx.sess.codemap().end_point(span),
tcx.sess.source_map().end_point(span),
if gen_move.is_some() { " of generator" } else { " of closure" }
)
} else {


@ -732,7 +732,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
let region_scope_span = region_scope.span(self.hir.tcx(),
&self.hir.region_scope_tree);
// Attribute scope exit drops to scope's closing brace.
let scope_end = self.hir.tcx().sess.codemap().end_point(region_scope_span);
let scope_end = self.hir.tcx().sess.source_map().end_point(region_scope_span);
scope.drops.push(DropData {
span: scope_end,


@ -424,12 +424,12 @@ fn is_enclosed(tcx: TyCtxt,
}
fn report_unused_unsafe(tcx: TyCtxt, used_unsafe: &FxHashSet<ast::NodeId>, id: ast::NodeId) {
let span = tcx.sess.codemap().def_span(tcx.hir.span(id));
let span = tcx.sess.source_map().def_span(tcx.hir.span(id));
let msg = "unnecessary `unsafe` block";
let mut db = tcx.struct_span_lint_node(UNUSED_UNSAFE, id, span, msg);
db.span_label(span, msg);
if let Some((kind, id)) = is_enclosed(tcx, used_unsafe, id) {
db.span_label(tcx.sess.codemap().def_span(tcx.hir.span(id)),
db.span_label(tcx.sess.source_map().def_span(tcx.hir.span(id)),
format!("because it's nested under this `unsafe` {}", kind));
}
db.emit();


@ -33,7 +33,7 @@ pub fn suggest_ref_mut<'cx, 'gcx, 'tcx>(
tcx: ty::TyCtxt<'cx, 'gcx, 'tcx>,
binding_span: Span,
) -> Option<(String)> {
let hi_src = tcx.sess.codemap().span_to_snippet(binding_span).unwrap();
let hi_src = tcx.sess.source_map().span_to_snippet(binding_span).unwrap();
if hi_src.starts_with("ref")
&& hi_src["ref".len()..].starts_with(Pattern_White_Space)
{


@ -441,7 +441,7 @@ fn comment(tcx: TyCtxt, SourceInfo { span, scope }: SourceInfo) -> String {
format!(
"scope {} at {}",
scope.index(),
tcx.sess.codemap().span_to_string(span)
tcx.sess.source_map().span_to_string(span)
)
}


@ -175,7 +175,7 @@ pub fn check_crate(resolver: &mut Resolver, krate: &ast::Crate) {
let ms = MultiSpan::from_spans(spans.clone());
let mut span_snippets = spans.iter()
.filter_map(|s| {
match visitor.session.codemap().span_to_snippet(*s) {
match visitor.session.source_map().span_to_snippet(*s) {
Ok(s) => Some(format!("`{}`", s)),
_ => None,
}


@ -195,7 +195,7 @@ fn resolve_struct_error<'sess, 'a>(resolver: &'sess Resolver,
"can't use type parameters from outer function");
err.span_label(span, "use of type variable from outer function");
let cm = resolver.session.codemap();
let cm = resolver.session.source_map();
match outer_def {
Def::SelfTy(_, maybe_impl_defid) => {
if let Some(impl_span) = maybe_impl_defid.map_or(None,
@ -3085,7 +3085,7 @@ impl<'a, 'crateloader: 'a> Resolver<'a, 'crateloader> {
// parser issue where a struct literal is being used on an expression
// where a brace being opened means a block is being started. Look
// ahead for the next text to see if `span` is followed by a `{`.
let cm = this.session.codemap();
let cm = this.session.source_map();
let mut sp = span;
loop {
sp = cm.next_point(sp);
@ -3212,7 +3212,7 @@ impl<'a, 'crateloader: 'a> Resolver<'a, 'crateloader> {
err: &mut DiagnosticBuilder,
base_span: Span) {
debug!("type_ascription_suggetion {:?}", base_span);
let cm = self.session.codemap();
let cm = self.session.source_map();
debug!("self.current_type_ascription {:?}", self.current_type_ascription);
if let Some(sp) = self.current_type_ascription.last() {
let mut sp = *sp;
@ -4527,7 +4527,7 @@ impl<'a, 'crateloader: 'a> Resolver<'a, 'crateloader> {
false => "defined",
};
let (name, span) = (ident.name, self.session.codemap().def_span(new_binding.span));
let (name, span) = (ident.name, self.session.source_map().def_span(new_binding.span));
if let Some(s) = self.name_already_seen.get(&name) {
if s == &span {
@ -4566,7 +4566,7 @@ impl<'a, 'crateloader: 'a> Resolver<'a, 'crateloader> {
err.span_label(span, format!("`{}` re{} here", name, new_participle));
if !old_binding.span.is_dummy() {
err.span_label(self.session.codemap().def_span(old_binding.span),
err.span_label(self.session.source_map().def_span(old_binding.span),
format!("previous {} of the {} `{}` here", old_noun, old_kind, name));
}
@ -4578,7 +4578,7 @@ impl<'a, 'crateloader: 'a> Resolver<'a, 'crateloader> {
old_binding
};
let cm = self.session.codemap();
let cm = self.session.source_map();
let rename_msg = "You can use `as` to change the binding name of the import";
if let (Ok(snippet), false) = (cm.span_to_snippet(binding.span),


@ -1205,8 +1205,8 @@ impl<'a, 'b:'a, 'c: 'b> ImportResolver<'a, 'b, 'c> {
let enum_span = enum_resolution.borrow()
.binding.expect("binding should exist")
.span;
let enum_def_span = self.session.codemap().def_span(enum_span);
let enum_def_snippet = self.session.codemap()
let enum_def_span = self.session.source_map().def_span(enum_span);
let enum_def_snippet = self.session.source_map()
.span_to_snippet(enum_def_span).expect("snippet should exist");
// potentially need to strip extant `crate`/`pub(path)` for suggestion
let after_vis_index = enum_def_snippet.find("enum")


@ -1368,7 +1368,7 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> Visitor<'l> for DumpVisitor<'l, 'tc
let qualname = format!("::{}", self.tcx.node_path_str(id));
let cm = self.tcx.sess.codemap();
let cm = self.tcx.sess.source_map();
let filename = cm.span_to_filename(span);
let data_id = ::id_from_node_id(id, &self.save_ctxt);
let children = m.items


@ -95,7 +95,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> {
fn span_from_span(&self, span: Span) -> SpanData {
use rls_span::{Column, Row};
let cm = self.tcx.sess.codemap();
let cm = self.tcx.sess.source_map();
let start = cm.lookup_char_pos(span.lo());
let end = cm.lookup_char_pos(span.hi());
@ -122,7 +122,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> {
continue;
}
};
let lo_loc = self.span_utils.sess.codemap().lookup_char_pos(span.lo());
let lo_loc = self.span_utils.sess.source_map().lookup_char_pos(span.lo());
result.push(ExternalCrateData {
// FIXME: change file_name field to PathBuf in rls-data
// https://github.com/nrc/rls-data/issues/7
@ -268,7 +268,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> {
ast::ItemKind::Mod(ref m) => {
let qualname = format!("::{}", self.tcx.node_path_str(item.id));
let cm = self.tcx.sess.codemap();
let cm = self.tcx.sess.source_map();
let filename = cm.span_to_filename(m.inner);
let sub_span = self.span_utils


@ -47,7 +47,7 @@ impl<'a> SpanUtils<'a> {
}
pub fn snippet(&self, span: Span) -> String {
match self.sess.codemap().span_to_snippet(span) {
match self.sess.source_map().span_to_snippet(span) {
Ok(s) => s,
Err(_) => String::new(),
}
@ -151,7 +151,7 @@ impl<'a> SpanUtils<'a> {
}
#[cfg(debug_assertions)] {
if angle_count != 0 || bracket_count != 0 {
let loc = self.sess.codemap().lookup_char_pos(span.lo());
let loc = self.sess.source_map().lookup_char_pos(span.lo());
span_bug!(
span,
"Mis-counted brackets when breaking path? Parsing '{}' \
@ -278,7 +278,7 @@ impl<'a> SpanUtils<'a> {
//If the span comes from a fake source_file, filter it.
if !self.sess
.codemap()
.source_map()
.lookup_char_pos(parent.lo())
.file
.is_real_file()


@ -350,7 +350,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
if let Some(mut err) = err {
if is_arg {
if let PatKind::Binding(..) = inner.node {
if let Ok(snippet) = tcx.sess.codemap()
if let Ok(snippet) = tcx.sess.source_map()
.span_to_snippet(pat.span)
{
err.help(&format!("did you mean `{}: &{}`?",


@ -221,7 +221,7 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> {
format!("cannot cast `{}` as `{}`",
fcx.ty_to_string(self.expr_ty),
cast_ty));
if let Ok(snippet) = fcx.sess().codemap().span_to_snippet(self.expr.span) {
if let Ok(snippet) = fcx.sess().source_map().span_to_snippet(self.expr.span) {
err.span_help(self.expr.span,
&format!("did you mean `*{}`?", snippet));
}
@ -325,7 +325,7 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> {
hir::MutImmutable => "",
};
if self.cast_ty.is_trait() {
match fcx.tcx.sess.codemap().span_to_snippet(self.cast_span) {
match fcx.tcx.sess.source_map().span_to_snippet(self.cast_span) {
Ok(s) => {
err.span_suggestion(self.cast_span,
"try casting to a reference instead",
@ -344,7 +344,7 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> {
}
}
ty::TyAdt(def, ..) if def.is_box() => {
match fcx.tcx.sess.codemap().span_to_snippet(self.cast_span) {
match fcx.tcx.sess.source_map().span_to_snippet(self.cast_span) {
Ok(s) => {
err.span_suggestion(self.cast_span,
"try casting to a `Box` instead",


@ -40,7 +40,7 @@ pub fn compare_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
debug!("compare_impl_method(impl_trait_ref={:?})",
impl_trait_ref);
let impl_m_span = tcx.sess.codemap().def_span(impl_m_span);
let impl_m_span = tcx.sess.source_map().def_span(impl_m_span);
if let Err(ErrorReported) = compare_self_type(tcx,
impl_m,
@ -319,7 +319,7 @@ fn compare_predicate_entailment<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
trait_m.ident);
if let TypeError::Mutability = terr {
if let Some(trait_err_span) = trait_err_span {
if let Ok(trait_err_str) = tcx.sess.codemap().span_to_snippet(trait_err_span) {
if let Ok(trait_err_str) = tcx.sess.source_map().span_to_snippet(trait_err_span) {
diag.span_suggestion(
impl_err_span,
"consider change the type to match the mutability in trait",
@ -386,7 +386,7 @@ fn check_region_bounds_on_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// are zero. Since I don't quite know how to phrase things at
// the moment, give a kind of vague error message.
if trait_params != impl_params {
let def_span = tcx.sess.codemap().def_span(span);
let def_span = tcx.sess.source_map().def_span(span);
let span = tcx.hir.get_generics_span(impl_m.def_id).unwrap_or(def_span);
let mut err = struct_span_err!(
tcx.sess,
@ -397,7 +397,7 @@ fn check_region_bounds_on_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
);
err.span_label(span, "lifetimes do not match method in trait");
if let Some(sp) = tcx.hir.span_if_local(trait_m.def_id) {
let def_sp = tcx.sess.codemap().def_span(sp);
let def_sp = tcx.sess.source_map().def_span(sp);
let sp = tcx.hir.get_generics_span(trait_m.def_id).unwrap_or(def_sp);
err.span_label(sp, "lifetimes in impl do not match this method in trait");
}
@ -770,7 +770,7 @@ fn compare_synthetic_generics<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// as another generic argument
let new_name = tcx
.sess
.codemap()
.source_map()
.span_to_snippet(trait_span)
.ok()?;
let trait_m = tcx.hir.as_local_node_id(trait_m.def_id)?;
@ -783,7 +783,7 @@ fn compare_synthetic_generics<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// and the opening paren of the argument list
let new_generics_span = tcx
.sess
.codemap()
.source_map()
.generate_fn_name_span(impl_span)?
.shrink_to_hi();
// in case there are generics, just replace them
@ -794,7 +794,7 @@ fn compare_synthetic_generics<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// replace with the generics from the trait
let new_generics = tcx
.sess
.codemap()
.source_map()
.span_to_snippet(trait_m.generics.span)
.ok()?;
@ -865,7 +865,7 @@ fn compare_synthetic_generics<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let bounds = bounds.first()?.span().to(bounds.last()?.span());
let bounds = tcx
.sess
.codemap()
.source_map()
.span_to_snippet(bounds)
.ok()?;


@ -251,7 +251,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
checked_ty: Ty<'tcx>,
expected: Ty<'tcx>)
-> Option<(Span, &'static str, String)> {
let cm = self.sess().codemap();
let cm = self.sess().source_map();
// Use the callsite's span if this is a macro call. #41858
let sp = cm.call_span_if_macro(expr.span);
if !cm.span_to_filename(sp).is_real() {
@ -405,7 +405,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
let needs_paren = expr.precedence().order() < (PREC_POSTFIX as i8);
if let Ok(src) = self.tcx.sess.codemap().span_to_snippet(expr.span) {
if let Ok(src) = self.tcx.sess.source_map().span_to_snippet(expr.span) {
let msg = format!("you can cast an `{}` to `{}`", checked_ty, expected_ty);
let cast_suggestion = format!("{}{}{} as {}",
if needs_paren { "(" } else { "" },


@ -132,7 +132,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
};
if let Some(note_span) = note_span {
// We have a span pointing to the method. Show note with snippet.
err.span_note(self.tcx.sess.codemap().def_span(note_span), &note_str);
err.span_note(self.tcx.sess.source_map().def_span(note_span), &note_str);
} else {
err.note(&note_str);
}
@ -141,7 +141,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
let item = self
.associated_item(trait_did, item_name, Namespace::Value)
.unwrap();
let item_span = self.tcx.sess.codemap()
let item_span = self.tcx.sess.source_map()
.def_span(self.tcx.def_span(item.def_id));
if sources.len() > 1 {
span_note!(err,
@ -246,7 +246,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
};
match expr.node {
hir::ExprKind::Lit(ref lit) => { // numeric literal
let snippet = tcx.sess.codemap().span_to_snippet(lit.span)
let snippet = tcx.sess.source_map().span_to_snippet(lit.span)
.unwrap_or("<numeric literal>".to_string());
err.span_suggestion(lit.span,
@ -261,9 +261,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
if let &hir::QPath::Resolved(_, ref path) = &qpath {
if let hir::def::Def::Local(node_id) = path.def {
let span = tcx.hir.span(node_id);
let snippet = tcx.sess.codemap().span_to_snippet(span)
let snippet = tcx.sess.source_map().span_to_snippet(span)
.unwrap();
let filename = tcx.sess.codemap().span_to_filename(span);
let filename = tcx.sess.source_map().span_to_filename(span);
let parent_node = self.tcx.hir.get(
self.tcx.hir.get_parent_node(node_id),
@ -320,7 +320,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
if let Some(def) = actual.ty_adt_def() {
if let Some(full_sp) = tcx.hir.span_if_local(def.did) {
let def_sp = tcx.sess.codemap().def_span(full_sp);
let def_sp = tcx.sess.source_map().def_span(full_sp);
err.span_label(def_sp, format!("{} `{}` not found {}",
item_kind,
item_name,
@ -341,7 +341,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
let variant = &def.non_enum_variant();
if let Some(index) = self.tcx.find_field_index(item_name, variant) {
let field = &variant.fields[index];
let snippet = tcx.sess.codemap().span_to_snippet(expr.span);
let snippet = tcx.sess.source_map().span_to_snippet(expr.span);
let expr_string = match snippet {
Ok(expr_string) => expr_string,
_ => "s".into(), // Default to a generic placeholder for the
@ -387,7 +387,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
}
if let Some(expr) = rcvr_expr {
if let Ok(expr_string) = tcx.sess.codemap().span_to_snippet(expr.span) {
if let Ok(expr_string) = tcx.sess.source_map().span_to_snippet(expr.span) {
report_function!(expr.span, expr_string);
} else if let hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) =
expr.node


@ -1447,7 +1447,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
impl_id: DefId,
impl_trait_ref: ty::TraitRef<'tcx>,
impl_item_refs: &[hir::ImplItemRef]) {
let impl_span = tcx.sess.codemap().def_span(impl_span);
let impl_span = tcx.sess.source_map().def_span(impl_span);
// If the trait reference itself is erroneous (so the compilation is going
// to fail), skip checking the items here -- the `impl_item` table in `tcx`
@ -2668,11 +2668,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
if arg_count == 1 {" was"} else {"s were"}),
DiagnosticId::Error(error_code.to_owned()));
if let Some(def_s) = def_span.map(|sp| tcx.sess.codemap().def_span(sp)) {
if let Some(def_s) = def_span.map(|sp| tcx.sess.source_map().def_span(sp)) {
err.span_label(def_s, "defined here");
}
if sugg_unit {
let sugg_span = tcx.sess.codemap().end_point(expr_sp);
let sugg_span = tcx.sess.source_map().end_point(expr_sp);
// remove closing `)` from the span
let sugg_span = sugg_span.shrink_to_lo();
err.span_suggestion(
@ -2937,8 +2937,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
(ExpectIfCondition, &hir::ExprKind::Assign(ref lhs, ref rhs)) => {
let msg = "try comparing for equality";
if let (Ok(left), Ok(right)) = (
self.tcx.sess.codemap().span_to_snippet(lhs.span),
self.tcx.sess.codemap().span_to_snippet(rhs.span))
self.tcx.sess.source_map().span_to_snippet(lhs.span),
self.tcx.sess.source_map().span_to_snippet(rhs.span))
{
err.span_suggestion(expr.span, msg, format!("{} == {}", left, right));
} else {
@ -4232,7 +4232,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
if let hir::ExprKind::Lit(ref lit) = idx.node {
if let ast::LitKind::Int(i,
ast::LitIntType::Unsuffixed) = lit.node {
let snip = tcx.sess.codemap().span_to_snippet(base.span);
let snip = tcx.sess.source_map().span_to_snippet(base.span);
if let Ok(snip) = snip {
err.span_suggestion(expr.span,
"to access tuple elements, use",
@ -4629,7 +4629,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
err.span_suggestion(sp, msg, suggestion);
} else if !self.check_for_cast(err, expr, found, expected) {
let methods = self.get_conversion_methods(expr.span, expected, found);
if let Ok(expr_text) = self.sess().codemap().span_to_snippet(expr.span) {
if let Ok(expr_text) = self.sess().source_map().span_to_snippet(expr.span) {
let suggestions = iter::repeat(expr_text).zip(methods.iter())
.filter_map(|(receiver, method)| {
let method_call = format!(".{}()", method.ident);
@ -4673,7 +4673,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
hir::ExprKind::Loop(..) |
hir::ExprKind::Match(..) |
hir::ExprKind::Block(..) => {
let sp = self.tcx.sess.codemap().next_point(cause_span);
let sp = self.tcx.sess.source_map().next_point(cause_span);
err.span_suggestion(sp,
"try adding a semicolon",
";".to_string());


@ -253,7 +253,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
Err(()) => {
// error types are considered "builtin"
if !lhs_ty.references_error() {
let codemap = self.tcx.sess.codemap();
let codemap = self.tcx.sess.source_map();
match is_assign {
IsAssign::Yes => {
let mut err = struct_span_err!(self.tcx.sess, expr.span, E0368,
@ -420,7 +420,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
err: &mut errors::DiagnosticBuilder,
is_assign: bool,
) -> bool {
let codemap = self.tcx.sess.codemap();
let codemap = self.tcx.sess.source_map();
let msg = "`to_owned()` can be used to create an owned `String` \
from a string reference. String concatenation \
appends the string on the right to the string \


@ -71,7 +71,7 @@ impl<'a, 'tcx> CheckVisitor<'a, 'tcx> {
return;
}
let msg = if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(span) {
let msg = if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
format!("unused import: `{}`", snippet)
} else {
"unused import".to_string()


@ -52,7 +52,7 @@ fn check_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, node_id: ast::NodeId) {
fn enforce_trait_manually_implementable(tcx: TyCtxt, impl_def_id: DefId, trait_def_id: DefId) {
let did = Some(trait_def_id);
let li = tcx.lang_items();
let span = tcx.sess.codemap().def_span(tcx.span_of_impl(impl_def_id).unwrap());
let span = tcx.sess.source_map().def_span(tcx.span_of_impl(impl_def_id).unwrap());
// Disallow *all* explicit impls of `Sized` and `Unsize` for now.
if did == li.sized_trait() {
@ -169,7 +169,7 @@ fn check_impl_overlap<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, node_id: ast::NodeI
traits::supertrait_def_ids(tcx,
data.principal().unwrap().def_id());
if supertrait_def_ids.any(|d| d == trait_def_id) {
let sp = tcx.sess.codemap().def_span(tcx.span_of_impl(impl_def_id).unwrap());
let sp = tcx.sess.source_map().def_span(tcx.span_of_impl(impl_def_id).unwrap());
struct_span_err!(tcx.sess,
sp,
E0371,


@ -40,7 +40,7 @@ impl<'cx, 'tcx, 'v> ItemLikeVisitor<'v> for OrphanChecker<'cx, 'tcx> {
self.tcx.hir.node_to_string(item.id));
let trait_ref = self.tcx.impl_trait_ref(def_id).unwrap();
let trait_def_id = trait_ref.def_id;
let cm = self.tcx.sess.codemap();
let cm = self.tcx.sess.source_map();
let sp = cm.def_span(item.span);
match traits::orphan_check(self.tcx, def_id) {
Ok(()) => {}


@ -72,7 +72,7 @@ impl<'tcx> StructuredDiagnostic<'tcx> for VariadicError<'tcx> {
self.code(),
)
};
if let Ok(snippet) = self.sess.codemap().span_to_snippet(self.span) {
if let Ok(snippet) = self.sess.source_map().span_to_snippet(self.span) {
err.span_suggestion(self.span,
&format!("cast the value to `{}`", self.cast_ty),
format!("{} as {}", snippet, self.cast_ty));


@ -239,7 +239,7 @@ impl Clean<ExternalCrate> for CrateNum {
fn clean(&self, cx: &DocContext) -> ExternalCrate {
let root = DefId { krate: *self, index: CRATE_DEF_INDEX };
let krate_span = cx.tcx.def_span(root);
let krate_src = cx.sess().codemap().span_to_filename(krate_span);
let krate_src = cx.sess().source_map().span_to_filename(krate_span);
// Collect all inner modules which are tagged as implementations of
// primitives.
@ -596,7 +596,7 @@ impl Clean<Item> for doctree::Module {
// determine if we should display the inner contents or
// the outer `mod` item for the source code.
let whence = {
let cm = cx.sess().codemap();
let cm = cx.sess().source_map();
let outer = cm.lookup_char_pos(self.where_outer.lo());
let inner = cm.lookup_char_pos(self.where_inner.lo());
if outer.file.start_pos == inner.file.start_pos {
@ -3015,7 +3015,7 @@ impl Clean<Span> for syntax_pos::Span {
return Span::empty();
}
let cm = cx.sess().codemap();
let cm = cx.sess().source_map();
let filename = cm.span_to_filename(*self);
let lo = cm.lookup_char_pos(self.lo());
let hi = cm.lookup_char_pos(self.hi());
@ -3620,7 +3620,7 @@ pub trait ToSource {
impl ToSource for syntax_pos::Span {
fn to_src(&self, cx: &DocContext) -> String {
debug!("converting span {:?} to snippet", self.clean(cx));
let sn = match cx.sess().codemap().span_to_snippet(*self) {
let sn = match cx.sess().source_map().span_to_snippet(*self) {
Ok(x) => x.to_string(),
Err(_) => "".to_string()
};
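
For context, a hedged sketch of the lookup pattern rustdoc relies on above: a `Span` is resolved to a file name plus one-based line and column. The helper is hypothetical; `SourceMap` and `Span` are assumed to be in scope.

// Render a span as "file:line:col" the way rustdoc's Clean impl does.
fn span_location(cm: &SourceMap, sp: Span) -> String {
    let loc = cm.lookup_char_pos(sp.lo());
    // `col` is zero-based, hence the `+ 1` used throughout this commit.
    format!("{}:{}:{}", cm.span_to_filename(sp), loc.line, loc.col.to_usize() + 1)
}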

View File

@@ -33,7 +33,7 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>,
tooltip: Option<(&str, &str)>) -> String {
debug!("highlighting: ================\n{}\n==============", src);
let sess = parse::ParseSess::new(FilePathMapping::empty());
let fm = sess.codemap().new_source_file(FileName::Custom("stdin".to_string()), src.to_string());
let fm = sess.source_map().new_source_file(FileName::Custom("stdin".to_string()), src.to_string());
let mut out = Vec::new();
if let Some((tooltip, class)) = tooltip {
@@ -43,7 +43,7 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>,
}
write_header(class, &mut out).unwrap();
let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm, None), sess.codemap());
let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm, None), sess.source_map());
if classifier.write_source(&mut out).is_err() {
return format!("<pre>{}</pre>", src);
}
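
A minimal sketch, assuming the `syntax` crate of this era, of the setup the highlighter performs above: the input string is registered with the parse session's source map so later span lookups resolve into it. The function name and file label are illustrative only.

// Imports elided; ParseSess, FilePathMapping, FileName, SourceFile and
// Lrc are the libsyntax types assumed here.
fn make_stdin_file(src: &str) -> Lrc<SourceFile> {
    let sess = ParseSess::new(FilePathMapping::empty());
    sess.source_map()
        .new_source_file(FileName::Custom("stdin".to_string()), src.to_string())
}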

View File

@@ -45,7 +45,7 @@ pub struct ErrorLocation {
impl ErrorLocation {
/// Create an error location from a span.
pub fn from_span(ecx: &ExtCtxt, sp: Span) -> ErrorLocation {
let loc = ecx.codemap().lookup_char_pos_adj(sp.lo());
let loc = ecx.source_map().lookup_char_pos_adj(sp.lo());
ErrorLocation {
filename: loc.filename,
line: loc.line

View File

@@ -836,7 +836,7 @@ impl<'a> ExtCtxt<'a> {
pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> {
parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect())
}
pub fn codemap(&self) -> &'a SourceMap { self.parse_sess.codemap() }
pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() }
pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
pub fn cfg(&self) -> &ast::CrateConfig { &self.parse_sess.config }
pub fn call_site(&self) -> Span {
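
For downstream callers the change is mechanical: anything that went through `ExtCtxt::codemap()` now goes through `ExtCtxt::source_map()`. A hedged sketch of a hypothetical syntax-extension helper, assuming the `ExtCtxt` and `FileName` types shown above.

// Illustrative only; before this commit the body would read
// ecx.codemap().span_to_filename(sp).
fn callee_file(ecx: &ExtCtxt, sp: Span) -> String {
    ecx.source_map().span_to_filename(sp).to_string()
}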

View File

@@ -764,7 +764,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
}
fn expr_fail(&self, span: Span, msg: Symbol) -> P<ast::Expr> {
let loc = self.codemap().lookup_char_pos(span.lo());
let loc = self.source_map().lookup_char_pos(span.lo());
let expr_file = self.expr_str(span, Symbol::intern(&loc.file.name.to_string()));
let expr_line = self.expr_u32(span, loc.line as u32);
let expr_col = self.expr_u32(span, loc.col.to_usize() as u32 + 1);

View File

@@ -267,7 +267,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate {
let mut module = ModuleData {
mod_path: vec![Ident::from_str(&self.cx.ecfg.crate_name)],
directory: match self.cx.codemap().span_to_unmapped_path(krate.span) {
directory: match self.cx.source_map().span_to_unmapped_path(krate.span) {
FileName::Real(path) => path,
other => PathBuf::from(other.to_string()),
},
@@ -1355,7 +1355,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
module.directory.push(&*item.ident.as_str());
}
} else {
let path = self.cx.parse_sess.codemap().span_to_unmapped_path(inner);
let path = self.cx.parse_sess.source_map().span_to_unmapped_path(inner);
let mut path = match path {
FileName::Real(path) => path,
other => PathBuf::from(other.to_string()),
@@ -1563,7 +1563,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
// Add this input file to the code map to make it available as
// dependency information
self.cx.codemap().new_source_file(filename.into(), src);
self.cx.source_map().new_source_file(filename.into(), src);
let include_info = vec![
dummy_spanned(ast::NestedMetaItemKind::MetaItem(

View File

@@ -36,7 +36,7 @@ pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
base::check_zero_tts(cx, sp, tts, "line!");
let topmost = cx.expansion_cause().unwrap_or(sp);
let loc = cx.codemap().lookup_char_pos(topmost.lo());
let loc = cx.source_map().lookup_char_pos(topmost.lo());
base::MacEager::expr(cx.expr_u32(topmost, loc.line as u32))
}
@@ -47,7 +47,7 @@ pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
base::check_zero_tts(cx, sp, tts, "column!");
let topmost = cx.expansion_cause().unwrap_or(sp);
let loc = cx.codemap().lookup_char_pos(topmost.lo());
let loc = cx.source_map().lookup_char_pos(topmost.lo());
base::MacEager::expr(cx.expr_u32(topmost, loc.col.to_usize() as u32 + 1))
}
@@ -70,7 +70,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
base::check_zero_tts(cx, sp, tts, "file!");
let topmost = cx.expansion_cause().unwrap_or(sp);
let loc = cx.codemap().lookup_char_pos(topmost.lo());
let loc = cx.source_map().lookup_char_pos(topmost.lo());
base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name.to_string())))
}
@@ -154,7 +154,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
// Add this input file to the code map to make it available as
// dependency information
cx.codemap().new_source_file(file.into(), src);
cx.source_map().new_source_file(file.into(), src);
base::MacEager::expr(cx.expr_str(sp, interned_src))
}
@@ -184,7 +184,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke
Ok(..) => {
// Add this input file to the code map to make it available as
// dependency information, but don't enter its contents
cx.codemap().new_source_file(file.into(), "".to_string());
cx.source_map().new_source_file(file.into(), "".to_string());
base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes))))
}
@@ -199,7 +199,7 @@ fn res_rel_file(cx: &mut ExtCtxt, sp: syntax_pos::Span, arg: String) -> PathBuf
// after macro expansion (that is, they are unhygienic).
if !arg.is_absolute() {
let callsite = sp.source_callsite();
let mut path = match cx.codemap().span_to_unmapped_path(callsite) {
let mut path = match cx.source_map().span_to_unmapped_path(callsite) {
FileName::Real(path) => path,
other => panic!("cannot resolve relative path in non-file source `{}`", other),
};
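
The `line!`, `column!` and `file!` expansions above share one pattern: walk to the outermost expansion site, then look the position up in the source map. A sketch with a hypothetical helper; the exact signature of `expansion_cause` is assumed.

// Returns (file, line, one-based column) for the user's call site.
fn caller_location(cx: &mut ExtCtxt, sp: Span) -> (String, u32, u32) {
    // Use the outermost expansion so nested macros report the user's code,
    // not macro-generated spans.
    let topmost = cx.expansion_cause().unwrap_or(sp);
    let loc = cx.source_map().lookup_char_pos(topmost.lo());
    (loc.file.name.to_string(), loc.line as u32, loc.col.to_usize() as u32 + 1)
}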

View File

@@ -1556,7 +1556,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
if !name.as_str().is_ascii() {
gate_feature_post!(&self,
non_ascii_idents,
self.context.parse_sess.codemap().def_span(sp),
self.context.parse_sess.source_map().def_span(sp),
"non-ascii idents are not fully supported.");
}
}

View File

@@ -371,7 +371,7 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut
{
let mut src = String::new();
srdr.read_to_string(&mut src).unwrap();
let cm = SourceMap::new(sess.codemap().path_mapping().clone());
let cm = SourceMap::new(sess.source_map().path_mapping().clone());
let source_file = cm.new_source_file(path, src);
let mut rdr = lexer::StringReader::new_raw(sess, source_file, None);

View File

@@ -234,8 +234,8 @@ impl<'a> StringReader<'a> {
}
pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
let begin = sess.codemap().lookup_byte_offset(span.lo());
let end = sess.codemap().lookup_byte_offset(span.hi());
let begin = sess.source_map().lookup_byte_offset(span.lo());
let end = sess.source_map().lookup_byte_offset(span.hi());
// Make the range zero-length if the span is invalid.
if span.lo() > span.hi() || begin.fm.start_pos != end.fm.start_pos {

View File

@@ -86,7 +86,7 @@ impl ParseSess {
}
}
pub fn codemap(&self) -> &SourceMap {
pub fn source_map(&self) -> &SourceMap {
&self.code_map
}
@@ -171,13 +171,13 @@ crate fn parse_stmt_from_source_str(name: FileName, source: String, sess: &Parse
pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &ParseSess,
override_span: Option<Span>)
-> TokenStream {
source_file_to_stream(sess, sess.codemap().new_source_file(name, source), override_span)
source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
}
// Create a new parser from a source string
pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
-> Parser {
let mut parser = source_file_to_parser(sess, sess.codemap().new_source_file(name, source));
let mut parser = source_file_to_parser(sess, sess.source_map().new_source_file(name, source));
parser.recurse_into_file_modules = false;
parser
}
@@ -227,7 +227,7 @@ pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
/// add the path to the session's codemap and return the new source_file.
fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
-> Lrc<SourceFile> {
match sess.codemap().load_file(path) {
match sess.source_map().load_file(path) {
Ok(source_file) => source_file,
Err(e) => {
let msg = format!("couldn't read {:?}: {}", path.display(), e);
@@ -969,7 +969,7 @@ mod tests {
let span = tts.iter().rev().next().unwrap().span();
match sess.codemap().span_to_snippet(span) {
match sess.source_map().span_to_snippet(span) {
Ok(s) => assert_eq!(&s[..], "{ body }"),
Err(_) => panic!("could not get snippet"),
}
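
A usage sketch (not from the diff) of the string-based entry points touched above: both register the text with the session's source map before lexing, so spans in the resulting tokens point into that buffer. The wrapper name and the `FileName::Custom` label are assumptions.

// Assumes the free functions from syntax::parse shown in the hunks above.
fn parse_snippet(sess: &ParseSess, src: &str) -> TokenStream {
    parse_stream_from_source_str(
        FileName::Custom("snippet".to_string()),
        src.to_string(),
        sess,
        None,
    )
}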

View File

@@ -577,7 +577,7 @@ impl<'a> Parser<'a> {
if let Some(directory) = directory {
parser.directory = directory;
} else if !parser.span.is_dummy() {
if let FileName::Real(mut path) = sess.codemap().span_to_unmapped_path(parser.span) {
if let FileName::Real(mut path) = sess.source_map().span_to_unmapped_path(parser.span) {
path.pop();
parser.directory.path = Cow::from(path);
}
@@ -652,10 +652,10 @@ impl<'a> Parser<'a> {
// EOF, don't want to point at the following char, but rather the last token
self.prev_span
} else {
self.sess.codemap().next_point(self.prev_span)
self.sess.source_map().next_point(self.prev_span)
};
let label_exp = format!("expected `{}`", token_str);
let cm = self.sess.codemap();
let cm = self.sess.source_map();
match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
(Ok(ref a), Ok(ref b)) if a.line == b.line => {
// When the spans are in the same line, it means that the only content
@@ -720,14 +720,14 @@ impl<'a> Parser<'a> {
expect.clone()
};
(format!("expected one of {}, found `{}`", expect, actual),
(self.sess.codemap().next_point(self.prev_span),
(self.sess.source_map().next_point(self.prev_span),
format!("expected one of {} here", short_expect)))
} else if expected.is_empty() {
(format!("unexpected token: `{}`", actual),
(self.prev_span, "unexpected token after this".to_string()))
} else {
(format!("expected {}, found `{}`", expect, actual),
(self.sess.codemap().next_point(self.prev_span),
(self.sess.source_map().next_point(self.prev_span),
format!("expected {} here", expect)))
};
let mut err = self.fatal(&msg_exp);
@@ -738,7 +738,7 @@ impl<'a> Parser<'a> {
label_sp
};
let cm = self.sess.codemap();
let cm = self.sess.source_map();
match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
(Ok(ref a), Ok(ref b)) if a.line == b.line => {
// When the spans are in the same line, it means that the only content between
@@ -2902,7 +2902,7 @@ impl<'a> Parser<'a> {
self.this_token_descr()));
// span the `not` plus trailing whitespace to avoid
// trailing whitespace after the `!` in our suggestion
let to_replace = self.sess.codemap()
let to_replace = self.sess.source_map()
.span_until_non_whitespace(lo.to(self.span));
err.span_suggestion_short_with_applicability(
to_replace,
@@ -3000,7 +3000,7 @@ impl<'a> Parser<'a> {
Err(mut err) => {
err.span_label(self.span,
"expecting a type here because of type ascription");
let cm = self.sess.codemap();
let cm = self.sess.source_map();
let cur_pos = cm.lookup_char_pos(self.span.lo());
let op_pos = cm.lookup_char_pos(cur_op_span.hi());
if cur_pos.line != op_pos.line {
@@ -3161,7 +3161,7 @@ impl<'a> Parser<'a> {
id: ast::DUMMY_NODE_ID
}));
let expr_str = self.sess.codemap().span_to_snippet(expr.span)
let expr_str = self.sess.source_map().span_to_snippet(expr.span)
.unwrap_or(pprust::expr_to_string(&expr));
err.span_suggestion_with_applicability(
expr.span,
@@ -3277,7 +3277,7 @@ impl<'a> Parser<'a> {
// return. This won't catch blocks with an explicit `return`, but that would be caught by
// the dead code lint.
if self.eat_keyword(keywords::Else) || !cond.returns() {
let sp = self.sess.codemap().next_point(lo);
let sp = self.sess.source_map().next_point(lo);
let mut err = self.diagnostic()
.struct_span_err(sp, "missing condition for `if` statemement");
err.span_label(sp, "expected if condition here");
@@ -3527,7 +3527,7 @@ impl<'a> Parser<'a> {
&& self.token != token::CloseDelim(token::Brace);
if require_comma {
let cm = self.sess.codemap();
let cm = self.sess.source_map();
self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)])
.map_err(|mut err| {
match (cm.span_to_lines(expr.span), cm.span_to_lines(arm_start_span)) {
@@ -3837,7 +3837,7 @@ impl<'a> Parser<'a> {
err.span_label(self.span, "expected `}`");
let mut comma_sp = None;
if self.token == token::Comma { // Issue #49257
etc_sp = etc_sp.to(self.sess.codemap().span_until_non_whitespace(self.span));
etc_sp = etc_sp.to(self.sess.source_map().span_until_non_whitespace(self.span));
err.span_label(etc_sp,
"`..` must be at the end and cannot have a trailing comma");
comma_sp = Some(self.span);
@@ -3955,7 +3955,7 @@ impl<'a> Parser<'a> {
let seq_span = pat.span.to(self.prev_span);
let mut err = self.struct_span_err(comma_span,
"unexpected `,` in pattern");
if let Ok(seq_snippet) = self.sess.codemap().span_to_snippet(seq_span) {
if let Ok(seq_snippet) = self.sess.source_map().span_to_snippet(seq_span) {
err.span_suggestion_with_applicability(
seq_span,
"try adding parentheses",
@@ -4220,7 +4220,7 @@ impl<'a> Parser<'a> {
let parser_snapshot_after_type = self.clone();
mem::replace(self, parser_snapshot_before_type);
let snippet = self.sess.codemap().span_to_snippet(pat.span).unwrap();
let snippet = self.sess.source_map().span_to_snippet(pat.span).unwrap();
err.span_label(pat.span, format!("while parsing the type for `{}`", snippet));
(Some((parser_snapshot_after_type, colon_sp, err)), None)
}
@@ -6039,7 +6039,7 @@ impl<'a> Parser<'a> {
err.emit();
} else {
if seen_comma == false {
let sp = self.sess.codemap().next_point(previous_span);
let sp = self.sess.source_map().next_point(previous_span);
err.span_suggestion_with_applicability(
sp,
"missing comma here",
@@ -6051,7 +6051,7 @@ impl<'a> Parser<'a> {
}
}
_ => {
let sp = self.sess.codemap().next_point(self.prev_span);
let sp = self.sess.source_map().next_point(self.prev_span);
let mut err = self.struct_span_err(sp, &format!("expected `,`, or `}}`, found `{}`",
self.this_token_to_string()));
if self.token.is_ident() {
@@ -6418,7 +6418,7 @@ impl<'a> Parser<'a> {
DirectoryOwnership::UnownedViaMod(_) => None,
};
let paths = Parser::default_submod_path(
id, relative, &self.directory.path, self.sess.codemap());
id, relative, &self.directory.path, self.sess.source_map());
match self.directory.ownership {
DirectoryOwnership::Owned { .. } => {
@@ -6445,7 +6445,7 @@ impl<'a> Parser<'a> {
let mut err = self.diagnostic().struct_span_err(id_sp,
"cannot declare a new module at this location");
if !id_sp.is_dummy() {
let src_path = self.sess.codemap().span_to_filename(id_sp);
let src_path = self.sess.source_map().span_to_filename(id_sp);
if let FileName::Real(src_path) = src_path {
if let Some(stem) = src_path.file_stem() {
let mut dest_path = src_path.clone();
@@ -7207,7 +7207,7 @@ impl<'a> Parser<'a> {
sp, &suggestion, format!(" {} ", kw), Applicability::MachineApplicable
);
} else {
if let Ok(snippet) = self.sess.codemap().span_to_snippet(ident_sp) {
if let Ok(snippet) = self.sess.source_map().span_to_snippet(ident_sp) {
err.span_suggestion_with_applicability(
full_sp,
"if you meant to call a macro, try",

View File

@@ -21,7 +21,7 @@ use std::path::PathBuf;
/// Map a string to tts, using a made-up filename:
pub fn string_to_stream(source_str: String) -> TokenStream {
let ps = ParseSess::new(FilePathMapping::empty());
source_file_to_stream(&ps, ps.codemap()
source_file_to_stream(&ps, ps.source_map()
.new_source_file(PathBuf::from("bogofile").into(), source_str), None)
}

View File

@@ -801,7 +801,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt,
}
};
let is_literal = match ecx.codemap().span_to_snippet(fmt_sp) {
let is_literal = match ecx.source_map().span_to_snippet(fmt_sp) {
Ok(ref s) if s.starts_with("\"") || s.starts_with("r#") => true,
_ => false,
};
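
A sketch (illustrative only) of the heuristic above: the format string is treated as written literally when its recovered source text starts like a string or raw-string literal; spans that cannot be resolved fall back to `false`.

// Mirrors the match above; cx is the ExtCtxt passed to the expansion.
fn looks_like_literal(cx: &ExtCtxt, fmt_sp: Span) -> bool {
    match cx.source_map().span_to_snippet(fmt_sp) {
        Ok(ref s) => s.starts_with("\"") || s.starts_with("r#"),
        _ => false,
    }
}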