Auto merge of #55190 - dlavati:51574_rename_codemap_filemap, r=petrochenkov

Rename other occs of (Code/File)Map to Source(Map/File) #51574

Additional renamings for #51574.
This commit is contained in:
bors 2018-10-30 01:02:40 +00:00
commit fb2446ad58
13 changed files with 210 additions and 209 deletions

View File

@ -11,10 +11,10 @@
//! ICH - Incremental Compilation Hash
crate use rustc_data_structures::fingerprint::Fingerprint;
pub use self::caching_codemap_view::CachingSourceMapView;
pub use self::caching_source_map_view::CachingSourceMapView;
pub use self::hcx::{StableHashingContextProvider, StableHashingContext, NodeIdHashingMode,
hash_stable_trait_impls};
mod caching_codemap_view;
mod caching_source_map_view;
mod hcx;
mod impls_cstore;

View File

@ -25,7 +25,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque,
use session::{CrateDisambiguator, Session};
use std::mem;
use syntax::ast::NodeId;
use syntax::source_map::{SourceMap, StableFilemapId};
use syntax::source_map::{SourceMap, StableSourceFileId};
use syntax_pos::{BytePos, Span, DUMMY_SP, SourceFile};
use syntax_pos::hygiene::{Mark, SyntaxContext, ExpnInfo};
use ty;
@ -62,7 +62,7 @@ pub struct OnDiskCache<'sess> {
cnum_map: Once<IndexVec<CrateNum, Option<CrateNum>>>,
source_map: &'sess SourceMap,
file_index_to_stable_id: FxHashMap<SourceFileIndex, StableFilemapId>,
file_index_to_stable_id: FxHashMap<SourceFileIndex, StableSourceFileId>,
// These two fields are caches that are populated lazily during decoding.
file_index_to_file: Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
@ -82,7 +82,7 @@ pub struct OnDiskCache<'sess> {
// This type is used only for (de-)serialization.
#[derive(RustcEncodable, RustcDecodable)]
struct Footer {
file_index_to_stable_id: FxHashMap<SourceFileIndex, StableFilemapId>,
file_index_to_stable_id: FxHashMap<SourceFileIndex, StableSourceFileId>,
prev_cnums: Vec<(u32, String, CrateDisambiguator)>,
query_result_index: EncodedQueryResultIndex,
diagnostics_index: EncodedQueryResultIndex,
@ -181,7 +181,7 @@ impl<'sess> OnDiskCache<'sess> {
let index = SourceFileIndex(index as u32);
let file_ptr: *const SourceFile = &**file as *const _;
file_to_file_index.insert(file_ptr, index);
file_index_to_stable_id.insert(index, StableFilemapId::new(&file));
file_index_to_stable_id.insert(index, StableSourceFileId::new(&file));
}
(file_to_file_index, file_index_to_stable_id)
@ -473,7 +473,7 @@ struct CacheDecoder<'a, 'tcx: 'a, 'x> {
cnum_map: &'x IndexVec<CrateNum, Option<CrateNum>>,
synthetic_expansion_infos: &'x Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
file_index_to_file: &'x Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
file_index_to_stable_id: &'x FxHashMap<SourceFileIndex, StableFilemapId>,
file_index_to_stable_id: &'x FxHashMap<SourceFileIndex, StableSourceFileId>,
alloc_decoding_session: AllocDecodingSession<'x>,
}

View File

@ -120,7 +120,7 @@ impl ColorConfig {
pub struct EmitterWriter {
dst: Destination,
cm: Option<Lrc<SourceMapperDyn>>,
sm: Option<Lrc<SourceMapperDyn>>,
short_message: bool,
teach: bool,
ui_testing: bool,
@ -134,14 +134,14 @@ struct FileWithAnnotatedLines {
impl EmitterWriter {
pub fn stderr(color_config: ColorConfig,
code_map: Option<Lrc<SourceMapperDyn>>,
source_map: Option<Lrc<SourceMapperDyn>>,
short_message: bool,
teach: bool)
-> EmitterWriter {
let dst = Destination::from_stderr(color_config);
EmitterWriter {
dst,
cm: code_map,
sm: source_map,
short_message,
teach,
ui_testing: false,
@ -149,13 +149,13 @@ impl EmitterWriter {
}
pub fn new(dst: Box<dyn Write + Send>,
code_map: Option<Lrc<SourceMapperDyn>>,
source_map: Option<Lrc<SourceMapperDyn>>,
short_message: bool,
teach: bool)
-> EmitterWriter {
EmitterWriter {
dst: Raw(dst),
cm: code_map,
sm: source_map,
short_message,
teach,
ui_testing: false,
@ -214,14 +214,14 @@ impl EmitterWriter {
let mut output = vec![];
let mut multiline_annotations = vec![];
if let Some(ref cm) = self.cm {
if let Some(ref sm) = self.sm {
for span_label in msp.span_labels() {
if span_label.span.is_dummy() {
continue;
}
let lo = cm.lookup_char_pos(span_label.span.lo());
let mut hi = cm.lookup_char_pos(span_label.span.hi());
let lo = sm.lookup_char_pos(span_label.span.lo());
let mut hi = sm.lookup_char_pos(span_label.span.hi());
// Watch out for "empty spans". If we get a span like 6..6, we
// want to just display a `^` at 6, so convert that to
@ -724,10 +724,10 @@ impl EmitterWriter {
fn get_multispan_max_line_num(&mut self, msp: &MultiSpan) -> usize {
let mut max = 0;
if let Some(ref cm) = self.cm {
if let Some(ref sm) = self.sm {
for primary_span in msp.primary_spans() {
if !primary_span.is_dummy() {
let hi = cm.lookup_char_pos(primary_span.hi());
let hi = sm.lookup_char_pos(primary_span.hi());
if hi.line > max {
max = hi.line;
}
@ -736,7 +736,7 @@ impl EmitterWriter {
if !self.short_message {
for span_label in msp.span_labels() {
if !span_label.span.is_dummy() {
let hi = cm.lookup_char_pos(span_label.span.hi());
let hi = sm.lookup_char_pos(span_label.span.hi());
if hi.line > max {
max = hi.line;
}
@ -768,7 +768,7 @@ impl EmitterWriter {
always_backtrace: bool) -> bool {
let mut spans_updated = false;
if let Some(ref cm) = self.cm {
if let Some(ref sm) = self.sm {
let mut before_after: Vec<(Span, Span)> = vec![];
let mut new_labels: Vec<(Span, String)> = vec![];
@ -777,7 +777,7 @@ impl EmitterWriter {
if sp.is_dummy() {
continue;
}
let call_sp = cm.call_span_if_macro(*sp);
let call_sp = sm.call_span_if_macro(*sp);
if call_sp != *sp && !always_backtrace {
before_after.push((*sp, call_sp));
}
@ -802,7 +802,7 @@ impl EmitterWriter {
})));
}
// Check to make sure we're not in any <*macros>
if !cm.span_to_filename(def_site).is_macros() &&
if !sm.span_to_filename(def_site).is_macros() &&
!trace.macro_decl_name.starts_with("desugaring of ") &&
!trace.macro_decl_name.starts_with("#[") ||
always_backtrace {
@ -829,7 +829,7 @@ impl EmitterWriter {
if sp_label.span.is_dummy() {
continue;
}
if cm.span_to_filename(sp_label.span.clone()).is_macros() &&
if sm.span_to_filename(sp_label.span.clone()).is_macros() &&
!always_backtrace
{
let v = sp_label.span.macro_backtrace();
@ -1000,10 +1000,10 @@ impl EmitterWriter {
let mut annotated_files = self.preprocess_annotations(msp);
// Make sure our primary file comes first
let (primary_lo, cm) = if let (Some(cm), Some(ref primary_span)) =
(self.cm.as_ref(), msp.primary_span().as_ref()) {
let (primary_lo, sm) = if let (Some(sm), Some(ref primary_span)) =
(self.sm.as_ref(), msp.primary_span().as_ref()) {
if !primary_span.is_dummy() {
(cm.lookup_char_pos(primary_span.lo()), cm)
(sm.lookup_char_pos(primary_span.lo()), sm)
} else {
emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?;
return Ok(());
@ -1021,7 +1021,7 @@ impl EmitterWriter {
// Print out the annotate source lines that correspond with the error
for annotated_file in annotated_files {
// we can't annotate anything if the source is unavailable.
if !cm.ensure_source_file_source_present(annotated_file.file.clone()) {
if !sm.ensure_source_file_source_present(annotated_file.file.clone()) {
continue;
}
@ -1038,7 +1038,7 @@ impl EmitterWriter {
buffer.append(buffer_msg_line_offset,
&format!("{}:{}:{}",
loc.file.name,
cm.doctest_offset_line(loc.line),
sm.doctest_offset_line(loc.line),
loc.col.0 + 1),
Style::LineAndColumn);
for _ in 0..max_line_num_len {
@ -1048,7 +1048,7 @@ impl EmitterWriter {
buffer.prepend(0,
&format!("{}:{}:{}: ",
loc.file.name,
cm.doctest_offset_line(loc.line),
sm.doctest_offset_line(loc.line),
loc.col.0 + 1),
Style::LineAndColumn);
}
@ -1069,7 +1069,7 @@ impl EmitterWriter {
};
format!("{}:{}{}",
annotated_file.file.name,
cm.doctest_offset_line(first_line.line_index),
sm.doctest_offset_line(first_line.line_index),
col)
} else {
annotated_file.file.name.to_string()
@ -1194,7 +1194,7 @@ impl EmitterWriter {
level: &Level,
max_line_num_len: usize)
-> io::Result<()> {
if let Some(ref cm) = self.cm {
if let Some(ref sm) = self.sm {
let mut buffer = StyledBuffer::new();
// Render the suggestion message
@ -1210,7 +1210,7 @@ impl EmitterWriter {
Some(Style::HeaderMsg));
// Render the replacements for each suggestion
let suggestions = suggestion.splice_lines(&**cm);
let suggestions = suggestion.splice_lines(&**sm);
let mut row_num = 2;
for &(ref complete, ref parts) in suggestions.iter().take(MAX_SUGGESTIONS) {
@ -1221,11 +1221,11 @@ impl EmitterWriter {
&& parts[0].snippet.trim() == complete.trim())
&& complete.lines().count() == 1;
let lines = cm.span_to_lines(parts[0].span).unwrap();
let lines = sm.span_to_lines(parts[0].span).unwrap();
assert!(!lines.lines.is_empty());
let line_start = cm.lookup_char_pos(parts[0].span.lo()).line;
let line_start = sm.lookup_char_pos(parts[0].span.lo()).line;
draw_col_separator_no_space(&mut buffer, 1, max_line_num_len + 1);
let mut line_pos = 0;
let mut lines = complete.lines();
@ -1250,8 +1250,8 @@ impl EmitterWriter {
if show_underline {
draw_col_separator(&mut buffer, row_num, max_line_num_len + 1);
for part in parts {
let span_start_pos = cm.lookup_char_pos(part.span.lo()).col_display;
let span_end_pos = cm.lookup_char_pos(part.span.hi()).col_display;
let span_start_pos = sm.lookup_char_pos(part.span.lo()).col_display;
let span_end_pos = sm.lookup_char_pos(part.span.hi()).col_display;
// Do not underline the leading...
let start = part.snippet.len()

View File

@ -129,7 +129,7 @@ pub trait SourceMapper {
fn span_to_filename(&self, sp: Span) -> FileName;
fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span>;
fn call_span_if_macro(&self, sp: Span) -> Span;
fn ensure_source_file_source_present(&self, file_map: Lrc<SourceFile>) -> bool;
fn ensure_source_file_source_present(&self, source_file: Lrc<SourceFile>) -> bool;
fn doctest_offset_line(&self, line: usize) -> usize;
}

View File

@ -36,19 +36,19 @@ use rustc_serialize::json::{as_json, as_pretty_json};
pub struct JsonEmitter {
dst: Box<dyn Write + Send>,
registry: Option<Registry>,
cm: Lrc<dyn SourceMapper + sync::Send + sync::Sync>,
sm: Lrc<dyn SourceMapper + sync::Send + sync::Sync>,
pretty: bool,
ui_testing: bool,
}
impl JsonEmitter {
pub fn stderr(registry: Option<Registry>,
code_map: Lrc<SourceMap>,
source_map: Lrc<SourceMap>,
pretty: bool) -> JsonEmitter {
JsonEmitter {
dst: Box::new(io::stderr()),
registry,
cm: code_map,
sm: source_map,
pretty,
ui_testing: false,
}
@ -62,12 +62,12 @@ impl JsonEmitter {
pub fn new(dst: Box<dyn Write + Send>,
registry: Option<Registry>,
code_map: Lrc<SourceMap>,
source_map: Lrc<SourceMap>,
pretty: bool) -> JsonEmitter {
JsonEmitter {
dst,
registry,
cm: code_map,
sm: source_map,
pretty,
ui_testing: false,
}
@ -199,7 +199,7 @@ impl Diagnostic {
}
let buf = BufWriter::default();
let output = buf.clone();
EmitterWriter::new(Box::new(buf), Some(je.cm.clone()), false, false)
EmitterWriter::new(Box::new(buf), Some(je.sm.clone()), false, false)
.ui_testing(je.ui_testing).emit(db);
let output = Arc::try_unwrap(output.0).unwrap().into_inner().unwrap();
let output = String::from_utf8(output).unwrap();
@ -269,8 +269,8 @@ impl DiagnosticSpan {
mut backtrace: vec::IntoIter<MacroBacktrace>,
je: &JsonEmitter)
-> DiagnosticSpan {
let start = je.cm.lookup_char_pos(span.lo());
let end = je.cm.lookup_char_pos(span.hi());
let start = je.sm.lookup_char_pos(span.lo());
let end = je.sm.lookup_char_pos(span.hi());
let backtrace_step = backtrace.next().map(|bt| {
let call_site =
Self::from_span_full(bt.call_site,
@ -356,7 +356,7 @@ impl DiagnosticSpanLine {
/// of `span` gets a DiagnosticSpanLine, with the highlight indicating the
/// `span` within the line.
fn from_span(span: Span, je: &JsonEmitter) -> Vec<DiagnosticSpanLine> {
je.cm.span_to_lines(span)
je.sm.span_to_lines(span)
.map(|lines| {
let fm = &*lines.file;
lines.lines

View File

@ -256,11 +256,11 @@ impl<'a> StringReader<'a> {
let end = sess.source_map().lookup_byte_offset(span.hi());
// Make the range zero-length if the span is invalid.
if span.lo() > span.hi() || begin.fm.start_pos != end.fm.start_pos {
if span.lo() > span.hi() || begin.sf.start_pos != end.sf.start_pos {
span = span.shrink_to_lo();
}
let mut sr = StringReader::new_raw_internal(sess, begin.fm, None);
let mut sr = StringReader::new_raw_internal(sess, begin.sf, None);
// Seek the lexer to the right byte range.
sr.next_pos = span.lo();
@ -640,9 +640,9 @@ impl<'a> StringReader<'a> {
// I guess this is the only way to figure out if
// we're at the beginning of the file...
let cmap = SourceMap::new(FilePathMapping::empty());
cmap.files.borrow_mut().file_maps.push(self.source_file.clone());
let loc = cmap.lookup_char_pos_adj(self.pos);
let smap = SourceMap::new(FilePathMapping::empty());
smap.files.borrow_mut().source_files.push(self.source_file.clone());
let loc = smap.lookup_char_pos_adj(self.pos);
debug!("Skipping a shebang");
if loc.line == 1 && loc.col == CharPos(0) {
// FIXME: Add shebang "token", return it
@ -1855,9 +1855,9 @@ mod tests {
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::sync::Lock;
use with_globals;
fn mk_sess(cm: Lrc<SourceMap>) -> ParseSess {
fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
Some(cm.clone()),
Some(sm.clone()),
false,
false);
ParseSess {
@ -1865,7 +1865,7 @@ mod tests {
unstable_features: UnstableFeatures::from_environment(),
config: CrateConfig::default(),
included_mod_stack: Lock::new(Vec::new()),
code_map: cm,
source_map: sm,
missing_fragment_specifiers: Lock::new(FxHashSet::default()),
raw_identifier_spans: Lock::new(Vec::new()),
registered_diagnostics: Lock::new(ErrorMap::new()),
@ -1875,20 +1875,20 @@ mod tests {
}
// open a string reader for the given string
fn setup<'a>(cm: &SourceMap,
fn setup<'a>(sm: &SourceMap,
sess: &'a ParseSess,
teststr: String)
-> StringReader<'a> {
let fm = cm.new_source_file(PathBuf::from("zebra.rs").into(), teststr);
StringReader::new(sess, fm, None)
let sf = sm.new_source_file(PathBuf::from("zebra.rs").into(), teststr);
StringReader::new(sess, sf, None)
}
#[test]
fn t1() {
with_globals(|| {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let mut string_reader = setup(&cm,
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
let mut string_reader = setup(&sm,
&sh,
"/* my source file */ fn main() { println!(\"zebra\"); }\n"
.to_string());
@ -1934,9 +1934,9 @@ mod tests {
#[test]
fn doublecolonparsing() {
with_globals(|| {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a b".to_string()),
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
check_tokenization(setup(&sm, &sh, "a b".to_string()),
vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
})
}
@ -1944,9 +1944,9 @@ mod tests {
#[test]
fn dcparsing_2() {
with_globals(|| {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a::b".to_string()),
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
check_tokenization(setup(&sm, &sh, "a::b".to_string()),
vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
})
}
@ -1954,9 +1954,9 @@ mod tests {
#[test]
fn dcparsing_3() {
with_globals(|| {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
check_tokenization(setup(&sm, &sh, "a ::b".to_string()),
vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
})
}
@ -1964,9 +1964,9 @@ mod tests {
#[test]
fn dcparsing_4() {
with_globals(|| {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
check_tokenization(setup(&sm, &sh, "a:: b".to_string()),
vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
})
}
@ -1974,9 +1974,9 @@ mod tests {
#[test]
fn character_a() {
with_globals(|| {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("a")), None));
})
}
@ -1984,9 +1984,9 @@ mod tests {
#[test]
fn character_space() {
with_globals(|| {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern(" ")), None));
})
}
@ -1994,9 +1994,9 @@ mod tests {
#[test]
fn character_escaped() {
with_globals(|| {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("\\n")), None));
})
}
@ -2004,9 +2004,9 @@ mod tests {
#[test]
fn lifetime_name() {
with_globals(|| {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().tok,
token::Lifetime(Ident::from_str("'abc")));
})
}
@ -2014,9 +2014,9 @@ mod tests {
#[test]
fn raw_string() {
with_globals(|| {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
.next_token()
.tok,
token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None));
@ -2026,15 +2026,15 @@ mod tests {
#[test]
fn literal_suffixes() {
with_globals(|| {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
macro_rules! test {
($input: expr, $tok_type: ident, $tok_contents: expr) => {{
assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok,
assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().tok,
token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
Some(Symbol::intern("suffix"))));
// with a whitespace separator:
assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok,
assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().tok,
token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
None));
}}
@ -2050,13 +2050,13 @@ mod tests {
test!("1.0", Float, "1.0");
test!("1.0e10", Float, "1.0e10");
assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok,
assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().tok,
token::Literal(token::Integer(Symbol::intern("2")),
Some(Symbol::intern("us"))));
assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
token::Literal(token::StrRaw(Symbol::intern("raw"), 3),
Some(Symbol::intern("suffix"))));
assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3),
Some(Symbol::intern("suffix"))));
})
@ -2072,9 +2072,9 @@ mod tests {
#[test]
fn nested_block_comments() {
with_globals(|| {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string());
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
match lexer.next_token().tok {
token::Comment => {}
_ => panic!("expected a comment!"),
@ -2087,9 +2087,9 @@ mod tests {
#[test]
fn crlf_comments() {
with_globals(|| {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string());
let comment = lexer.next_token();
assert_eq!(comment.tok, token::Comment);
assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7)));

View File

@ -57,7 +57,7 @@ pub struct ParseSess {
pub non_modrs_mods: Lock<Vec<(ast::Ident, Span)>>,
/// Used to determine and report recursive mod inclusions
included_mod_stack: Lock<Vec<PathBuf>>,
code_map: Lrc<SourceMap>,
source_map: Lrc<SourceMap>,
pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
}
@ -71,7 +71,7 @@ impl ParseSess {
ParseSess::with_span_handler(handler, cm)
}
pub fn with_span_handler(handler: Handler, code_map: Lrc<SourceMap>) -> ParseSess {
pub fn with_span_handler(handler: Handler, source_map: Lrc<SourceMap>) -> ParseSess {
ParseSess {
span_diagnostic: handler,
unstable_features: UnstableFeatures::from_environment(),
@ -80,14 +80,14 @@ impl ParseSess {
raw_identifier_spans: Lock::new(Vec::new()),
registered_diagnostics: Lock::new(ErrorMap::new()),
included_mod_stack: Lock::new(vec![]),
code_map,
source_map,
non_modrs_mods: Lock::new(vec![]),
buffered_lints: Lock::new(vec![]),
}
}
pub fn source_map(&self) -> &SourceMap {
&self.code_map
&self.source_map
}
pub fn buffer_lint<S: Into<MultiSpan>>(&self,

View File

@ -106,17 +106,17 @@ impl FileLoader for RealFileLoader {
// subsequent compilation sessions (which is something we need to do during
// incremental compilation).
#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)]
pub struct StableFilemapId(u128);
pub struct StableSourceFileId(u128);
impl StableFilemapId {
pub fn new(source_file: &SourceFile) -> StableFilemapId {
impl StableSourceFileId {
pub fn new(source_file: &SourceFile) -> StableSourceFileId {
let mut hasher = StableHasher::new();
source_file.name.hash(&mut hasher);
source_file.name_was_remapped.hash(&mut hasher);
source_file.unmapped_path.hash(&mut hasher);
StableFilemapId(hasher.finish())
StableSourceFileId(hasher.finish())
}
}
@ -126,8 +126,8 @@ impl StableFilemapId {
#[derive(Default)]
pub(super) struct SourceMapFiles {
pub(super) file_maps: Vec<Lrc<SourceFile>>,
stable_id_to_source_file: FxHashMap<StableFilemapId, Lrc<SourceFile>>
pub(super) source_files: Vec<Lrc<SourceFile>>,
stable_id_to_source_file: FxHashMap<StableSourceFileId, Lrc<SourceFile>>
}
pub struct SourceMap {
@ -190,15 +190,16 @@ impl SourceMap {
}
pub fn files(&self) -> MappedLockGuard<Vec<Lrc<SourceFile>>> {
LockGuard::map(self.files.borrow(), |files| &mut files.file_maps)
LockGuard::map(self.files.borrow(), |files| &mut files.source_files)
}
pub fn source_file_by_stable_id(&self, stable_id: StableFilemapId) -> Option<Lrc<SourceFile>> {
self.files.borrow().stable_id_to_source_file.get(&stable_id).map(|fm| fm.clone())
pub fn source_file_by_stable_id(&self, stable_id: StableSourceFileId) ->
Option<Lrc<SourceFile>> {
self.files.borrow().stable_id_to_source_file.get(&stable_id).map(|sf| sf.clone())
}
fn next_start_pos(&self) -> usize {
match self.files.borrow().file_maps.last() {
match self.files.borrow().source_files.last() {
None => 0,
// Add one so there is some space between files. This lets us distinguish
// positions in the source_map, even in the presence of zero-length files.
@ -235,8 +236,8 @@ impl SourceMap {
let mut files = self.files.borrow_mut();
files.file_maps.push(source_file.clone());
files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file),
files.source_files.push(source_file.clone());
files.stable_id_to_source_file.insert(StableSourceFileId::new(&source_file),
source_file.clone());
source_file
@ -293,8 +294,8 @@ impl SourceMap {
let mut files = self.files.borrow_mut();
files.file_maps.push(source_file.clone());
files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file),
files.source_files.push(source_file.clone());
files.stable_id_to_source_file.insert(StableSourceFileId::new(&source_file),
source_file.clone());
source_file
@ -324,7 +325,7 @@ impl SourceMap {
pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
let chpos = self.bytepos_to_file_charpos(pos);
match self.lookup_line(pos) {
Ok(SourceFileAndLine { fm: f, line: a }) => {
Ok(SourceFileAndLine { sf: f, line: a }) => {
let line = a + 1; // Line numbers start at 1
let linebpos = f.lines[a];
let linechpos = self.bytepos_to_file_charpos(linebpos);
@ -387,10 +388,10 @@ impl SourceMap {
pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Lrc<SourceFile>> {
let idx = self.lookup_source_file_idx(pos);
let f = (*self.files.borrow().file_maps)[idx].clone();
let f = (*self.files.borrow().source_files)[idx].clone();
match f.lookup_line(pos) {
Some(line) => Ok(SourceFileAndLine { fm: f, line: line }),
Some(line) => Ok(SourceFileAndLine { sf: f, line: line }),
None => Err(f)
}
}
@ -441,7 +442,7 @@ impl SourceMap {
}
pub fn span_to_string(&self, sp: Span) -> String {
if self.files.borrow().file_maps.is_empty() && sp.is_dummy() {
if self.files.borrow().source_files.is_empty() && sp.is_dummy() {
return "no-location".to_string();
}
@ -531,38 +532,38 @@ impl SourceMap {
let local_begin = self.lookup_byte_offset(sp.lo());
let local_end = self.lookup_byte_offset(sp.hi());
if local_begin.fm.start_pos != local_end.fm.start_pos {
if local_begin.sf.start_pos != local_end.sf.start_pos {
return Err(SpanSnippetError::DistinctSources(DistinctSources {
begin: (local_begin.fm.name.clone(),
local_begin.fm.start_pos),
end: (local_end.fm.name.clone(),
local_end.fm.start_pos)
begin: (local_begin.sf.name.clone(),
local_begin.sf.start_pos),
end: (local_end.sf.name.clone(),
local_end.sf.start_pos)
}));
} else {
self.ensure_source_file_source_present(local_begin.fm.clone());
self.ensure_source_file_source_present(local_begin.sf.clone());
let start_index = local_begin.pos.to_usize();
let end_index = local_end.pos.to_usize();
let source_len = (local_begin.fm.end_pos -
local_begin.fm.start_pos).to_usize();
let source_len = (local_begin.sf.end_pos -
local_begin.sf.start_pos).to_usize();
if start_index > end_index || end_index > source_len {
return Err(SpanSnippetError::MalformedForCodemap(
MalformedCodemapPositions {
name: local_begin.fm.name.clone(),
return Err(SpanSnippetError::MalformedForSourcemap(
MalformedSourceMapPositions {
name: local_begin.sf.name.clone(),
source_len,
begin_pos: local_begin.pos,
end_pos: local_end.pos,
}));
}
if let Some(ref src) = local_begin.fm.src {
if let Some(ref src) = local_begin.sf.src {
return Ok(extract_source(src, start_index, end_index));
} else if let Some(src) = local_begin.fm.external_src.borrow().get_source() {
} else if let Some(src) = local_begin.sf.external_src.borrow().get_source() {
return Ok(extract_source(src, start_index, end_index));
} else {
return Err(SpanSnippetError::SourceNotAvailable {
filename: local_begin.fm.name.clone()
filename: local_begin.sf.name.clone()
});
}
}
@ -757,7 +758,7 @@ impl SourceMap {
return 1;
}
let source_len = (local_begin.fm.end_pos - local_begin.fm.start_pos).to_usize();
let source_len = (local_begin.sf.end_pos - local_begin.sf.start_pos).to_usize();
debug!("find_width_of_character_at_span: source_len=`{:?}`", source_len);
// Ensure indexes are also not malformed.
if start_index > end_index || end_index > source_len {
@ -765,11 +766,11 @@ impl SourceMap {
return 1;
}
let src = local_begin.fm.external_src.borrow();
let src = local_begin.sf.external_src.borrow();
// We need to extend the snippet to the end of the src rather than to end_index so when
// searching forwards for boundaries we've got somewhere to search.
let snippet = if let Some(ref src) = local_begin.fm.src {
let snippet = if let Some(ref src) = local_begin.sf.src {
let len = src.len();
(&src[start_index..len])
} else if let Some(src) = src.get_source() {
@ -806,9 +807,9 @@ impl SourceMap {
}
pub fn get_source_file(&self, filename: &FileName) -> Option<Lrc<SourceFile>> {
for fm in self.files.borrow().file_maps.iter() {
if *filename == fm.name {
return Some(fm.clone());
for sf in self.files.borrow().source_files.iter() {
if *filename == sf.name {
return Some(sf.clone());
}
}
None
@ -817,15 +818,15 @@ impl SourceMap {
/// For a global BytePos compute the local offset within the containing SourceFile
pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos {
let idx = self.lookup_source_file_idx(bpos);
let fm = (*self.files.borrow().file_maps)[idx].clone();
let offset = bpos - fm.start_pos;
SourceFileAndBytePos {fm: fm, pos: offset}
let sf = (*self.files.borrow().source_files)[idx].clone();
let offset = bpos - sf.start_pos;
SourceFileAndBytePos {sf: sf, pos: offset}
}
/// Converts an absolute BytePos to a CharPos relative to the source_file.
pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
let idx = self.lookup_source_file_idx(bpos);
let map = &(*self.files.borrow().file_maps)[idx];
let map = &(*self.files.borrow().source_files)[idx];
// The number of extra bytes due to multibyte chars in the SourceFile
let mut total_extra_bytes = 0;
@ -851,7 +852,7 @@ impl SourceMap {
// Return the index of the source_file (in self.files) which contains pos.
pub fn lookup_source_file_idx(&self, pos: BytePos) -> usize {
let files = self.files.borrow();
let files = &files.file_maps;
let files = &files.source_files;
let count = files.len();
// Binary search for the source_file.
@ -974,9 +975,9 @@ impl SourceMapper for SourceMap {
}
sp
}
fn ensure_source_file_source_present(&self, file_map: Lrc<SourceFile>) -> bool {
file_map.add_external_src(
|| match file_map.name {
fn ensure_source_file_source_present(&self, source_file: Lrc<SourceFile>) -> bool {
source_file.add_external_src(
|| match source_file.name {
FileName::Real(ref name) => self.file_loader.read_file(name).ok(),
_ => None,
}
@ -1031,97 +1032,97 @@ mod tests {
use super::*;
use rustc_data_structures::sync::Lrc;
fn init_code_map() -> SourceMap {
let cm = SourceMap::new(FilePathMapping::empty());
cm.new_source_file(PathBuf::from("blork.rs").into(),
fn init_source_map() -> SourceMap {
let sm = SourceMap::new(FilePathMapping::empty());
sm.new_source_file(PathBuf::from("blork.rs").into(),
"first line.\nsecond line".to_string());
cm.new_source_file(PathBuf::from("empty.rs").into(),
sm.new_source_file(PathBuf::from("empty.rs").into(),
String::new());
cm.new_source_file(PathBuf::from("blork2.rs").into(),
sm.new_source_file(PathBuf::from("blork2.rs").into(),
"first line.\nsecond line".to_string());
cm
sm
}
#[test]
fn t3() {
// Test lookup_byte_offset
let cm = init_code_map();
let sm = init_source_map();
let fmabp1 = cm.lookup_byte_offset(BytePos(23));
assert_eq!(fmabp1.fm.name, PathBuf::from("blork.rs").into());
assert_eq!(fmabp1.pos, BytePos(23));
let srcfbp1 = sm.lookup_byte_offset(BytePos(23));
assert_eq!(srcfbp1.sf.name, PathBuf::from("blork.rs").into());
assert_eq!(srcfbp1.pos, BytePos(23));
let fmabp1 = cm.lookup_byte_offset(BytePos(24));
assert_eq!(fmabp1.fm.name, PathBuf::from("empty.rs").into());
assert_eq!(fmabp1.pos, BytePos(0));
let srcfbp1 = sm.lookup_byte_offset(BytePos(24));
assert_eq!(srcfbp1.sf.name, PathBuf::from("empty.rs").into());
assert_eq!(srcfbp1.pos, BytePos(0));
let fmabp2 = cm.lookup_byte_offset(BytePos(25));
assert_eq!(fmabp2.fm.name, PathBuf::from("blork2.rs").into());
assert_eq!(fmabp2.pos, BytePos(0));
let srcfbp2 = sm.lookup_byte_offset(BytePos(25));
assert_eq!(srcfbp2.sf.name, PathBuf::from("blork2.rs").into());
assert_eq!(srcfbp2.pos, BytePos(0));
}
#[test]
fn t4() {
// Test bytepos_to_file_charpos
let cm = init_code_map();
let sm = init_source_map();
let cp1 = cm.bytepos_to_file_charpos(BytePos(22));
let cp1 = sm.bytepos_to_file_charpos(BytePos(22));
assert_eq!(cp1, CharPos(22));
let cp2 = cm.bytepos_to_file_charpos(BytePos(25));
let cp2 = sm.bytepos_to_file_charpos(BytePos(25));
assert_eq!(cp2, CharPos(0));
}
#[test]
fn t5() {
// Test zero-length source_files.
let cm = init_code_map();
let sm = init_source_map();
let loc1 = cm.lookup_char_pos(BytePos(22));
let loc1 = sm.lookup_char_pos(BytePos(22));
assert_eq!(loc1.file.name, PathBuf::from("blork.rs").into());
assert_eq!(loc1.line, 2);
assert_eq!(loc1.col, CharPos(10));
let loc2 = cm.lookup_char_pos(BytePos(25));
let loc2 = sm.lookup_char_pos(BytePos(25));
assert_eq!(loc2.file.name, PathBuf::from("blork2.rs").into());
assert_eq!(loc2.line, 1);
assert_eq!(loc2.col, CharPos(0));
}
fn init_code_map_mbc() -> SourceMap {
let cm = SourceMap::new(FilePathMapping::empty());
fn init_source_map_mbc() -> SourceMap {
let sm = SourceMap::new(FilePathMapping::empty());
// € is a three byte utf8 char.
cm.new_source_file(PathBuf::from("blork.rs").into(),
sm.new_source_file(PathBuf::from("blork.rs").into(),
"fir€st €€€€ line.\nsecond line".to_string());
cm.new_source_file(PathBuf::from("blork2.rs").into(),
sm.new_source_file(PathBuf::from("blork2.rs").into(),
"first line€€.\n€ second line".to_string());
cm
sm
}
#[test]
fn t6() {
// Test bytepos_to_file_charpos in the presence of multi-byte chars
let cm = init_code_map_mbc();
let sm = init_source_map_mbc();
let cp1 = cm.bytepos_to_file_charpos(BytePos(3));
let cp1 = sm.bytepos_to_file_charpos(BytePos(3));
assert_eq!(cp1, CharPos(3));
let cp2 = cm.bytepos_to_file_charpos(BytePos(6));
let cp2 = sm.bytepos_to_file_charpos(BytePos(6));
assert_eq!(cp2, CharPos(4));
let cp3 = cm.bytepos_to_file_charpos(BytePos(56));
let cp3 = sm.bytepos_to_file_charpos(BytePos(56));
assert_eq!(cp3, CharPos(12));
let cp4 = cm.bytepos_to_file_charpos(BytePos(61));
let cp4 = sm.bytepos_to_file_charpos(BytePos(61));
assert_eq!(cp4, CharPos(15));
}
#[test]
fn t7() {
// Test span_to_lines for a span ending at the end of source_file
let cm = init_code_map();
let sm = init_source_map();
let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
let file_lines = cm.span_to_lines(span).unwrap();
let file_lines = sm.span_to_lines(span).unwrap();
assert_eq!(file_lines.file.name, PathBuf::from("blork.rs").into());
assert_eq!(file_lines.lines.len(), 1);
@ -1143,17 +1144,17 @@ mod tests {
/// lines in the middle of a file.
#[test]
fn span_to_snippet_and_lines_spanning_multiple_lines() {
let cm = SourceMap::new(FilePathMapping::empty());
let sm = SourceMap::new(FilePathMapping::empty());
let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
let selection = " \n ~~\n~~~\n~~~~~ \n \n";
cm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_string());
sm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_string());
let span = span_from_selection(inputtext, selection);
// check that we are extracting the text we thought we were extracting
assert_eq!(&cm.span_to_snippet(span).unwrap(), "BB\nCCC\nDDDDD");
assert_eq!(&sm.span_to_snippet(span).unwrap(), "BB\nCCC\nDDDDD");
// check that span_to_lines gives us the complete result with the lines/cols we expected
let lines = cm.span_to_lines(span).unwrap();
let lines = sm.span_to_lines(span).unwrap();
let expected = vec![
LineInfo { line_index: 1, start_col: CharPos(4), end_col: CharPos(6) },
LineInfo { line_index: 2, start_col: CharPos(0), end_col: CharPos(3) },
@ -1165,9 +1166,9 @@ mod tests {
#[test]
fn t8() {
// Test span_to_snippet for a span ending at the end of source_file
let cm = init_code_map();
let sm = init_source_map();
let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
let snippet = cm.span_to_snippet(span);
let snippet = sm.span_to_snippet(span);
assert_eq!(snippet, Ok("second line".to_string()));
}
@ -1175,9 +1176,9 @@ mod tests {
#[test]
fn t9() {
// Test span_to_str for a span ending at the end of source_file
let cm = init_code_map();
let sm = init_source_map();
let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
let sstr = cm.span_to_string(span);
let sstr = sm.span_to_string(span);
assert_eq!(sstr, "blork.rs:2:1: 2:12");
}
@ -1185,15 +1186,15 @@ mod tests {
/// Test failing to merge two spans on different lines
#[test]
fn span_merging_fail() {
let cm = SourceMap::new(FilePathMapping::empty());
let sm = SourceMap::new(FilePathMapping::empty());
let inputtext = "bbbb BB\ncc CCC\n";
let selection1 = " ~~\n \n";
let selection2 = " \n ~~~\n";
cm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_owned());
sm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_owned());
let span1 = span_from_selection(inputtext, selection1);
let span2 = span_from_selection(inputtext, selection2);
assert!(cm.merge_spans(span1, span2).is_none());
assert!(sm.merge_spans(span1, span2).is_none());
}
/// Returns the span corresponding to the `n`th occurrence of

View File

@ -50,8 +50,8 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &
with_globals(|| {
let output = Arc::new(Mutex::new(Vec::new()));
let code_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
code_map.new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned());
let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
source_map.new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned());
let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
let mut msp = MultiSpan::from_span(primary_span);
@ -59,11 +59,11 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &
let span = make_span(&file_text, &span_label.start, &span_label.end);
msp.push_span_label(span, span_label.label.to_string());
println!("span: {:?} label: {:?}", span, span_label.label);
println!("text: {:?}", code_map.span_to_snippet(span));
println!("text: {:?}", source_map.span_to_snippet(span));
}
let emitter = EmitterWriter::new(Box::new(Shared { data: output.clone() }),
Some(code_map.clone()),
Some(source_map.clone()),
false,
false);
let handler = Handler::with_emitter(true, false, Box::new(emitter));

View File

@ -36,9 +36,9 @@ pub fn analyze_source_file(
// it encounters. If that point is already outside the source_file, remove
// it again.
if let Some(&last_line_start) = lines.last() {
let file_map_end = source_file_start_pos + BytePos::from_usize(src.len());
assert!(file_map_end >= last_line_start);
if last_line_start == file_map_end {
let source_file_end = source_file_start_pos + BytePos::from_usize(src.len());
assert!(source_file_end >= last_line_start);
if last_line_start == source_file_end {
lines.pop();
}
}

View File

@ -1266,9 +1266,9 @@ pub struct LocWithOpt {
// used to be structural records. Better names, anyone?
#[derive(Debug)]
pub struct SourceFileAndLine { pub fm: Lrc<SourceFile>, pub line: usize }
pub struct SourceFileAndLine { pub sf: Lrc<SourceFile>, pub line: usize }
#[derive(Debug)]
pub struct SourceFileAndBytePos { pub fm: Lrc<SourceFile>, pub pos: BytePos }
pub struct SourceFileAndBytePos { pub sf: Lrc<SourceFile>, pub pos: BytePos }
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct LineInfo {
@ -1303,7 +1303,7 @@ pub struct MacroBacktrace {
}
// _____________________________________________________________________________
// SpanLinesError, SpanSnippetError, DistinctSources, MalformedCodemapPositions
// SpanLinesError, SpanSnippetError, DistinctSources, MalformedSourceMapPositions
//
pub type FileLinesResult = Result<FileLines, SpanLinesError>;
@ -1318,7 +1318,7 @@ pub enum SpanLinesError {
pub enum SpanSnippetError {
IllFormedSpan(Span),
DistinctSources(DistinctSources),
MalformedForCodemap(MalformedCodemapPositions),
MalformedForSourcemap(MalformedSourceMapPositions),
SourceNotAvailable { filename: FileName }
}
@ -1329,7 +1329,7 @@ pub struct DistinctSources {
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct MalformedCodemapPositions {
pub struct MalformedSourceMapPositions {
pub name: FileName,
pub source_len: usize,
pub begin_pos: BytePos,

View File

@ -23,8 +23,8 @@ pub fn exported_generic<T>(x: T, y: u32) -> (T, u32) {
// The AST node for the (1 + y) expression generated by the macro will then
// take it's `lo` span bound from the `1` literal in the macro-defining file
// and it's `hi` bound from `y` in this file, which should be lower than the
// `lo` and even lower than the lower bound of the FileMap it is supposedly
// contained in because the FileMap for this file was allocated earlier than
// the FileMap of the macro-defining file.
// `lo` and even lower than the lower bound of the SourceFile it is supposedly
// contained in because the SourceFile for this file was allocated earlier than
// the SourceFile of the macro-defining file.
return (x, add1!(y));
}