incr.comp.: Include header when loading cache files in order to get the same byte offsets as when saving.
parent 67d2b1b7fd
commit 8cbc02238d
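The core idea of the change: byte offsets recorded while serializing a cache file are absolute offsets into the file, header included. If the loader strips the header before handing the buffer to a decoder, every recorded offset is off by the header's length. The following minimal sketch (plain `Vec<u8>` and a made-up header constant, not rustc's actual cache types) illustrates the mismatch and how keeping the header plus a `start_pos` fixes it:

// A minimal sketch of the offset mismatch this commit fixes. HEADER and the
// payload are hypothetical; only the bookkeeping pattern mirrors the commit.
fn main() {
    const HEADER: &[u8] = b"RSIC\x01"; // hypothetical file header bytes
    let payload = b"cached-query-result";

    // "Saving": header first, then payload; record the payload's absolute
    // offset as it lands in the file.
    let mut file = Vec::new();
    file.extend_from_slice(HEADER);
    let recorded_offset = file.len(); // measured with the header in place
    file.extend_from_slice(payload);

    // Old scheme: strip the header and hand out only the tail. Applying the
    // recorded offset to the tail lands `HEADER.len()` bytes too far.
    let stripped = &file[HEADER.len()..];
    assert_ne!(&stripped[recorded_offset..], &payload[..]);

    // New scheme: keep the whole file and start decoding at the first byte
    // after the header; the recorded offset indexes exactly what was written.
    let start_pos = HEADER.len(); // what `read_file` now returns alongside the data
    assert_eq!(start_pos, recorded_offset);
    assert_eq!(&file[recorded_offset..], &payload[..]);
}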
@@ -58,10 +58,10 @@ impl<'sess> OnDiskCache<'sess> {
     /// so far) will eagerly deserialize the complete cache. Once we are
     /// dealing with larger amounts of data (i.e. cached query results),
     /// deserialization will need to happen lazily.
-    pub fn new(sess: &'sess Session, data: &[u8]) -> OnDiskCache<'sess> {
+    pub fn new(sess: &'sess Session, data: &[u8], start_pos: usize) -> OnDiskCache<'sess> {
         debug_assert!(sess.opts.incremental.is_some());
 
-        let mut decoder = opaque::Decoder::new(&data[..], 0);
+        let mut decoder = opaque::Decoder::new(&data[..], start_pos);
         let header = Header::decode(&mut decoder).unwrap();
 
         let prev_diagnostics: FxHashMap<_, _> = {
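`OnDiskCache::new` now receives the position of the first post-header byte and hands it to `opaque::Decoder::new`, so the decoder's position is an index into the complete file buffer. A toy decoder along these lines (illustrative only, not rustc's actual `opaque::Decoder`) shows why that matters: the positions it reports line up with offsets that were recorded while the file was being written.

// Illustrative toy decoder in the spirit of the new `start_pos` parameter.
struct ToyDecoder<'a> {
    data: &'a [u8],
    position: usize,
}

impl<'a> ToyDecoder<'a> {
    fn new(data: &'a [u8], position: usize) -> ToyDecoder<'a> {
        ToyDecoder { data, position }
    }

    fn read_u8(&mut self) -> u8 {
        let byte = self.data[self.position];
        self.position += 1;
        byte
    }

    fn pos(&self) -> usize {
        self.position // absolute file offset, not "bytes since start_pos"
    }
}

fn main() {
    let file = [0xAA, 0xBB, 0xCC, 0x42]; // pretend the first 3 bytes are a header
    let mut decoder = ToyDecoder::new(&file, 3); // start right after the "header"
    assert_eq!(decoder.read_u8(), 0x42);
    assert_eq!(decoder.pos(), 4);
}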
@@ -53,19 +53,25 @@ pub fn write_file_header<W: io::Write>(stream: &mut W) -> io::Result<()> {
 
 /// Reads the contents of a file with a file header as defined in this module.
 ///
-/// - Returns `Ok(Some(data))` if the file existed and was generated by a
+/// - Returns `Ok(Some(data, pos))` if the file existed and was generated by a
 ///   compatible compiler version. `data` is the entire contents of the file
-///   *after* the header.
+///   and `pos` points to the first byte after the header.
 /// - Returns `Ok(None)` if the file did not exist or was generated by an
 ///   incompatible version of the compiler.
 /// - Returns `Err(..)` if some kind of IO error occurred while reading the
 ///   file.
-pub fn read_file(sess: &Session, path: &Path) -> io::Result<Option<Vec<u8>>> {
+pub fn read_file(sess: &Session, path: &Path) -> io::Result<Option<(Vec<u8>, usize)>> {
     if !path.exists() {
         return Ok(None);
     }
 
     let mut file = File::open(path)?;
+    let file_size = file.metadata()?.len() as usize;
+
+    let mut data = Vec::with_capacity(file_size);
+    file.read_to_end(&mut data)?;
+
+    let mut file = io::Cursor::new(data);
 
     // Check FILE_MAGIC
     {
@@ -107,10 +113,8 @@ pub fn read_file(sess: &Session, path: &Path) -> io::Result<Option<Vec<u8>>> {
         }
     }
 
-    let mut data = vec![];
-    file.read_to_end(&mut data)?;
-
-    Ok(Some(data))
+    let post_header_start_pos = file.position() as usize;
+    Ok(Some((file.into_inner(), post_header_start_pos)))
 }
 
 fn report_format_mismatch(sess: &Session, file: &Path, message: &str) {
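Taken together, the two `read_file` hunks change the function to slurp the whole file into memory, validate the header through an `io::Cursor`, and return the untruncated buffer along with the cursor's position after the header. A self-contained, std-only sketch of the same shape (with hypothetical magic and version constants, and without rustc's `Session`-based error reporting; the real module performs additional header checks, as the elided block in the diff suggests):

use std::fs::File;
use std::io::{self, Cursor, Read};
use std::path::Path;

// Hypothetical header layout for the sketch only.
const FILE_MAGIC: &[u8] = b"RSIC";
const FORMAT_VERSION: u8 = 1;

// Read the whole file, validate the header through a cursor, and return the
// *untruncated* buffer together with the first post-header position.
fn read_file(path: &Path) -> io::Result<Option<(Vec<u8>, usize)>> {
    if !path.exists() {
        return Ok(None);
    }

    let mut file = File::open(path)?;
    let file_size = file.metadata()?.len() as usize;

    let mut data = Vec::with_capacity(file_size);
    file.read_to_end(&mut data)?;

    let mut file = Cursor::new(data);

    // Check FILE_MAGIC.
    let mut magic = [0u8; 4];
    file.read_exact(&mut magic)?;
    if &magic[..] != FILE_MAGIC {
        return Ok(None); // not one of our files
    }

    // Check FORMAT_VERSION.
    let mut version = [0u8; 1];
    file.read_exact(&mut version)?;
    if version[0] != FORMAT_VERSION {
        return Ok(None); // produced by an incompatible compiler version
    }

    // Hand back everything, plus where the payload starts.
    let post_header_start_pos = file.position() as usize;
    Ok(Some((file.into_inner(), post_header_start_pos)))
}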
@@ -42,9 +42,9 @@ pub fn dep_graph_tcx_init<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     }
 
     let work_products_path = work_products_path(tcx.sess);
-    if let Some(work_products_data) = load_data(tcx.sess, &work_products_path) {
+    if let Some((work_products_data, start_pos)) = load_data(tcx.sess, &work_products_path) {
         // Decode the list of work_products
-        let mut work_product_decoder = Decoder::new(&work_products_data[..], 0);
+        let mut work_product_decoder = Decoder::new(&work_products_data[..], start_pos);
         let work_products: Vec<SerializedWorkProduct> =
             RustcDecodable::decode(&mut work_product_decoder).unwrap_or_else(|e| {
                 let msg = format!("Error decoding `work-products` from incremental \
@@ -77,9 +77,9 @@ pub fn dep_graph_tcx_init<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
         }
     }
 
-fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
+fn load_data(sess: &Session, path: &Path) -> Option<(Vec<u8>, usize)> {
     match file_format::read_file(sess, path) {
-        Ok(Some(data)) => return Some(data),
+        Ok(Some(data_and_pos)) => return Some(data_and_pos),
         Ok(None) => {
             // The file either didn't exist or was produced by an incompatible
             // compiler version. Neither is an error.
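`load_data` stays a thin wrapper around `file_format::read_file`; it only needs its return type widened so the `(data, position)` pair flows through to callers. A sketch of such a wrapper, reusing the hypothetical `read_file` and `ToyDecoder` from the sketches above, with error reporting reduced to `eprintln!`:

// A missing or incompatible file simply means "no cached data".
fn load_data(path: &std::path::Path) -> Option<(Vec<u8>, usize)> {
    match read_file(path) {
        Ok(Some(data_and_pos)) => Some(data_and_pos),
        Ok(None) => None, // missing file or incompatible compiler version
        Err(err) => {
            eprintln!("could not load cached data from `{}`: {}", path.display(), err);
            None
        }
    }
}

// A caller then threads `start_pos` straight into its decoder, e.g.:
//
//     if let Some((bytes, start_pos)) = load_data(&path) {
//         let mut decoder = ToyDecoder::new(&bytes, start_pos);
//         // ... decode work products, dep-graph, query results ...
//     }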
@@ -126,8 +126,8 @@ pub fn load_prev_metadata_hashes(tcx: TyCtxt) -> DefIdMap<Fingerprint> {
 
     debug!("load_prev_metadata_hashes() - File: {}", file_path.display());
 
-    let data = match file_format::read_file(tcx.sess, &file_path) {
-        Ok(Some(data)) => data,
+    let (data, start_pos) = match file_format::read_file(tcx.sess, &file_path) {
+        Ok(Some(data_and_pos)) => data_and_pos,
         Ok(None) => {
             debug!("load_prev_metadata_hashes() - File produced by incompatible \
                     compiler version: {}", file_path.display());
@@ -141,7 +141,7 @@ pub fn load_prev_metadata_hashes(tcx: TyCtxt) -> DefIdMap<Fingerprint> {
     };
 
     debug!("load_prev_metadata_hashes() - Decoding hashes");
-    let mut decoder = Decoder::new(&data, 0);
+    let mut decoder = Decoder::new(&data, start_pos);
     let _ = Svh::decode(&mut decoder).unwrap();
     let serialized_hashes = SerializedMetadataHashes::decode(&mut decoder).unwrap();
 
@@ -171,8 +171,8 @@ pub fn load_dep_graph(sess: &Session) -> PreviousDepGraph {
         return empty
     }
 
-    if let Some(bytes) = load_data(sess, &dep_graph_path(sess)) {
-        let mut decoder = Decoder::new(&bytes, 0);
+    if let Some((bytes, start_pos)) = load_data(sess, &dep_graph_path(sess)) {
+        let mut decoder = Decoder::new(&bytes, start_pos);
         let prev_commandline_args_hash = u64::decode(&mut decoder)
             .expect("Error reading commandline arg hash from cached dep-graph");
 
@@ -184,6 +184,10 @@ pub fn load_dep_graph(sess: &Session) -> PreviousDepGraph {
             // We can't reuse the cache, purge it.
             debug!("load_dep_graph_new: differing commandline arg hashes");
 
+            delete_all_session_dir_contents(sess)
+                .expect("Failed to delete invalidated incr. comp. session \
+                         directory contents.");
+
             // No need to do any further work
             return empty
         }
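This hunk is the one behavioral addition beyond the offset plumbing: when the command-line argument hash in the cached dep-graph does not match the current session's, the session directory's contents are deleted before bailing out. As a rough std-only stand-in (not rustc's actual `delete_all_session_dir_contents`), purging a directory's contents while keeping the directory itself could look like this:

use std::fs;
use std::io;
use std::path::Path;

// Remove every entry inside `dir` but keep the directory itself around.
fn delete_dir_contents(dir: &Path) -> io::Result<()> {
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();
        if path.is_dir() {
            fs::remove_dir_all(&path)?;
        } else {
            fs::remove_file(&path)?;
        }
    }
    Ok(())
}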
@@ -202,8 +206,8 @@ pub fn load_query_result_cache<'sess>(sess: &'sess Session) -> OnDiskCache<'sess
         return OnDiskCache::new_empty(sess.codemap());
     }
 
-    if let Some(bytes) = load_data(sess, &query_cache_path(sess)) {
-        OnDiskCache::new(sess, &bytes[..])
+    if let Some((bytes, start_pos)) = load_data(sess, &query_cache_path(sess)) {
+        OnDiskCache::new(sess, &bytes[..], start_pos)
     } else {
         OnDiskCache::new_empty(sess.codemap())
     }
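End to end, every loader now follows the same pattern: obtain the full on-disk bytes plus the post-header position from `load_data`, and forward that position to whatever consumes the payload (here `OnDiskCache::new`). A compressed illustration using the hypothetical helpers sketched above, with the payload simply copied out of the buffer in place of constructing a real cache:

// No `Session`, no real `OnDiskCache`; the "cache" is just the payload bytes.
fn load_query_result_cache(path: &std::path::Path) -> Vec<u8> {
    if let Some((bytes, start_pos)) = load_data(path) {
        // rustc passes both on: `OnDiskCache::new(sess, &bytes[..], start_pos)`.
        bytes[start_pos..].to_vec()
    } else {
        Vec::new() // analogue of `OnDiskCache::new_empty(sess.codemap())`
    }
}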