Auto merge of #78489 - bugadani:array, r=estebank
Minor cleanup around incremental compilation

* Remove some short-lived vectors
* Fix some typos
* Avoid some reallocations
commit 8e8939b804
@@ -653,13 +653,13 @@ where
         writeln!(w, r#"    edge[{}];"#, content_attrs_str)?;
     }

+    let mut text = Vec::new();
     for n in g.nodes().iter() {
         write!(w, "    ")?;
         let id = g.node_id(n);

         let escaped = &g.node_label(n).to_dot_string();

-        let mut text = Vec::new();
         write!(text, "{}", id.as_slice()).unwrap();

         if !options.contains(&RenderOption::NoNodeLabels) {
@@ -677,6 +677,8 @@ where

         writeln!(text, ";").unwrap();
         w.write_all(&text[..])?;
+
+        text.clear();
     }

     for e in g.edges().iter() {
@@ -687,7 +689,6 @@ where
         let source_id = g.node_id(&source);
         let target_id = g.node_id(&target);

-        let mut text = Vec::new();
         write!(text, "{} -> {}", source_id.as_slice(), target_id.as_slice()).unwrap();

         if !options.contains(&RenderOption::NoEdgeLabels) {
@@ -701,6 +702,8 @@ where

         writeln!(text, ";").unwrap();
         w.write_all(&text[..])?;
+
+        text.clear();
     }

     writeln!(w, "}}")
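The graphviz hunks above hoist the `text` buffer out of the node and edge loops and reuse it via `clear()`, so one allocation serves every iteration. A minimal, self-contained sketch of that pattern; the function name and data here are illustrative, not the real graphviz renderer:

use std::io::{self, Write};

// Hypothetical renderer: writes one line per node, reusing a single buffer.
fn render_nodes<W: Write>(w: &mut W, nodes: &[&str]) -> io::Result<()> {
    // One allocation, hoisted out of the loop; clear() keeps the capacity.
    let mut text = Vec::new();
    for node in nodes {
        // Writing into a Vec<u8> cannot fail, so unwrap() is fine here.
        writeln!(text, "    {};", node).unwrap();
        w.write_all(&text)?;
        text.clear();
    }
    Ok(())
}

fn main() -> io::Result<()> {
    let mut out = Vec::new();
    render_nodes(&mut out, &["a", "b", "c"])?;
    print!("{}", String::from_utf8(out).unwrap());
    Ok(())
}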
@@ -111,10 +111,12 @@ impl AssertModuleSource<'tcx>
             (&user_path[..], None)
         };

-        let mut cgu_path_components = user_path.split('-').collect::<Vec<_>>();
+        let mut iter = user_path.split('-');

         // Remove the crate name
-        assert_eq!(cgu_path_components.remove(0), crate_name);
+        assert_eq!(iter.next().unwrap(), crate_name);
+
+        let cgu_path_components = iter.collect::<Vec<_>>();

         let cgu_name_builder = &mut CodegenUnitNameBuilder::new(self.tcx);
         let cgu_name =
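A small standalone sketch of the `AssertModuleSource` change above, using a hypothetical path: taking the crate name straight off the `split('-')` iterator avoids both the temporary `Vec` and the element-shifting `remove(0)`:

fn main() {
    let user_path = "my_crate-foo-bar"; // hypothetical cgu path
    let crate_name = "my_crate";

    let mut iter = user_path.split('-');

    // Remove the crate name without building an intermediate Vec.
    assert_eq!(iter.next().unwrap(), crate_name);

    let cgu_path_components: Vec<_> = iter.collect();
    assert_eq!(cgu_path_components, ["foo", "bar"]);
}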
@@ -160,7 +160,7 @@ pub fn check_dirty_clean_annotations(tcx: TyCtxt<'_>) {

     let mut all_attrs = FindAllAttrs {
         tcx,
-        attr_names: vec![sym::rustc_dirty, sym::rustc_clean],
+        attr_names: &[sym::rustc_dirty, sym::rustc_clean],
         found_attrs: vec![],
     };
     intravisit::walk_crate(&mut all_attrs, krate);
@@ -299,7 +299,7 @@ impl DirtyCleanVisitor<'tcx> {

                 // Represents a Trait Declaration
                 // FIXME(michaelwoerister): trait declaration is buggy because sometimes some of
-                // the depnodes don't exist (because they legitametely didn't need to be
+                // the depnodes don't exist (because they legitimately didn't need to be
                 // calculated)
                 //
                 // michaelwoerister and vitiral came up with a possible solution,
@@ -512,17 +512,17 @@ fn expect_associated_value(tcx: TyCtxt<'_>, item: &NestedMetaItem) -> Symbol {
 }

 // A visitor that collects all #[rustc_dirty]/#[rustc_clean] attributes from
-// the HIR. It is used to verfiy that we really ran checks for all annotated
+// the HIR. It is used to verify that we really ran checks for all annotated
 // nodes.
-pub struct FindAllAttrs<'tcx> {
+pub struct FindAllAttrs<'a, 'tcx> {
     tcx: TyCtxt<'tcx>,
-    attr_names: Vec<Symbol>,
+    attr_names: &'a [Symbol],
     found_attrs: Vec<&'tcx Attribute>,
 }

-impl FindAllAttrs<'tcx> {
+impl FindAllAttrs<'_, 'tcx> {
     fn is_active_attr(&mut self, attr: &Attribute) -> bool {
-        for attr_name in &self.attr_names {
+        for attr_name in self.attr_names {
             if self.tcx.sess.check_name(attr, *attr_name) && check_config(self.tcx, attr) {
                 return true;
             }
@@ -543,7 +543,7 @@ impl FindAllAttrs<'tcx> {
     }
 }

-impl intravisit::Visitor<'tcx> for FindAllAttrs<'tcx> {
+impl intravisit::Visitor<'tcx> for FindAllAttrs<'_, 'tcx> {
     type Map = Map<'tcx>;

     fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
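A rough sketch of the `FindAllAttrs` change above, with simplified stand-in types rather than the real rustc ones: a visitor that only reads a fixed set of names can borrow a slice instead of owning a `Vec`, which drops one allocation per visitor and lets callers pass a static array:

struct FindNames<'a> {
    names: &'a [&'static str],
    found: Vec<String>,
}

impl<'a> FindNames<'a> {
    fn is_match(&self, candidate: &str) -> bool {
        // Iterating a borrowed slice; no ownership or clone needed.
        self.names.iter().any(|n| *n == candidate)
    }

    fn visit(&mut self, candidate: &str) {
        if self.is_match(candidate) {
            self.found.push(candidate.to_string());
        }
    }
}

fn main() {
    let mut finder = FindNames { names: &["rustc_dirty", "rustc_clean"], found: vec![] };
    for attr in ["rustc_dirty", "inline", "rustc_clean"] {
        finder.visit(attr);
    }
    assert_eq!(finder.found, ["rustc_dirty", "rustc_clean"]);
}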
@@ -765,7 +765,6 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {

     // Now garbage collect the valid session directories.
     let mut deletion_candidates = vec![];
-    let mut definitely_delete = vec![];

     for (lock_file_name, directory_name) in &lock_file_to_session_dir {
         debug!("garbage_collect_session_directories() - inspecting: {}", directory_name);
@@ -842,8 +841,11 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
                         successfully acquired lock"
                    );

-                   // Note that we are holding on to the lock
-                   definitely_delete.push((crate_directory.join(directory_name), Some(lock)));
+                   delete_old(sess, &crate_directory.join(directory_name));
+
+                   // Let's make it explicit that the file lock is released at this point,
+                   // or rather, that we held on to it until here
+                   mem::drop(lock);
                }
                Err(_) => {
                    debug!(
@@ -880,28 +882,23 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
         mem::drop(lock);
     }

-    for (path, lock) in definitely_delete {
-        debug!("garbage_collect_session_directories() - deleting `{}`", path.display());
-
-        if let Err(err) = safe_remove_dir_all(&path) {
-            sess.warn(&format!(
-                "Failed to garbage collect incremental \
-                 compilation session directory `{}`: {}",
-                path.display(),
-                err
-            ));
-        } else {
-            delete_session_dir_lock_file(sess, &lock_file_path(&path));
-        }
-
-        // Let's make it explicit that the file lock is released at this point,
-        // or rather, that we held on to it until here
-        mem::drop(lock);
-    }
-
     Ok(())
 }

+fn delete_old(sess: &Session, path: &Path) {
+    debug!("garbage_collect_session_directories() - deleting `{}`", path.display());
+
+    if let Err(err) = safe_remove_dir_all(&path) {
+        sess.warn(&format!(
+            "Failed to garbage collect incremental compilation session directory `{}`: {}",
+            path.display(),
+            err
+        ));
+    } else {
+        delete_session_dir_lock_file(sess, &lock_file_path(&path));
+    }
+}
+
 fn all_except_most_recent(
     deletion_candidates: Vec<(SystemTime, PathBuf, Option<flock::Lock>)>,
 ) -> FxHashMap<PathBuf, Option<flock::Lock>> {
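A simplified sketch of the garbage-collection change above, with hypothetical paths and a stubbed-out deletion helper: directories that are definitely dead are deleted on the spot by a shared helper (as `delete_old` does above) instead of being accumulated in a `definitely_delete` vector and processed in a second loop:

use std::path::{Path, PathBuf};

fn delete_old(path: &Path) {
    // Stand-in for safe_remove_dir_all + lock-file cleanup in the real code.
    if let Err(err) = std::fs::remove_dir_all(path) {
        eprintln!("failed to delete `{}`: {}", path.display(), err);
    }
}

fn garbage_collect(dirs: Vec<(PathBuf, bool)>) {
    let mut deletion_candidates = vec![];
    for (dir, definitely_dead) in dirs {
        if definitely_dead {
            // Delete right away; no second vector, no second loop.
            delete_old(&dir);
        } else {
            deletion_candidates.push(dir);
        }
    }
    // deletion_candidates still gets its own "keep the most recent" pass.
    println!("{} candidates kept for a later pass", deletion_candidates.len());
}

fn main() {
    garbage_collect(vec![(PathBuf::from("/tmp/does-not-exist-12345"), true)]);
}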
@@ -153,7 +153,8 @@ fn encode_dep_graph(tcx: TyCtxt<'_>, encoder: &mut Encoder) {
     let total_node_count = serialized_graph.nodes.len();
     let total_edge_count = serialized_graph.edge_list_data.len();

-    let mut counts: FxHashMap<_, Stat> = FxHashMap::default();
+    let mut counts: FxHashMap<_, Stat> =
+        FxHashMap::with_capacity_and_hasher(total_node_count, Default::default());

     for (i, &node) in serialized_graph.nodes.iter_enumerated() {
         let stat = counts.entry(node.kind).or_insert(Stat {
@@ -170,14 +171,6 @@ fn encode_dep_graph(tcx: TyCtxt<'_>, encoder: &mut Encoder) {
     let mut counts: Vec<_> = counts.values().cloned().collect();
     counts.sort_by_key(|s| -(s.node_counter as i64));

-    let percentage_of_all_nodes: Vec<f64> = counts
-        .iter()
-        .map(|s| (100.0 * (s.node_counter as f64)) / (total_node_count as f64))
-        .collect();
-
-    let average_edges_per_kind: Vec<f64> =
-        counts.iter().map(|s| (s.edge_counter as f64) / (s.node_counter as f64)).collect();
-
     println!("[incremental]");
     println!("[incremental] DepGraph Statistics");

@@ -207,13 +200,13 @@ fn encode_dep_graph(tcx: TyCtxt<'_>, encoder: &mut Encoder) {
          |------------------|"
     );

-    for (i, stat) in counts.iter().enumerate() {
+    for stat in counts.iter() {
         println!(
             "[incremental] {:<36}|{:>16.1}% |{:>12} |{:>17.1} |",
             format!("{:?}", stat.kind),
-            percentage_of_all_nodes[i],
+            (100.0 * (stat.node_counter as f64)) / (total_node_count as f64), // percentage of all nodes
             stat.node_counter,
-            average_edges_per_kind[i]
+            (stat.edge_counter as f64) / (stat.node_counter as f64), // average edges per kind
         );
     }

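A sketch of the statistics change above, with `std::collections::HashMap` standing in for `FxHashMap` and made-up data so it runs on its own: the map is pre-sized from the known node count so it never rehashes while growing, and each row's percentage and average are computed inside the printing loop instead of being collected into parallel vectors first:

use std::collections::HashMap;

struct Stat {
    node_counter: u64,
    edge_counter: u64,
}

fn main() {
    let nodes = [("A", 2u64), ("B", 1), ("A", 3)]; // (node kind, edge count), hypothetical data
    let total_node_count = nodes.len();

    // Upper bound: at most one entry per node kind.
    let mut counts: HashMap<&str, Stat> = HashMap::with_capacity(total_node_count);
    for &(kind, edges) in &nodes {
        let stat = counts.entry(kind).or_insert(Stat { node_counter: 0, edge_counter: 0 });
        stat.node_counter += 1;
        stat.edge_counter += edges;
    }

    let mut counts: Vec<(&str, Stat)> = counts.into_iter().collect();
    counts.sort_by_key(|(_, s)| std::cmp::Reverse(s.node_counter));

    for (kind, stat) in &counts {
        println!(
            "{:<8} {:>6.1}% {:>6} {:>6.1}",
            kind,
            (100.0 * stat.node_counter as f64) / (total_node_count as f64), // % of all nodes
            stat.node_counter,
            (stat.edge_counter as f64) / (stat.node_counter as f64), // avg edges per node
        );
    }
}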