Aggregation of drive-by cosmetic changes.

Alexander Regueiro 2019-02-28 22:43:53 +00:00
parent 47f4975cd7
commit 35585c499f
57 changed files with 550 additions and 542 deletions

View File

@ -1351,7 +1351,7 @@ impl<'b> BorrowRefMut<'b> {
}
}
// Clone a `BorrowRefMut`.
// Clones a `BorrowRefMut`.
//
// This is only valid if each `BorrowRefMut` is used to track a mutable
// reference to a distinct, nonoverlapping range of the original object.
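The invariant above is what makes splitting APIs such as `RefMut::map_split` sound. A minimal user-level sketch (not taken from this diff) of two cloned mutable borrows tracking distinct, non-overlapping ranges:

use std::cell::{RefCell, RefMut};

fn main() {
    let cell = RefCell::new([1, 2, 3, 4]);
    // `map_split` clones the underlying `BorrowRefMut`; each half tracks a
    // distinct, non-overlapping range of the original array.
    let borrow = cell.borrow_mut();
    let (mut left, mut right) = RefMut::map_split(borrow, |a| a.split_at_mut(2));
    left[0] = 10;
    right[1] = 40;
    drop(left);
    drop(right);
    assert_eq!(*cell.borrow(), [10, 2, 3, 40]);
}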

View File

@ -71,7 +71,7 @@ impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *const T {}
/// This is used for object safety, to check that a method's receiver type can be dispatched on.
///
/// example impl:
/// An example implementation of the trait:
///
/// ```
/// # #![feature(dispatch_from_dyn, unsize)]

View File

@ -934,7 +934,6 @@ pub fn walk_impl_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, impl_item_ref: &'
visitor.visit_defaultness(defaultness);
}
pub fn walk_struct_def<'v, V: Visitor<'v>>(visitor: &mut V, struct_definition: &'v VariantData) {
if let Some(ctor_hir_id) = struct_definition.ctor_hir_id() {
visitor.visit_id(ctor_hir_id);

View File

@ -17,7 +17,7 @@
//! 'folding' an existing one), then you create a new ID using `next_id()`.
//!
//! You must ensure that IDs are unique. That means that you should only use the
//! ID from an AST node in a single HIR node (you can assume that AST node IDs
//! ID from an AST node in a single HIR node (you can assume that AST node-IDs
//! are unique). Every new node must have a unique ID. Avoid cloning HIR nodes.
//! If you do, you must then set the new node's ID to a fresh one.
//!
@ -175,6 +175,8 @@ pub trait Resolver {
) -> hir::Path;
}
/// Context of `impl Trait` in code, which determines whether it is allowed in an HIR subtree,
/// and if so, what meaning it has.
#[derive(Debug)]
enum ImplTraitContext<'a> {
/// Treat `impl Trait` as shorthand for a new universal generic parameter.
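As a user-level illustration (not part of this diff) of the two meanings `ImplTraitContext` distinguishes: in argument position, `impl Trait` is shorthand for a universal (generic) parameter, while in return position it names an existential type fixed by the function body.

use std::fmt::Debug;

// Universal: equivalent to `fn show<T: Debug>(x: T)`.
fn show(x: impl Debug) {
    println!("{:?}", x);
}

// Existential: the caller only knows "some type implementing Debug";
// the concrete type is chosen by the body.
fn make() -> impl Debug {
    vec![1, 2, 3]
}

fn main() {
    show(42);
    show("hi");
    show(make());
}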
@ -670,14 +672,14 @@ impl<'a> LoweringContext<'a> {
fn insert_item(&mut self, item: hir::Item) {
let id = item.hir_id;
// FIXME: Use debug_asset-rt
// FIXME: Use `debug_asset-rt`.
assert_eq!(id.local_id, hir::ItemLocalId::from_u32(0));
self.items.insert(id, item);
self.modules.get_mut(&self.current_module).unwrap().items.insert(id);
}
fn allocate_hir_id_counter(&mut self, owner: NodeId) -> hir::HirId {
// Setup the counter if needed
// Set up the counter if needed.
self.item_local_id_counters.entry(owner).or_insert(0);
// Always allocate the first `HirId` for the owner itself.
let lowered = self.lower_node_id_with_owner(owner, owner);
@ -718,7 +720,7 @@ impl<'a> LoweringContext<'a> {
{
let counter = self.item_local_id_counters
.insert(owner, HIR_ID_COUNTER_LOCKED)
.unwrap_or_else(|| panic!("No item_local_id_counters entry for {:?}", owner));
.unwrap_or_else(|| panic!("no `item_local_id_counters` entry for {:?}", owner));
let def_index = self.resolver.definitions().opt_def_index(owner).unwrap();
self.current_hir_id_owner.push((def_index, counter));
let ret = f(self);
@ -758,7 +760,7 @@ impl<'a> LoweringContext<'a> {
let local_id_counter = this
.item_local_id_counters
.get_mut(&owner)
.expect("called lower_node_id_with_owner before allocate_hir_id_counter");
.expect("called `lower_node_id_with_owner` before `allocate_hir_id_counter`");
let local_id = *local_id_counter;
// We want to be sure not to modify the counter in the map while it
@ -771,7 +773,7 @@ impl<'a> LoweringContext<'a> {
.resolver
.definitions()
.opt_def_index(owner)
.expect("You forgot to call `create_def_with_parent` or are lowering node ids \
.expect("you forgot to call `create_def_with_parent` or are lowering node-IDs \
that do not belong to the current owner");
hir::HirId {
@ -863,7 +865,7 @@ impl<'a> LoweringContext<'a> {
result
}
/// Creates a new hir::GenericParam for every new lifetime and
/// Creates a new `hir::GenericParam` for every new lifetime and
/// type parameter encountered while evaluating `f`. Definitions
/// are created with the parent provided. If no `parent_id` is
/// provided, no definitions will be returned.
@ -1197,7 +1199,7 @@ impl<'a> LoweringContext<'a> {
assert_eq!(
len + 1,
self.loop_scopes.len(),
"Loop scopes should be added and removed in stack order"
"loop scopes should be added and removed in stack order"
);
self.loop_scopes.pop().unwrap();
@ -1351,9 +1353,9 @@ impl<'a> LoweringContext<'a> {
}
fn lower_generic_arg(&mut self,
arg: &ast::GenericArg,
itctx: ImplTraitContext<'_>)
-> hir::GenericArg {
arg: &ast::GenericArg,
itctx: ImplTraitContext<'_>)
-> hir::GenericArg {
match arg {
ast::GenericArg::Lifetime(lt) => GenericArg::Lifetime(self.lower_lifetime(&lt)),
ast::GenericArg::Type(ty) => GenericArg::Type(self.lower_ty_direct(&ty, itctx)),
@ -1537,7 +1539,7 @@ impl<'a> LoweringContext<'a> {
}
}
}
TyKind::Mac(_) => panic!("TyMac should have been expanded by now."),
TyKind::Mac(_) => bug!("`TyMac` should have been expanded by now."),
TyKind::CVarArgs => {
// Create the implicit lifetime of the "spoofed" `VaList`.
let span = self.sess.source_map().next_point(t.span.shrink_to_lo());
@ -1563,7 +1565,7 @@ impl<'a> LoweringContext<'a> {
// Make sure we know that some funky desugaring has been going on here.
// This is a first: there is code in other places like for loop
// desugaring that explicitly states that we don't want to track that.
// Not tracking it makes lints in rustc and clippy very fragile as
// Not tracking it makes lints in rustc and clippy very fragile, as
// frequently opened issues show.
let exist_ty_span = self.mark_span_with_reason(
CompilerDesugaringKind::ExistentialReturnType,
@ -1650,7 +1652,7 @@ impl<'a> LoweringContext<'a> {
parent_index: DefIndex,
bounds: &hir::GenericBounds,
) -> (HirVec<hir::GenericArg>, HirVec<hir::GenericParam>) {
// This visitor walks over impl trait bounds and creates defs for all lifetimes which
// This visitor walks over `impl Trait` bounds and creates defs for all lifetimes that
// appear in the bounds, excluding lifetimes that are created within the bounds.
// E.g., `'a`, `'b`, but not `'c` in `impl for<'c> SomeTrait<'a, 'b, 'c>`.
struct ImplTraitLifetimeCollector<'r, 'a: 'r> {
@ -1758,8 +1760,7 @@ impl<'a> LoweringContext<'a> {
def_node_id,
DefPathData::LifetimeNs(name.ident().as_interned_str()),
Mark::root(),
lifetime.span,
);
lifetime.span);
let (name, kind) = match name {
hir::LifetimeName::Underscore => (
@ -1770,7 +1771,7 @@ impl<'a> LoweringContext<'a> {
param_name,
hir::LifetimeParamKind::Explicit,
),
_ => bug!("expected LifetimeName::Param or ParamName::Plain"),
_ => bug!("expected `LifetimeName::Param` or `ParamName::Plain`"),
};
self.output_lifetime_params.push(hir::GenericParam {
@ -1915,7 +1916,7 @@ impl<'a> LoweringContext<'a> {
{
ParenthesizedGenericArgs::Err
}
// A warning for now, for compatibility reasons
// A warning for now, for compatibility reasons.
_ => ParenthesizedGenericArgs::Warn,
};
@ -2079,11 +2080,14 @@ impl<'a> LoweringContext<'a> {
}
};
err.emit();
(self.lower_angle_bracketed_parameter_data(
&data.as_angle_bracketed_args(),
param_mode,
itctx).0,
false)
(
self.lower_angle_bracketed_parameter_data(
&data.as_angle_bracketed_args(),
param_mode,
itctx
).0,
false,
)
}
},
}
@ -2109,11 +2113,11 @@ impl<'a> LoweringContext<'a> {
let no_ty_args = generic_args.args.len() == expected_lifetimes;
let no_bindings = generic_args.bindings.is_empty();
let (incl_angl_brckt, insertion_span, suggestion) = if no_ty_args && no_bindings {
// If there are no (non-implicit) generic args or associated-type
// If there are no (non-implicit) generic args or associated type
// bindings, our suggestion includes the angle brackets.
(true, path_span.shrink_to_hi(), format!("<{}>", anon_lt_suggestion))
} else {
// Otherwise—sorry, this is kind of gross—we need to infer the
// Otherwise (sorry, this is kind of gross) we need to infer the
// place to splice in the `'_, ` from the generics that do exist.
let first_generic_span = first_generic_span
.expect("already checked that type args or bindings exist");
@ -2196,19 +2200,21 @@ impl<'a> LoweringContext<'a> {
ast::GenericArg::Type(_) => true,
_ => false,
});
(hir::GenericArgs {
args: args.iter().map(|a| self.lower_generic_arg(a, itctx.reborrow())).collect(),
bindings: bindings.iter().map(|b| self.lower_ty_binding(b, itctx.reborrow())).collect(),
parenthesized: false,
},
!has_types && param_mode == ParamMode::Optional)
(
hir::GenericArgs {
args: args.iter().map(|a| self.lower_generic_arg(a, itctx.reborrow())).collect(),
bindings: bindings.iter().map(|b| self.lower_ty_binding(b, itctx.reborrow())).collect(),
parenthesized: false,
},
!has_types && param_mode == ParamMode::Optional
)
}
fn lower_parenthesized_parameter_data(
&mut self,
data: &ParenthesizedArgs,
) -> (hir::GenericArgs, bool) {
// Switch to `PassThrough` mode for anonymous lifetimes: this
// Switch to `PassThrough` mode for anonymous lifetimes; this
// means that we permit things like `&Ref<T>`, where `Ref` has
// a hidden lifetime parameter. This is needed for backwards
// compatibility, even in contexts like an impl header where
@ -2300,16 +2306,16 @@ impl<'a> LoweringContext<'a> {
// Lowers a function declaration.
//
// decl: the unlowered (ast) function declaration.
// fn_def_id: if `Some`, impl Trait arguments are lowered into generic parameters on the
// `decl`: the unlowered (AST) function declaration.
// `fn_def_id`: if `Some`, impl Trait arguments are lowered into generic parameters on the
// given DefId, otherwise impl Trait is disallowed. Must be `Some` if
// make_ret_async is also `Some`.
// impl_trait_return_allow: determines whether impl Trait can be used in return position.
// This guards against trait declarations and implementations where impl Trait is
// `make_ret_async` is also `Some`.
// `impl_trait_return_allow`: determines whether `impl Trait` can be used in return position.
// This guards against trait declarations and implementations where `impl Trait` is
// disallowed.
// make_ret_async: if `Some`, converts `-> T` into `-> impl Future<Output = T>` in the
// return type. This is used for `async fn` declarations. The `NodeId` is the id of the
// return type impl Trait item.
// `make_ret_async`: if `Some`, converts `-> T` into `-> impl Future<Output = T>` in the
// return type. This is used for `async fn` declarations. The `NodeId` is the ID of the
// return type `impl Trait` item.
fn lower_fn_decl(
&mut self,
decl: &FnDecl,
@ -2350,7 +2356,7 @@ impl<'a> LoweringContext<'a> {
);
self.lower_async_fn_ret_ty(
&decl.output,
in_band_ty_params.expect("make_ret_async but no fn_def_id").0,
in_band_ty_params.expect("`make_ret_async` but no `fn_def_id`").0,
ret_id,
lt_replacement,
)
@ -2401,16 +2407,16 @@ impl<'a> LoweringContext<'a> {
})
}
// Transform `-> T` for `async fn` into -> ExistTy { .. }
// Transforms `-> T` for `async fn` into `-> ExistTy { .. }`
// combined with the following definition of `ExistTy`:
//
// existential type ExistTy<generics_from_parent_fn>: Future<Output = T>;
// existential type ExistTy<generics_from_parent_fn>: Future<Output = T>;
//
// inputs: lowered types of arguments to the function. Used to collect lifetimes.
// output: unlowered output type (`T` in `-> T`)
// fn_def_id: DefId of the parent function. Used to create child impl trait definition.
// exist_ty_node_id: NodeId of the existential type that should be created.
// elided_lt_replacement: replacement for elided lifetimes in the return type
// `inputs`: lowered types of arguments to the function (used to collect lifetimes)
// `output`: unlowered output type (`T` in `-> T`)
// `fn_def_id`: `DefId` of the parent function (used to create child impl trait definition)
// `exist_ty_node_id`: `NodeId` of the existential type that should be created
// `elided_lt_replacement`: replacement for elided lifetimes in the return type
fn lower_async_fn_ret_ty(
&mut self,
output: &FunctionRetTy,
@ -2511,7 +2517,7 @@ impl<'a> LoweringContext<'a> {
}))
}
/// Turns `-> T` into `Future<Output = T>`
/// Transforms `-> T` into `Future<Output = T>`
fn lower_async_fn_output_type_to_future_bound(
&mut self,
output: &FunctionRetTy,
@ -2757,9 +2763,9 @@ impl<'a> LoweringContext<'a> {
-> hir::Generics
{
// Collect `?Trait` bounds in where clause and move them to parameter definitions.
// FIXME: this could probably be done with less rightward drift. Also looks like two control
// paths where report_error is called are also the only paths that advance to after
// the match statement, so the error reporting could probably just be moved there.
// FIXME: this could probably be done with less rightward drift. It also looks like two
// control paths where `report_error` is called are the only paths that advance to after the
// match statement, so the error reporting could probably just be moved there.
let mut add_bounds: NodeMap<Vec<_>> = Default::default();
for pred in &generics.where_clause.predicates {
if let WherePredicate::BoundPredicate(ref bound_pred) = *pred {
@ -2952,7 +2958,7 @@ impl<'a> LoweringContext<'a> {
hir_id: self.lower_node_id(f.id),
ident: match f.ident {
Some(ident) => ident,
// FIXME(jseyfried): positional field hygiene
// FIXME(jseyfried): positional field hygiene.
None => Ident::new(sym::integer(index), f.span),
},
vis: self.lower_visibility(&f.vis, None),
@ -2979,7 +2985,7 @@ impl<'a> LoweringContext<'a> {
}
fn lower_param_bounds(&mut self, bounds: &[GenericBound], mut itctx: ImplTraitContext<'_>)
-> hir::GenericBounds {
-> hir::GenericBounds {
bounds.iter().map(|bound| self.lower_param_bound(bound, itctx.reborrow())).collect()
}
@ -3157,7 +3163,7 @@ impl<'a> LoweringContext<'a> {
match *i {
ItemKind::ExternCrate(orig_name) => hir::ItemKind::ExternCrate(orig_name),
ItemKind::Use(ref use_tree) => {
// Start with an empty prefix
// Start with an empty prefix.
let prefix = Path {
segments: vec![],
span: use_tree.span,
@ -3345,7 +3351,8 @@ impl<'a> LoweringContext<'a> {
self.lower_generics(generics, ImplTraitContext::disallowed()),
self.lower_param_bounds(bounds, ImplTraitContext::disallowed()),
),
ItemKind::MacroDef(..) | ItemKind::Mac(..) => panic!("Shouldn't still be around"),
ItemKind::MacroDef(..)
| ItemKind::Mac(..) => bug!("`TyMac` should have been expanded by now"),
}
// [1] `defaultness.has_value()` is never called for an `impl`, always `true` in order to
@ -3632,7 +3639,7 @@ impl<'a> LoweringContext<'a> {
.map(|x| self.lower_ty(x, ImplTraitContext::disallowed())),
),
),
TraitItemKind::Macro(..) => panic!("Shouldn't exist any more"),
TraitItemKind::Macro(..) => bug!("macro item shouldn't exist at this point"),
};
hir::TraitItem {
@ -3707,7 +3714,7 @@ impl<'a> LoweringContext<'a> {
self.lower_param_bounds(bounds, ImplTraitContext::disallowed()),
),
),
ImplItemKind::Macro(..) => panic!("Shouldn't exist any more"),
ImplItemKind::Macro(..) => bug!("`TyMac` should have been expanded by now"),
};
hir::ImplItem {
@ -5347,7 +5354,7 @@ impl<'a> LoweringContext<'a> {
})
}
/// Given suffix ["b","c","d"], returns path `::std::b::c::d` when
/// Given a suffix `["b", "c", "d"]`, returns path `::std::b::c::d` when
/// `fld.cx.use_std`, and `::core::b::c::d` otherwise.
/// The path is also resolved according to `is_value`.
fn std_path(
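Relating to the `lower_fn_decl` and `lower_async_fn_ret_ty` comments earlier in this file: a rough user-level sketch (using today's stable `impl Trait` return syntax rather than the `existential type` form the lowering produces internally) of how `-> T` on an `async fn` corresponds to `-> impl Future<Output = T>`:

use std::future::Future;

// An `async fn` like this...
async fn fetch_len(s: &str) -> usize {
    s.len()
}

// ...behaves like a function returning an existential `Future` type:
fn fetch_len_desugared<'a>(s: &'a str) -> impl Future<Output = usize> + 'a {
    async move { s.len() }
}

fn main() {
    // Both are lazy futures with `Output = usize`; driving them to completion
    // needs an executor, which is omitted here.
    let _fut = fetch_len("hello");
    let _fut2 = fetch_len_desugared("hello");
}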

View File

@ -19,7 +19,7 @@ use std::iter::repeat;
use crate::ich::StableHashingContext;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult};
/// A Visitor that walks over the HIR and collects Nodes into a HIR map
/// A visitor that walks over the HIR and collects `Node`s into a HIR map.
pub(super) struct NodeCollector<'a, 'hir> {
/// The crate
krate: &'hir Crate,
@ -45,7 +45,7 @@ pub(super) struct NodeCollector<'a, 'hir> {
hcx: StableHashingContext<'a>,
// We are collecting DepNode::HirBody hashes here so we can compute the
// We are collecting `DepNode::HirBody` hashes here so we can compute the
// crate hash from then later on.
hir_body_nodes: Vec<(DefPathHash, Fingerprint)>,
}
@ -109,7 +109,7 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> {
let mut hir_body_nodes = Vec::new();
// Allocate DepNodes for the root module
// Allocate `DepNode`s for the root module.
let (root_mod_sig_dep_index, root_mod_full_dep_index) = {
let Crate {
ref module,

View File

@ -239,7 +239,7 @@ impl DefPath {
"{}[{}]",
component.data.as_interned_str(),
component.disambiguator)
.unwrap();
.unwrap();
}
}
@ -263,7 +263,7 @@ impl DefPath {
"{}[{}]",
component.data.as_interned_str(),
component.disambiguator)
.unwrap();
.unwrap();
}
}
s
@ -442,7 +442,7 @@ impl Definitions {
root_index
}
/// Add a definition with a parent definition.
/// Adds a definition with a parent definition.
pub fn create_def_with_parent(&mut self,
parent: DefIndex,
node_id: ast::NodeId,
@ -559,7 +559,7 @@ impl DefPathData {
GlobalMetaData(name) => {
return name
}
// note that this does not show up in user printouts
// Note that this does not show up in user print-outs.
CrateRoot => sym::double_braced_crate,
Impl => sym::double_braced_impl,
Misc => sym::double_braced_misc,

View File

@ -1,7 +1,8 @@
use self::collector::NodeCollector;
pub use self::def_collector::{DefCollector, MacroInvocationData};
pub use self::definitions::{Definitions, DefKey, DefPath, DefPathData,
DisambiguatedDefPathData, DefPathHash};
pub use self::definitions::{
Definitions, DefKey, DefPath, DefPathData, DisambiguatedDefPathData, DefPathHash
};
use crate::dep_graph::{DepGraph, DepNode, DepKind, DepNodeIndex};
@ -238,7 +239,7 @@ impl<'hir> Map<'hir> {
})
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
#[inline]
pub fn local_def_id_from_hir_id(&self, hir_id: HirId) -> DefId {
self.opt_local_def_id_from_hir_id(hir_id).unwrap_or_else(|| {
@ -247,7 +248,7 @@ impl<'hir> Map<'hir> {
})
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
#[inline]
pub fn opt_local_def_id_from_hir_id(&self, hir_id: HirId) -> Option<DefId> {
let node_id = self.hir_to_node_id(hir_id);
@ -264,7 +265,7 @@ impl<'hir> Map<'hir> {
self.definitions.as_local_node_id(def_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
#[inline]
pub fn as_local_hir_id(&self, def_id: DefId) -> Option<HirId> {
self.definitions.as_local_hir_id(def_id)
@ -426,7 +427,7 @@ impl<'hir> Map<'hir> {
self.fn_decl_by_hir_id(hir_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn fn_decl_by_hir_id(&self, hir_id: HirId) -> Option<FnDecl> {
if let Some(entry) = self.find_entry(hir_id) {
entry.fn_decl().cloned()
@ -455,7 +456,7 @@ impl<'hir> Map<'hir> {
self.maybe_body_owned_by_by_hir_id(hir_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn maybe_body_owned_by_by_hir_id(&self, hir_id: HirId) -> Option<BodyId> {
if let Some(entry) = self.find_entry(hir_id) {
if self.dep_graph.is_fully_enabled() {
@ -483,7 +484,7 @@ impl<'hir> Map<'hir> {
self.body_owner_kind_by_hir_id(hir_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn body_owner_kind_by_hir_id(&self, id: HirId) -> BodyOwnerKind {
match self.get_by_hir_id(id) {
Node::Item(&Item { node: ItemKind::Const(..), .. }) |
@ -587,14 +588,13 @@ impl<'hir> Map<'hir> {
}
}
/// Retrieve the Node corresponding to `id`, panicking if it cannot
/// be found.
/// Retrieves the `Node` corresponding to `id`, panicking if it cannot be found.
pub fn get(&self, id: NodeId) -> Node<'hir> {
let hir_id = self.node_to_hir_id(id);
self.get_by_hir_id(hir_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn get_by_hir_id(&self, id: HirId) -> Node<'hir> {
// read recorded by `find`
self.find_by_hir_id(id).unwrap_or_else(||
@ -634,7 +634,7 @@ impl<'hir> Map<'hir> {
self.find_by_hir_id(hir_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn find_by_hir_id(&self, hir_id: HirId) -> Option<Node<'hir>> {
let result = self.find_entry(hir_id).and_then(|entry| {
if let Node::Crate = entry.node {
@ -665,7 +665,7 @@ impl<'hir> Map<'hir> {
self.hir_to_node_id(parent_hir_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn get_parent_node_by_hir_id(&self, hir_id: HirId) -> HirId {
if self.dep_graph.is_fully_enabled() {
let hir_id_owner = hir_id.owner;
@ -721,24 +721,24 @@ impl<'hir> Map<'hir> {
{
let mut id = start_id;
loop {
let parent_node = self.get_parent_node_by_hir_id(id);
if parent_node == CRATE_HIR_ID {
let parent_id = self.get_parent_node_by_hir_id(id);
if parent_id == CRATE_HIR_ID {
return Ok(CRATE_HIR_ID);
}
if parent_node == id {
if parent_id == id {
return Err(id);
}
if let Some(entry) = self.find_entry(parent_node) {
if let Some(entry) = self.find_entry(parent_id) {
if let Node::Crate = entry.node {
return Err(id);
}
if found(&entry.node) {
return Ok(parent_node);
return Ok(parent_id);
} else if bail_early(&entry.node) {
return Err(parent_node);
return Err(parent_id);
}
id = parent_node;
id = parent_id;
} else {
return Err(id);
}
@ -803,7 +803,7 @@ impl<'hir> Map<'hir> {
self.hir_to_node_id(parent_hir_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn get_parent_item(&self, hir_id: HirId) -> HirId {
match self.walk_parent_nodes(hir_id, |node| match *node {
Node::Item(_) |
@ -824,7 +824,7 @@ impl<'hir> Map<'hir> {
self.get_module_parent_by_hir_id(hir_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn get_module_parent_by_hir_id(&self, id: HirId) -> DefId {
self.local_def_id_from_hir_id(self.get_module_parent_node(id))
}
@ -861,7 +861,7 @@ impl<'hir> Map<'hir> {
self.get_parent_did_by_hir_id(hir_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn get_parent_did_by_hir_id(&self, id: HirId) -> DefId {
self.local_def_id_from_hir_id(self.get_parent_item(id))
}
@ -871,7 +871,7 @@ impl<'hir> Map<'hir> {
self.get_foreign_abi_by_hir_id(hir_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn get_foreign_abi_by_hir_id(&self, hir_id: HirId) -> Abi {
let parent = self.get_parent_item(hir_id);
if let Some(entry) = self.find_entry(parent) {
@ -890,7 +890,7 @@ impl<'hir> Map<'hir> {
self.expect_item_by_hir_id(hir_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn expect_item_by_hir_id(&self, id: HirId) -> &'hir Item {
match self.find_by_hir_id(id) { // read recorded by `find`
Some(Node::Item(item)) => item,
@ -946,7 +946,7 @@ impl<'hir> Map<'hir> {
self.expect_expr_by_hir_id(hir_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn expect_expr_by_hir_id(&self, id: HirId) -> &'hir Expr {
match self.find_by_hir_id(id) { // read recorded by find
Some(Node::Expr(expr)) => expr,
@ -960,7 +960,7 @@ impl<'hir> Map<'hir> {
self.name_by_hir_id(hir_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn name_by_hir_id(&self, id: HirId) -> Name {
match self.get_by_hir_id(id) {
Node::Item(i) => i.ident.name,
@ -977,14 +977,14 @@ impl<'hir> Map<'hir> {
}
}
/// Given a node ID, get a list of attributes associated with the AST
/// corresponding to the Node ID
/// Given a node ID, gets a list of attributes associated with the AST
/// corresponding to the node-ID.
pub fn attrs(&self, id: NodeId) -> &'hir [ast::Attribute] {
let hir_id = self.node_to_hir_id(id);
self.attrs_by_hir_id(hir_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn attrs_by_hir_id(&self, id: HirId) -> &'hir [ast::Attribute] {
self.read(id); // reveals attributes on the node
let attrs = match self.find_entry(id).map(|entry| entry.node) {
@ -1053,7 +1053,7 @@ impl<'hir> Map<'hir> {
self.span_by_hir_id(hir_id)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn span_by_hir_id(&self, hir_id: HirId) -> Span {
self.read(hir_id); // reveals span from node
match self.find_entry(hir_id).map(|entry| entry.node) {
@ -1101,7 +1101,7 @@ impl<'hir> Map<'hir> {
hir_id_to_string(self, self.node_to_hir_id(id), true)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn hir_to_string(&self, id: HirId) -> String {
hir_id_to_string(self, id, true)
}
@ -1110,7 +1110,7 @@ impl<'hir> Map<'hir> {
hir_id_to_string(self, self.node_to_hir_id(id), false)
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn hir_to_user_string(&self, id: HirId) -> String {
hir_id_to_string(self, id, false)
}
@ -1119,7 +1119,7 @@ impl<'hir> Map<'hir> {
print::to_string(self, |s| s.print_node(self.get(id)))
}
// FIXME(@ljedrz): replace the NodeId variant
// FIXME(@ljedrz): replace the `NodeId` variant.
pub fn hir_to_pretty_string(&self, id: HirId) -> String {
print::to_string(self, |s| s.print_node(self.get_by_hir_id(id)))
}
@ -1451,8 +1451,9 @@ pub fn provide(providers: &mut Providers<'_>) {
if let Some(node_id) = tcx.hir().as_local_node_id(def_id) {
tcx.hir().def_kind(node_id)
} else {
bug!("Calling local def_kind query provider for upstream DefId: {:?}",
def_id)
bug!("calling local def_kind query provider for upstream DefId: {:?}",
def_id
);
}
};
}

View File

@ -1,4 +1,4 @@
// HIR datatypes. See the [rustc guide] for more info.
//! HIR datatypes. See the [rustc guide] for more info.
//!
//! [rustc guide]: https://rust-lang.github.io/rustc-guide/hir.html
@ -121,13 +121,13 @@ impl fmt::Display for HirId {
}
}
// hack to ensure that we don't try to access the private parts of `ItemLocalId` in this module
// Hack to ensure that we don't try to access the private parts of `ItemLocalId` in this module
mod item_local_id_inner {
use rustc_data_structures::indexed_vec::Idx;
use rustc_macros::HashStable;
newtype_index! {
/// An `ItemLocalId` uniquely identifies something within a given "item-like",
/// that is, within a hir::Item, hir::TraitItem, or hir::ImplItem. There is no
/// An `ItemLocalId` uniquely identifies something within a given "item-like";
/// that is, within a `hir::Item`, `hir::TraitItem`, or `hir::ImplItem`. There is no
/// guarantee that the numerical value of a given `ItemLocalId` corresponds to
/// the node's position within the owning item in any way, but there is a
/// guarantee that the `LocalItemId`s within an owner occupy a dense range of
@ -568,7 +568,6 @@ pub struct GenericParam {
pub bounds: GenericBounds,
pub span: Span,
pub pure_wrt_drop: bool,
pub kind: GenericParamKind,
}
@ -1566,13 +1565,13 @@ pub enum ExprKind {
/// A struct or struct-like variant literal expression.
///
/// For example, `Foo {x: 1, y: 2}`, or
/// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`.
/// E.g., `Foo {x: 1, y: 2}`, or `Foo {x: 1, .. base}`,
/// where `base` is the `Option<Expr>`.
Struct(P<QPath>, HirVec<Field>, Option<P<Expr>>),
/// An array literal constructed from one repeated element.
///
/// For example, `[1; 5]`. The first expression is the element
/// E.g., `[1; 5]`. The first expression is the element
/// to be repeated; the second is the number of times to repeat it.
Repeat(P<Expr>, AnonConst),
@ -1583,7 +1582,7 @@ pub enum ExprKind {
Err,
}
/// Optionally `Self`-qualified value/type path or associated extension.
/// Represents an optionally `Self`-qualified value/type path or associated extension.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable)]
pub enum QPath {
/// Path to a definition, optionally "fully-qualified" with a `Self`
@ -1738,7 +1737,7 @@ pub struct TraitItem {
pub span: Span,
}
/// A trait method's body (or just argument names).
/// Represents a trait method's body (or just argument names).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable)]
pub enum TraitMethod {
/// No default body in the trait, just a signature.
@ -1751,13 +1750,12 @@ pub enum TraitMethod {
/// Represents a trait method or associated constant or type
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable)]
pub enum TraitItemKind {
/// An associated constant with an optional value (otherwise `impl`s
/// must contain a value)
/// An associated constant with an optional value (otherwise `impl`s must contain a value).
Const(P<Ty>, Option<BodyId>),
/// A method with an optional body
/// A method with an optional body.
Method(MethodSig, TraitMethod),
/// An associated type with (possibly empty) bounds and optional concrete
/// type
/// type.
Type(GenericBounds, Option<P<Ty>>),
}
@ -1808,9 +1806,9 @@ pub struct TypeBinding {
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Ty {
pub hir_id: HirId,
pub node: TyKind,
pub span: Span,
pub hir_id: HirId,
}
impl fmt::Debug for Ty {
@ -1874,7 +1872,7 @@ pub enum TyKind {
BareFn(P<BareFnTy>),
/// The never type (`!`).
Never,
/// A tuple (`(A, B, C, D,...)`).
/// A tuple (`(A, B, C, D, ...)`).
Tup(HirVec<Ty>),
/// A path to a type definition (`module::module::...::Type`), or an
/// associated type (e.g., `<Vec<T> as Trait>::Type` or `<T>::Target`).
@ -2598,7 +2596,7 @@ impl CodegenFnAttrs {
}
}
/// True if it looks like this symbol needs to be exported, for example:
/// Returns `true` if it looks like this symbol needs to be exported, for example:
///
/// * `#[no_mangle]` is present
/// * `#[export_name(...)]` is present
@ -2607,8 +2605,8 @@ impl CodegenFnAttrs {
self.flags.contains(CodegenFnAttrFlags::NO_MANGLE) ||
self.export_name.is_some() ||
match self.linkage {
// these are private, make sure we don't try to consider
// them external
// These are private, so make sure we don't try to consider
// them external.
None |
Some(Linkage::Internal) |
Some(Linkage::Private) => false,
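A small stand-alone sketch (not from this commit) of the first two export indicators the `CodegenFnAttrs` comment above mentions; either attribute keeps an externally visible symbol for the function.

// Exported because of `#[no_mangle]`: the symbol keeps this exact name.
#[no_mangle]
pub extern "C" fn visible_by_no_mangle() -> i32 {
    1
}

// Exported because of `#[export_name = "..."]`: the symbol gets a custom name.
#[export_name = "custom_symbol_name"]
pub extern "C" fn visible_by_export_name() -> i32 {
    2
}

fn main() {
    // They remain ordinary Rust functions when called directly.
    assert_eq!(visible_by_no_mangle() + visible_by_export_name(), 3);
}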

View File

@ -1679,8 +1679,8 @@ impl<'a> State<'a> {
})?;
}
// FIXME(eddyb) This would leak into error messages, e.g.:
// "non-exhaustive patterns: `Some::<..>(_)` not covered".
// FIXME(eddyb): this would leak into error messages (e.g.,
// "non-exhaustive patterns: `Some::<..>(_)` not covered").
if infer_types && false {
start_or_comma(self)?;
self.s.word("..")?;

View File

@ -286,7 +286,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for hir::Mod {
inner_span.hash_stable(hcx, hasher);
// Combining the DefPathHashes directly is faster than feeding them
// Combining the `DefPathHash`s directly is faster than feeding them
// into the hasher. Because we use a commutative combine, we also don't
// have to sort the array.
let item_ids_hash = item_ids

View File

@ -914,10 +914,9 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
// variable, and because type variable's can't (at present, at
// least) capture any of the things bound by this binder.
//
// Really, there is no *particular* reason to do this
// `shallow_resolve` here except as a
// micro-optimization. Naturally I could not
// resist. -nmatsakis
// NOTE(nmatsakis): really, there is no *particular* reason to do this
// `shallow_resolve` here except as a micro-optimization.
// Naturally I could not resist.
let two_unbound_type_vars = {
let a = self.shallow_resolve(predicate.skip_binder().a);
let b = self.shallow_resolve(predicate.skip_binder().b);

View File

@ -858,7 +858,7 @@ impl<'a, 'gcx, 'tcx> Instantiator<'a, 'gcx, 'tcx> {
def_id, substs
);
// Use the same type variable if the exact same Opaque appears more
// Use the same type variable if the exact same opaque type appears more
// than once in the return type (e.g., if it's passed to a type alias).
if let Some(opaque_defn) = self.opaque_types.get(&def_id) {
return opaque_defn.concrete_ty;
@ -880,9 +880,9 @@ impl<'a, 'gcx, 'tcx> Instantiator<'a, 'gcx, 'tcx> {
required_region_bounds
);
// make sure that we are in fact defining the *entire* type
// e.g., `existential type Foo<T: Bound>: Bar;` needs to be
// defined by a function like `fn foo<T: Bound>() -> Foo<T>`.
// Make sure that we are in fact defining the *entire* type
// (e.g., `existential type Foo<T: Bound>: Bar;` needs to be
// defined by a function like `fn foo<T: Bound>() -> Foo<T>`).
debug!(
"instantiate_opaque_types: param_env: {:#?}",
self.param_env,
@ -945,18 +945,15 @@ pub fn may_define_existential_type(
def_id: DefId,
opaque_hir_id: hir::HirId,
) -> bool {
let mut hir_id = tcx
.hir()
.as_local_hir_id(def_id)
.unwrap();
// named existential types can be defined by any siblings or
// children of siblings
let mut hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
// Named existential types can be defined by any siblings or
// children of siblings.
let mod_id = tcx.hir().get_parent_item(opaque_hir_id);
// so we walk up the node tree until we hit the root or the parent
// of the opaque type
while hir_id != mod_id && hir_id != hir::CRATE_HIR_ID {
// We walk up the node tree until we hit the root or the parent
// of the opaque type.
while hir_id != mod_id && hir_id != hir::CRATE_HIR_ID {
hir_id = tcx.hir().get_parent_item(hir_id);
}
// syntactically we are allowed to define the concrete type
// Syntactically we are allowed to define the concrete type.
hir_id == mod_id
}

View File

@ -1,6 +1,6 @@
//! Name resolution for lifetimes.
//!
//! Name resolution for lifetimes follows MUCH simpler rules than the
//! Name resolution for lifetimes follows *much* simpler rules than the
//! full resolve. For example, lifetime names are never exported or
//! used between functions, and they operate in a purely top-down
//! way. Therefore, we break lifetime name resolution into a separate pass.
@ -1009,7 +1009,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
trait_ref: &'tcx hir::PolyTraitRef,
_modifier: hir::TraitBoundModifier,
) {
debug!("visit_poly_trait_ref trait_ref={:?}", trait_ref);
debug!("visit_poly_trait_ref(trait_ref={:?})", trait_ref);
if !self.trait_ref_hack || trait_ref.bound_generic_params.iter().any(|param| {
match param.kind {

View File

@ -124,12 +124,12 @@ impl<'a, 'tcx: 'a> Annotator<'a, 'tcx> {
// This crate explicitly wants staged API.
debug!("annotate(id = {:?}, attrs = {:?})", hir_id, attrs);
if let Some(..) = attr::find_deprecation(&self.tcx.sess.parse_sess, attrs, item_sp) {
self.tcx.sess.span_err(item_sp, "`#[deprecated]` cannot be used in staged api, \
self.tcx.sess.span_err(item_sp, "`#[deprecated]` cannot be used in staged API; \
use `#[rustc_deprecated]` instead");
}
if let Some(mut stab) = attr::find_stability(&self.tcx.sess.parse_sess,
attrs, item_sp) {
// Error if prohibited, or can't inherit anything from a container
// Error if prohibited, or can't inherit anything from a container.
if kind == AnnotationKind::Prohibited ||
(kind == AnnotationKind::Container &&
stab.level.is_stable() &&

View File

@ -49,7 +49,8 @@ pub struct ConstEvalErr<'tcx> {
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
pub struct FrameInfo<'tcx> {
pub call_site: Span, // this span is in the caller!
/// This span is in the caller.
pub call_site: Span,
pub instance: ty::Instance<'tcx>,
pub lint_root: Option<hir::HirId>,
}
@ -200,12 +201,12 @@ fn print_backtrace(backtrace: &mut Backtrace) {
impl<'tcx> From<InterpError<'tcx, u64>> for EvalError<'tcx> {
fn from(kind: InterpError<'tcx, u64>) -> Self {
let backtrace = match env::var("RUST_CTFE_BACKTRACE") {
// matching RUST_BACKTRACE, we treat "0" the same as "not present".
// Matching `RUST_BACKTRACE` -- we treat "0" the same as "not present".
Ok(ref val) if val != "0" => {
let mut backtrace = Backtrace::new_unresolved();
if val == "immediate" {
// Print it now
// Print it now.
print_backtrace(&mut backtrace);
None
} else {

View File

@ -662,7 +662,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
"{}",
message.unwrap_or_else(||
format!("the trait bound `{}` is not satisfied{}",
trait_ref.to_predicate(), post_message)
trait_ref.to_predicate(), post_message)
));
let explanation =
@ -676,7 +676,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
};
if let Some(ref s) = label {
// If it has a custom "#[rustc_on_unimplemented]"
// If it has a custom `#[rustc_on_unimplemented]`
// error message, let's display it as the label!
err.span_label(span, s.as_str());
err.help(&explanation);
@ -684,7 +684,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
err.span_label(span, explanation);
}
if let Some(ref s) = note {
// If it has a custom "#[rustc_on_unimplemented]" note, let's display it
// If it has a custom `#[rustc_on_unimplemented]` note, let's display it
err.note(s.as_str());
}

View File

@ -1465,9 +1465,9 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
let predicate = self.infcx()
.resolve_vars_if_possible(&obligation.predicate);
// OK to skip binder because of the nature of the
// Okay to skip binder because of the nature of the
// trait-ref-is-knowable check, which does not care about
// bound regions
// bound regions.
let trait_ref = predicate.skip_binder().trait_ref;
let result = coherence::trait_ref_is_knowable(self.tcx(), trait_ref);
@ -1853,7 +1853,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
let matching_bounds =
all_bounds.filter(|p| p.def_id() == stack.obligation.predicate.def_id());
// keep only those bounds which may apply, and propagate overflow if it occurs
// Keep only those bounds which may apply, and propagate overflow if it occurs.
let mut param_candidates = vec![];
for bound in matching_bounds {
let wc = self.evaluate_where_clause(stack, bound.clone())?;
@ -1891,9 +1891,9 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
return Ok(());
}
// OK to skip binder because the substs on generator types never
// Okay to skip binder because the substs on generator types never
// touch bound regions, they just capture the in-scope
// type/region parameters
// type/region parameters.
let self_ty = *obligation.self_ty().skip_binder();
match self_ty.sty {
ty::Generator(..) => {
@ -1935,7 +1935,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
}
};
// OK to skip binder because the substs on closure types never
// Okay to skip binder because the substs on closure types never
// touch bound regions, they just capture the in-scope
// type/region parameters
match obligation.self_ty().skip_binder().sty {
@ -1985,7 +1985,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
return Ok(());
}
// OK to skip binder because what we are inspecting doesn't involve bound regions
// Okay to skip binder because what we are inspecting doesn't involve bound regions
let self_ty = *obligation.self_ty().skip_binder();
match self_ty.sty {
ty::Infer(ty::TyVar(_)) => {
@ -2042,7 +2042,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
obligation: &TraitObligation<'tcx>,
candidates: &mut SelectionCandidateSet<'tcx>,
) -> Result<(), SelectionError<'tcx>> {
// OK to skip binder here because the tests we do below do not involve bound regions
// Okay to skip binder here because the tests we do below do not involve bound regions.
let self_ty = *obligation.self_ty().skip_binder();
debug!("assemble_candidates_from_auto_impls(self_ty={:?})", self_ty);
@ -2274,7 +2274,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
obligation: &TraitObligation<'tcx>,
candidates: &mut SelectionCandidateSet<'tcx>,
) -> Result<(), SelectionError<'tcx>> {
// OK to skip binder here because the tests we do below do not involve bound regions
// Okay to skip binder here because the tests we do below do not involve bound regions.
let self_ty = *obligation.self_ty().skip_binder();
debug!("assemble_candidates_for_trait_alias(self_ty={:?})", self_ty);
@ -3094,7 +3094,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
) -> Result<VtableFnPointerData<'tcx, PredicateObligation<'tcx>>, SelectionError<'tcx>> {
debug!("confirm_fn_pointer_candidate({:?})", obligation);
// OK to skip binder; it is reintroduced below
// Okay to skip binder; it is reintroduced below.
let self_ty = self.infcx
.shallow_resolve(*obligation.self_ty().skip_binder());
let sig = self_ty.fn_sig(self.tcx());
@ -3172,9 +3172,9 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
&mut self,
obligation: &TraitObligation<'tcx>,
) -> Result<VtableGeneratorData<'tcx, PredicateObligation<'tcx>>, SelectionError<'tcx>> {
// OK to skip binder because the substs on generator types never
// Okay to skip binder because the substs on generator types never
// touch bound regions, they just capture the in-scope
// type/region parameters
// type/region parameters.
let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder());
let (generator_def_id, substs) = match self_ty.sty {
ty::Generator(id, substs, _) => (id, substs),
@ -3229,9 +3229,9 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
.fn_trait_kind(obligation.predicate.def_id())
.unwrap_or_else(|| bug!("closure candidate for non-fn trait {:?}", obligation));
// OK to skip binder because the substs on closure types never
// Okay to skip binder because the substs on closure types never
// touch bound regions, they just capture the in-scope
// type/region parameters
// type/region parameters.
let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder());
let (closure_def_id, substs) = match self_ty.sty {
ty::Closure(id, substs) => (id, substs),

View File

@ -10,11 +10,11 @@ use std::fmt;
use std::rc::Rc;
use std::collections::{BTreeSet, BTreeMap};
// structural impls for the structs in traits
// Structural impls for the structs in `traits`.
impl<'tcx, T: fmt::Debug> fmt::Debug for Normalized<'tcx, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Normalized({:?},{:?})", self.value, self.obligations)
write!(f, "Normalized({:?}, {:?})", self.value, self.obligations)
}
}
@ -23,13 +23,13 @@ impl<'tcx, O: fmt::Debug> fmt::Debug for traits::Obligation<'tcx, O> {
if ty::tls::with(|tcx| tcx.sess.verbose()) {
write!(
f,
"Obligation(predicate={:?},cause={:?},param_env={:?},depth={})",
"Obligation(predicate={:?}, cause={:?}, param_env={:?}, depth={})",
self.predicate, self.cause, self.param_env, self.recursion_depth
)
} else {
write!(
f,
"Obligation(predicate={:?},depth={})",
"Obligation(predicate={:?}, depth={})",
self.predicate, self.recursion_depth
)
}

View File

@ -1706,21 +1706,21 @@ impl<'gcx> GlobalCtxt<'gcx> {
}
}
/// A trait implemented for all X<'a> types which can be safely and
/// efficiently converted to X<'tcx> as long as they are part of the
/// provided TyCtxt<'tcx>.
/// This can be done, for example, for Ty<'tcx> or SubstsRef<'tcx>
/// A trait implemented for all `X<'a>` types that can be safely and
/// efficiently converted to `X<'tcx>` as long as they are part of the
/// provided `TyCtxt<'tcx>`.
/// This can be done, for example, for `Ty<'tcx>` or `SubstsRef<'tcx>`
/// by looking them up in their respective interners.
///
/// However, this is still not the best implementation as it does
/// need to compare the components, even for interned values.
/// It would be more efficient if TypedArena provided a way to
/// It would be more efficient if `TypedArena` provided a way to
/// determine whether the address is in the allocated range.
///
/// None is returned if the value or one of the components is not part
/// of the provided context.
/// For Ty, None can be returned if either the type interner doesn't
/// contain the TyKind key or if the address of the interned
/// For `Ty`, `None` can be returned if either the type interner doesn't
/// contain the `TyKind` key or if the address of the interned
/// pointer differs. The latter case is possible if a primitive type,
/// e.g., `()` or `u8`, was interned in a different context.
pub trait Lift<'tcx>: fmt::Debug {
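As a self-contained analogy (all names below are invented for illustration, not rustc's API), a `Lift`-like conversion re-checks that a value's components are present in the target context and returns `None` otherwise:

use std::collections::HashSet;

// A toy "interner" standing in for the `TyCtxt<'tcx>` arenas.
struct Interner<'tcx> {
    strings: HashSet<&'tcx str>,
}

trait LiftTo<'tcx> {
    type Lifted;
    fn lift_to(&self, cx: &Interner<'tcx>) -> Option<Self::Lifted>;
}

impl<'a, 'tcx> LiftTo<'tcx> for &'a str {
    type Lifted = &'tcx str;
    fn lift_to(&self, cx: &Interner<'tcx>) -> Option<&'tcx str> {
        // As the comment above notes, we still compare the value itself even
        // for interned data, since we cannot cheaply test whether its address
        // lies inside the target arena.
        cx.strings.get(*self).copied()
    }
}

fn main() {
    let owned = String::from("interned");
    let mut cx = Interner { strings: HashSet::new() };
    cx.strings.insert(owned.as_str());
    assert_eq!("interned".lift_to(&cx), Some("interned"));
    assert_eq!("missing".lift_to(&cx), None);
}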

View File

@ -1090,7 +1090,7 @@ pub enum Predicate<'tcx> {
/// See the `ProjectionPredicate` struct for details.
Projection(PolyProjectionPredicate<'tcx>),
/// no syntax: `T` well-formed
/// No syntax: `T` well-formed.
WellFormed(Ty<'tcx>),
/// Trait must be object-safe.
@ -1245,19 +1245,17 @@ impl<'tcx> TraitPredicate<'tcx> {
impl<'tcx> PolyTraitPredicate<'tcx> {
pub fn def_id(&self) -> DefId {
// ok to skip binder since trait def-id does not care about regions
// Ok to skip binder since trait def-ID does not care about regions.
self.skip_binder().def_id()
}
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord,
Hash, Debug, RustcEncodable, RustcDecodable, HashStable)]
pub struct OutlivesPredicate<A,B>(pub A, pub B); // `A: B`
pub type PolyOutlivesPredicate<A,B> = ty::Binder<OutlivesPredicate<A,B>>;
pub type RegionOutlivesPredicate<'tcx> = OutlivesPredicate<ty::Region<'tcx>,
ty::Region<'tcx>>;
pub type TypeOutlivesPredicate<'tcx> = OutlivesPredicate<Ty<'tcx>,
ty::Region<'tcx>>;
pub struct OutlivesPredicate<A, B>(pub A, pub B); // `A: B`
pub type PolyOutlivesPredicate<A, B> = ty::Binder<OutlivesPredicate<A, B>>;
pub type RegionOutlivesPredicate<'tcx> = OutlivesPredicate<ty::Region<'tcx>, ty::Region<'tcx>>;
pub type TypeOutlivesPredicate<'tcx> = OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>>;
pub type PolyRegionOutlivesPredicate<'tcx> = ty::Binder<RegionOutlivesPredicate<'tcx>>;
pub type PolyTypeOutlivesPredicate<'tcx> = ty::Binder<TypeOutlivesPredicate<'tcx>>;
@ -1314,7 +1312,7 @@ impl<'tcx> PolyProjectionPredicate<'tcx> {
/// Note that this is not the `DefId` of the `TraitRef` containing this
/// associated type, which is in `tcx.associated_item(projection_def_id()).container`.
pub fn projection_def_id(&self) -> DefId {
// okay to skip binder since trait def-id does not care about regions
// Ok to skip binder since trait def-ID does not care about regions.
self.skip_binder().projection_ty.item_def_id
}
}
@ -1371,7 +1369,7 @@ impl<'tcx> ToPredicate<'tcx> for PolyProjectionPredicate<'tcx> {
}
}
// A custom iterator used by Predicate::walk_tys.
// A custom iterator used by `Predicate::walk_tys`.
enum WalkTysIter<'tcx, I, J, K>
where I: Iterator<Item = Ty<'tcx>>,
J: Iterator<Item = Ty<'tcx>>,
@ -1505,7 +1503,7 @@ impl<'tcx> Predicate<'tcx> {
///
/// Example:
///
/// struct Foo<T,U:Bar<T>> { ... }
/// struct Foo<T, U: Bar<T>> { ... }
///
/// Here, the `GenericPredicates` for `Foo` would contain a list of bounds like
/// `[[], [U:Bar<T>]]`. Now if there were some particular reference
@ -2785,10 +2783,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
e.span
}
Some(f) => {
bug!("Node id {} is not an expr: {:?}", id, f);
bug!("node-ID {} is not an expr: {:?}", id, f);
}
None => {
bug!("Node id {} is not present in the node map", id);
bug!("node-ID {} is not present in the node map", id);
}
}
}

View File

@ -96,12 +96,12 @@ pub(super) struct JobOwner<'a, 'tcx: 'a, Q: QueryDescription<'tcx> + 'a> {
}
impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> {
/// Either gets a JobOwner corresponding the query, allowing us to
/// Either gets a `JobOwner` corresponding the query, allowing us to
/// start executing the query, or it returns with the result of the query.
/// If the query is executing elsewhere, this will wait for it.
/// If the query panicked, this will silently panic.
///
/// This function is inlined because that results in a noticeable speedup
/// This function is inlined because that results in a noticeable speed-up
/// for some compile-time benchmarks.
#[inline(always)]
pub(super) fn try_get(
@ -126,9 +126,9 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> {
Entry::Occupied(entry) => {
match *entry.get() {
QueryResult::Started(ref job) => {
//For parallel queries, we'll block and wait until the query running
//in another thread has completed. Record how long we wait in the
//self-profiler
// For parallel queries, we'll block and wait until the query running
// in another thread has completed. Record how long we wait in the
// self-profiler.
#[cfg(parallel_compiler)]
tcx.sess.profiler(|p| p.query_blocked_start(Q::NAME));
@ -138,7 +138,7 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> {
}
}
Entry::Vacant(entry) => {
// No job entry for this query. Return a new one to be started later
// No job entry for this query. Return a new one to be started later.
return tls::with_related_context(tcx, |icx| {
// Create the `parent` variable before `info`. This allows LLVM
// to elide the move of `info`
@ -161,14 +161,14 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> {
mem::drop(lock);
// If we are single-threaded we know that we have cycle error,
// so we just return the error
// so we just return the error.
#[cfg(not(parallel_compiler))]
return TryGetJob::Cycle(cold_path(|| {
Q::handle_cycle_error(tcx, job.find_cycle_in_stack(tcx, span))
}));
// With parallel queries we might just have to wait on some other
// thread
// thread.
#[cfg(parallel_compiler)]
{
let result = job.r#await(tcx, span);
@ -636,8 +636,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
profq_query_msg!(Q::NAME.as_str(), self, key))
);
// We may be concurrently trying both execute and force a query
// Ensure that only one of them runs the query
// We may be concurrently trying both execute and force a query.
// Ensure that only one of them runs the query.
let job = match JobOwner::try_get(self, span, &key) {
TryGetJob::NotYetStarted(job) => job,
TryGetJob::Cycle(_) |
@ -731,7 +731,7 @@ macro_rules! define_queries_inner {
let mut jobs = Vec::new();
// We use try_lock here since we are only called from the
// deadlock handler, and this shouldn't be locked
// deadlock handler, and this shouldn't be locked.
$(
jobs.extend(
self.$name.try_lock().unwrap().active.values().filter_map(|v|
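A stand-alone sketch (invented names, not rustc's implementation) of the behavior `JobOwner::try_get` is documented to provide near the top of this file's changes: the first caller for a key runs the computation, and concurrent callers for the same key block until its result is ready. It uses `std::sync::OnceLock` (Rust 1.70+).

use std::collections::HashMap;
use std::sync::{Arc, Mutex, OnceLock};
use std::thread;

type Key = u32;
type Value = u64;

#[derive(Default)]
struct QueryCache {
    entries: Mutex<HashMap<Key, Arc<OnceLock<Value>>>>,
}

impl QueryCache {
    fn get_or_compute(&self, key: Key, compute: impl FnOnce() -> Value) -> Value {
        // Grab (or create) the per-key cell while holding the map lock...
        let cell = {
            let mut map = self.entries.lock().unwrap();
            map.entry(key).or_default().clone()
        };
        // ...then release the lock: `get_or_init` lets the first caller run
        // `compute`, while any concurrent caller for the same key blocks
        // until the result is available.
        *cell.get_or_init(compute)
    }
}

fn main() {
    let cache = Arc::new(QueryCache::default());
    let handles: Vec<_> = (0..4)
        .map(|_| {
            let cache = Arc::clone(&cache);
            thread::spawn(move || cache.get_or_compute(7, || 7 * 6))
        })
        .collect();
    for h in handles {
        assert_eq!(h.join().unwrap(), 42);
    }
}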

View File

@ -546,7 +546,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
self.def_key(def_id).disambiguated_data.data == DefPathData::Ctor
}
/// Given the `DefId` of a fn or closure, returns the `DefId` of
/// Given the def-ID of a fn or closure, returns the def-ID of
/// the innermost fn item that the closure is contained within.
/// This is a significant `DefId` because, when we do
/// type-checking, we type-check this fn item and all of its

View File

@ -18,7 +18,7 @@ use crate::dep_graph::{DepNode};
use lazy_static;
use crate::session::Session;
// The name of the associated type for `Fn` return types
// The name of the associated type for `Fn` return types.
pub const FN_OUTPUT_NAME: Symbol = sym::Output;
// Useful type to use with `Result<>` indicate that an error has already
@ -45,16 +45,16 @@ fn panic_hook(info: &panic::PanicInfo<'_>) {
TyCtxt::try_print_query_stack();
}
#[cfg(windows)]
unsafe {
if env::var("RUSTC_BREAK_ON_ICE").is_ok() {
extern "system" {
fn DebugBreak();
}
// Trigger a debugger if we crashed during bootstrap
DebugBreak();
#[cfg(windows)]
unsafe {
if env::var("RUSTC_BREAK_ON_ICE").is_ok() {
extern "system" {
fn DebugBreak();
}
// Trigger a debugger if we crashed during bootstrap.
DebugBreak();
}
}
}
pub fn install_panic_hook() {
@ -80,42 +80,42 @@ pub struct QueryMsg {
}
/// A sequence of these messages induce a trace of query-based incremental compilation.
/// FIXME(matthewhammer): Determine whether we should include cycle detection here or not.
// FIXME(matthewhammer): Determine whether we should include cycle detection here or not.
#[derive(Clone,Debug)]
pub enum ProfileQueriesMsg {
/// begin a timed pass
/// Begin a timed pass.
TimeBegin(String),
/// end a timed pass
/// End a timed pass.
TimeEnd,
/// begin a task (see dep_graph::graph::with_task)
/// Begin a task (see `dep_graph::graph::with_task`).
TaskBegin(DepNode),
/// end a task
/// End a task.
TaskEnd,
/// begin a new query
/// can't use `Span` because queries are sent to other thread
/// Begin a new query.
/// Cannot use `Span` because queries are sent to other thread.
QueryBegin(SpanData, QueryMsg),
/// query is satisfied by using an already-known value for the given key
/// Query is satisfied by using an already-known value for the given key.
CacheHit,
/// query requires running a provider; providers may nest, permitting queries to nest.
/// Query requires running a provider; providers may nest, permitting queries to nest.
ProviderBegin,
/// query is satisfied by a provider terminating with a value
/// Query is satisfied by a provider terminating with a value.
ProviderEnd,
/// dump a record of the queries to the given path
/// Dump a record of the queries to the given path.
Dump(ProfQDumpParams),
/// halt the profiling/monitoring background thread
/// Halt the profiling/monitoring background thread.
Halt
}
/// If enabled, send a message to the profile-queries thread
/// If enabled, send a message to the profile-queries thread.
pub fn profq_msg(sess: &Session, msg: ProfileQueriesMsg) {
if let Some(s) = sess.profile_channel.borrow().as_ref() {
s.send(msg).unwrap()
} else {
// Do nothing
// Do nothing.
}
}
/// Set channel for profile queries channel
/// Set channel for profile queries channel.
pub fn profq_set_chan(sess: &Session, s: Sender<ProfileQueriesMsg>) -> bool {
let mut channel = sess.profile_channel.borrow_mut();
if channel.is_none() {

View File

@ -19,7 +19,6 @@ use rustc::hir;
use rustc::hir::intravisit;
use rustc::hir::print as pprust;
#[derive(Copy, Clone, Debug)]
pub enum EntryOrExit {
Entry,
@ -92,7 +91,7 @@ fn get_cfg_indices<'a>(id: hir::ItemLocalId,
index.get(&id).map_or(&[], |v| &v[..])
}
impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> {
impl<'a, 'tcx, O: DataFlowOperator> DataFlowContext<'a, 'tcx, O> {
fn has_bitset_for_local_id(&self, n: hir::ItemLocalId) -> bool {
assert!(n != hir::DUMMY_ITEM_LOCAL_ID);
self.local_id_to_index.contains_key(&n)
@ -225,7 +224,7 @@ pub enum KillFrom {
Execution,
}
impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> {
impl<'a, 'tcx, O: DataFlowOperator> DataFlowContext<'a, 'tcx, O> {
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
analysis_name: &'static str,
body: Option<&hir::Body>,
@ -500,8 +499,8 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> {
}
}
impl<'a, 'tcx, O:DataFlowOperator+Clone+'static> DataFlowContext<'a, 'tcx, O> {
// ^^^^^^^^^^^^^ only needed for pretty printing
// N.B. `Clone + 'static` only needed for pretty printing.
impl<'a, 'tcx, O: DataFlowOperator + Clone + 'static> DataFlowContext<'a, 'tcx, O> {
pub fn propagate(&mut self, cfg: &cfg::CFG, body: &hir::Body) {
//! Performs the data flow analysis.
@ -538,7 +537,7 @@ impl<'a, 'tcx, O:DataFlowOperator+Clone+'static> DataFlowContext<'a, 'tcx, O> {
}
}
impl<'a, 'b, 'tcx, O:DataFlowOperator> PropagationContext<'a, 'b, 'tcx, O> {
impl<'a, 'b, 'tcx, O: DataFlowOperator> PropagationContext<'a, 'b, 'tcx, O> {
fn walk_cfg(&mut self,
cfg: &cfg::CFG,
nodes_po: &[CFGIndex],
@ -547,7 +546,7 @@ impl<'a, 'b, 'tcx, O:DataFlowOperator> PropagationContext<'a, 'b, 'tcx, O> {
bits_to_string(in_out), self.dfcx.analysis_name);
assert!(self.dfcx.bits_per_id > 0);
// Iterate over nodes in reverse postorder
// Iterate over nodes in reverse post-order.
for &node_index in nodes_po.iter().rev() {
let node = cfg.graph.node(node_index);
debug!("DataFlowContext::walk_cfg idx={:?} id={:?} begin in_out={}",
@ -631,9 +630,9 @@ fn bits_to_string(words: &[usize]) -> String {
}
#[inline]
fn bitwise<Op:BitwiseOperator>(out_vec: &mut [usize],
in_vec: &[usize],
op: &Op) -> bool {
fn bitwise<Op: BitwiseOperator>(out_vec: &mut [usize],
in_vec: &[usize],
op: &Op) -> bool {
assert_eq!(out_vec.len(), in_vec.len());
let mut changed = false;
for (out_elt, in_elt) in out_vec.iter_mut().zip(in_vec) {

View File

@ -937,8 +937,8 @@ fn codegen_msvc_try(
bx.store(ret, dest, i32_align);
}
// Definition of the standard "try" function for Rust using the GNU-like model
// of exceptions (e.g., the normal semantics of LLVM's landingpad and invoke
// Definition of the standard `try` function for Rust using the GNU-like model
// of exceptions (e.g., the normal semantics of LLVM's `landingpad` and `invoke`
// instructions).
//
// This codegen is a little surprising because we always call a shim

View File

@ -1127,10 +1127,10 @@ fn link_args<'a, B: ArchiveBuilder<'a>>(cmd: &mut dyn Linker,
// For this reason, we have organized the arguments we pass to the linker as
// such:
//
// 1. The local object that LLVM just generated
// 2. Local native libraries
// 3. Upstream rust libraries
// 4. Upstream native libraries
// 1. The local object that LLVM just generated
// 2. Local native libraries
// 3. Upstream rust libraries
// 4. Upstream native libraries
//
// The rationale behind this ordering is that those items lower down in the
// list can't depend on items higher up in the list. For example nothing can

View File

@ -967,7 +967,7 @@ impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
bx.range_metadata(llval, 0..2);
}
}
// We store bools as i8 so we need to truncate to i1.
// We store bools as `i8` so we need to truncate to `i1`.
llval = base::to_immediate(bx, llval, arg.layout);
}
}
@ -1097,7 +1097,7 @@ impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
fn_ret: &ArgType<'tcx, Ty<'tcx>>,
llargs: &mut Vec<Bx::Value>, is_intrinsic: bool
) -> ReturnDest<'tcx, Bx::Value> {
// If the return is ignored, we can just return a do-nothing ReturnDest
// If the return is ignored, we can just return a do-nothing `ReturnDest`.
if fn_ret.is_ignore() {
return ReturnDest::Nothing;
}
@ -1106,8 +1106,8 @@ impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
LocalRef::Place(dest) => dest,
LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
LocalRef::Operand(None) => {
// Handle temporary places, specifically Operand ones, as
// they don't have allocas
// Handle temporary places, specifically `Operand` ones, as
// they don't have `alloca`s.
return if fn_ret.is_indirect() {
// Odd, but possible, case: we have an operand temporary,
// but the calling convention has an indirect return.
@ -1117,8 +1117,8 @@ impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
ReturnDest::IndirectOperand(tmp, index)
} else if is_intrinsic {
// Currently, intrinsics always need a location to store
// the result. so we create a temporary alloca for the
// result
// the result, so we create a temporary `alloca` for the
// result.
let tmp = PlaceRef::alloca(bx, fn_ret.layout, "tmp_ret");
tmp.storage_live(bx);
ReturnDest::IndirectOperand(tmp, index)
@ -1137,7 +1137,7 @@ impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
if dest.align < dest.layout.align.abi {
// Currently, MIR code generation does not create calls
// that store directly to fields of packed structs (in
// fact, the calls it creates write only to temps),
// fact, the calls it creates write only to temps).
//
// If someone changes that, please update this code path
// to create a temporary.
@ -1232,12 +1232,12 @@ impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
enum ReturnDest<'tcx, V> {
// Do nothing, the return value is indirect or ignored
// Do nothing; the return value is indirect or ignored.
Nothing,
// Store the return value to the pointer
// Store the return value to the pointer.
Store(PlaceRef<'tcx, V>),
// Stores an indirect return value to an operand local place
// Store an indirect return value to an operand local place.
IndirectOperand(PlaceRef<'tcx, V>, mir::Local),
// Stores a direct return value to an operand local place
// Store a direct return value to an operand local place.
DirectOperand(mir::Local)
}

View File

@ -120,7 +120,7 @@ impl<'a, 'tcx: 'a, V: CodegenObject> PlaceRef<'tcx, V> {
bx.struct_gep(self.llval, bx.cx().backend_field_index(self.layout, ix))
};
PlaceRef {
// HACK(eddyb) have to bitcast pointers until LLVM removes pointee types.
// HACK(eddyb): have to bitcast pointers until LLVM removes pointee types.
llval: bx.pointercast(llval, bx.cx().type_ptr_to(bx.cx().backend_type(field))),
llextra: if bx.cx().type_has_metadata(field.ty) {
self.llextra
@ -134,7 +134,7 @@ impl<'a, 'tcx: 'a, V: CodegenObject> PlaceRef<'tcx, V> {
// Simple cases, which don't need DST adjustment:
// * no metadata available - just log the case
// * known alignment - sized types, [T], str or a foreign type
// * known alignment - sized types, `[T]`, `str` or a foreign type
// * packed struct - there is no alignment padding
match field.ty.sty {
_ if self.llextra.is_none() => {
@ -156,18 +156,19 @@ impl<'a, 'tcx: 'a, V: CodegenObject> PlaceRef<'tcx, V> {
}
// We need to get the pointer manually now.
// We do this by casting to a *i8, then offsetting it by the appropriate amount.
// We do this by casting to a `*i8`, then offsetting it by the appropriate amount.
// We do this instead of, say, simply adjusting the pointer from the result of a GEP
// because the field may have an arbitrary alignment in the LLVM representation
// anyway.
//
// To demonstrate:
// struct Foo<T: ?Sized> {
// x: u16,
// y: T
// }
//
// The type Foo<Foo<Trait>> is represented in LLVM as { u16, { u16, u8 }}, meaning that
// struct Foo<T: ?Sized> {
// x: u16,
// y: T
// }
//
// The type `Foo<Foo<Trait>>` is represented in LLVM as `{ u16, { u16, u8 }}`, meaning that
// the `y` field has 16-bit alignment.
let meta = self.llextra;
@ -180,9 +181,9 @@ impl<'a, 'tcx: 'a, V: CodegenObject> PlaceRef<'tcx, V> {
// Bump the unaligned offset up to the appropriate alignment using the
// following expression:
//
// (unaligned offset + (align - 1)) & -align
// (unaligned offset + (align - 1)) & -align
// Calculate offset
// Calculate offset.
let align_sub_1 = bx.sub(unsized_align, bx.cx().const_usize(1u64));
let and_lhs = bx.add(unaligned_offset, align_sub_1);
let and_rhs = bx.neg(unsized_align);
@ -190,11 +191,11 @@ impl<'a, 'tcx: 'a, V: CodegenObject> PlaceRef<'tcx, V> {
debug!("struct_field_ptr: DST field offset: {:?}", offset);
// Cast and adjust pointer
// Cast and adjust pointer.
let byte_ptr = bx.pointercast(self.llval, bx.cx().type_i8p());
let byte_ptr = bx.gep(byte_ptr, &[offset]);
// Finally, cast back to the type expected
// Finally, cast back to the type expected.
let ll_fty = bx.cx().backend_type(field);
debug!("struct_field_ptr: Field type is {:?}", ll_fty);
@ -235,7 +236,7 @@ impl<'a, 'tcx: 'a, V: CodegenObject> PlaceRef<'tcx, V> {
// We use `i1` for bytes that are always `0` or `1`,
// e.g., `#[repr(i8)] enum E { A, B }`, but we can't
// let LLVM interpret the `i1` as signed, because
// then `i1 1` (i.e., E::B) is effectively `i8 -1`.
// then `i1 1` (i.e., `E::B`) is effectively `i8 -1`.
layout::Int(_, signed) => !discr_scalar.is_bool() && signed,
_ => false
};
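// A concrete sketch of the case described above (the `i1`-vs-`i8` choice itself is an
// LLVM-level detail that is not visible from Rust; `ExampleE` is an illustrative name):
#[allow(dead_code)]
#[repr(i8)]
enum ExampleE { A, B }
// `ExampleE::B as i8` is `1`; sign-extending a one-bit value of `1` would instead give
// `-1`, which is why the discriminant bit must be treated as unsigned (zero-extended).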
@ -248,9 +249,9 @@ impl<'a, 'tcx: 'a, V: CodegenObject> PlaceRef<'tcx, V> {
} => {
let niche_llty = bx.cx().immediate_backend_type(discr.layout);
if niche_variants.start() == niche_variants.end() {
// FIXME(eddyb) Check the actual primitive type here.
// FIXME(eddyb): check the actual primitive type here.
let niche_llval = if niche_start == 0 {
// HACK(eddyb) Using `c_null` as it works on all types.
// HACK(eddyb): using `c_null` as it works on all types.
bx.cx().const_null(niche_llty)
} else {
bx.cx().const_uint_big(niche_llty, niche_start)
@ -314,7 +315,7 @@ impl<'a, 'tcx: 'a, V: CodegenObject> PlaceRef<'tcx, V> {
if variant_index != dataful_variant {
if bx.cx().sess().target.target.arch == "arm" ||
bx.cx().sess().target.target.arch == "aarch64" {
// Issue #34427: As workaround for LLVM bug on ARM,
// FIXME(#34427): as a workaround for an LLVM bug on ARM,
// use memset of 0 before assigning niche value.
let fill_byte = bx.cx().const_u8(0);
let size = bx.cx().const_usize(self.layout.size.bytes());
@ -326,9 +327,9 @@ impl<'a, 'tcx: 'a, V: CodegenObject> PlaceRef<'tcx, V> {
let niche_value = variant_index.as_u32() - niche_variants.start().as_u32();
let niche_value = (niche_value as u128)
.wrapping_add(niche_start);
// FIXME(eddyb) Check the actual primitive type here.
// FIXME(eddyb): check the actual primitive type here.
let niche_llval = if niche_value == 0 {
// HACK(eddyb) Using `c_null` as it works on all types.
// HACK(eddyb): using `c_null` as it works on all types.
bx.cx().const_null(niche_llty)
} else {
bx.cx().const_uint_big(niche_llty, niche_value)
@ -429,10 +430,10 @@ impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
_ => bug!("promoteds should have an allocation: {:?}", val),
},
Err(_) => {
// this is unreachable as long as runtime
// This is unreachable as long as runtime
// and compile-time agree on values
// With floats that won't always be true
// so we generate an abort
// With floats that won't always be true,
// so we generate an abort.
bx.abort();
let llval = bx.cx().const_undef(
bx.cx().type_ptr_to(bx.cx().backend_type(layout))
@ -502,7 +503,7 @@ impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
// Cast the place pointer type to the new
// array or slice type (*[%_; new_len]).
// array or slice type (`*[%_; new_len]`).
subslice.llval = bx.pointercast(subslice.llval,
bx.cx().type_ptr_to(bx.cx().backend_type(subslice.layout)));

View File

@ -196,7 +196,7 @@ declare_lint_pass!(UnsafeCode => [UNSAFE_CODE]);
impl UnsafeCode {
fn report_unsafe(&self, cx: &EarlyContext<'_>, span: Span, desc: &'static str) {
// This comes from a macro that has #[allow_internal_unsafe].
// This comes from a macro that has `#[allow_internal_unsafe]`.
if span.allows_unsafe() {
return;
}
@ -216,7 +216,7 @@ impl EarlyLintPass for UnsafeCode {
fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) {
if let ast::ExprKind::Block(ref blk, _) = e.node {
// Don't warn about generated blocks, that'll just pollute the output.
// Don't warn about generated blocks; that'll just pollute the output.
if blk.rules == ast::BlockCheckMode::Unsafe(ast::UserProvided) {
self.report_unsafe(cx, blk.span, "usage of an `unsafe` block");
}
@ -335,7 +335,7 @@ impl MissingDoc {
// Only check publicly-visible items, using the result from the privacy pass.
// It's an option so the crate root can also use this function (it doesn't
// have a NodeId).
// have a `NodeId`).
if let Some(id) = id {
if !cx.access_levels.is_exported(id) {
return;
@ -389,7 +389,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingDoc {
hir::ItemKind::Struct(..) => "a struct",
hir::ItemKind::Union(..) => "a union",
hir::ItemKind::Trait(.., ref trait_item_refs) => {
// Issue #11592, traits are always considered exported, even when private.
// Issue #11592: traits are always considered exported, even when private.
if let hir::VisibilityKind::Inherited = it.vis.node {
self.private_traits.insert(it.hir_id);
for trait_item_ref in trait_item_refs {
@ -401,7 +401,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingDoc {
}
hir::ItemKind::Ty(..) => "a type alias",
hir::ItemKind::Impl(.., Some(ref trait_ref), _, ref impl_item_refs) => {
// If the trait is private, add the impl items to private_traits so they don't get
// If the trait is private, add the impl items to `private_traits` so they don't get
// reported for missing docs.
let real_trait = trait_ref.path.res.def_id();
if let Some(hir_id) = cx.tcx.hir().as_local_hir_id(real_trait) {
@ -1215,7 +1215,6 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TrivialConstraints {
use rustc::ty::fold::TypeFoldable;
use rustc::ty::Predicate::*;
if cx.tcx.features().trivial_bounds {
let def_id = cx.tcx.hir().local_def_id_from_hir_id(item.hir_id);
let predicates = cx.tcx.predicates_of(def_id);
@ -1464,7 +1463,7 @@ impl KeywordIdents {
_ => return,
};
// don't lint `r#foo`
// Don't lint `r#foo`.
if cx.sess.parse_sess.raw_identifier_spans.borrow().contains(&ident.span) {
return;
}
@ -1717,8 +1716,6 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for ExplicitOutlivesRequirements {
);
err.emit();
}
}
}
}

View File

@ -576,7 +576,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
/// Adds a suggestion to errors where a `impl Trait` is returned.
///
/// ```text
/// help: to allow this impl Trait to capture borrowed data with lifetime `'1`, add `'_` as
/// help: to allow this `impl Trait` to capture borrowed data with lifetime `'1`, add `'_` as
/// a constraint
/// |
/// LL | fn iter_values_anon(&self) -> impl Iterator<Item=u32> + 'a {
@ -652,7 +652,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
diag.span_suggestion(
span,
&format!(
"to allow this impl Trait to capture borrowed data with lifetime \
"to allow this `impl Trait` to capture borrowed data with lifetime \
`{}`, add `{}` as a constraint",
fr_name, suggestable_fr_name,
),

View File

@ -289,9 +289,9 @@ pub enum BlockFrame {
/// Evaluation is currently within a statement.
///
/// Examples include:
/// 1. `EXPR;`
/// 2. `let _ = EXPR;`
/// 3. `let x = EXPR;`
/// 1. `EXPR;`
/// 2. `let _ = EXPR;`
/// 3. `let x = EXPR;`
Statement {
/// If true, then statement discards result from evaluating
/// the expression (such as examples 1 and 2 above).

View File

@ -90,7 +90,7 @@
//!
//! Note though that as a side-effect of creating a codegen units per
//! source-level module, functions from the same module will be available for
//! inlining, even when they are not marked #[inline].
//! inlining, even when they are not marked `#[inline]`.
use std::collections::hash_map::Entry;
use std::cmp;
@ -152,7 +152,7 @@ pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// In the next step, we use the inlining map to determine which additional
// monomorphizations have to go into each codegen unit. These additional
// monomorphizations can be drop-glue, functions from external crates, and
// local functions the definition of which is marked with #[inline].
// local functions the definition of which is marked with `#[inline]`.
let mut post_inlining = place_inlined_mono_items(initial_partitioning,
inlining_map);
@ -166,7 +166,7 @@ pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
internalize_symbols(tcx, &mut post_inlining, inlining_map);
}
// Finally, sort by codegen unit name, so that we get deterministic results
// Finally, sort by codegen unit name, so that we get deterministic results.
let PostInliningPartitioning {
codegen_units: mut result,
mono_item_placements: _,
@ -258,8 +258,8 @@ fn place_root_mono_items<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
roots.insert(mono_item);
}
// always ensure we have at least one CGU; otherwise, if we have a
// crate with just types (for example), we could wind up with no CGU
// Always ensure we have at least one CGU; otherwise, if we have a
// crate with just types (for example), we could wind up with no CGU.
if codegen_units.is_empty() {
let codegen_unit_name = fallback_cgu_name(cgu_name_builder);
codegen_units.insert(codegen_unit_name.clone(),
@ -300,10 +300,10 @@ fn mono_item_visibility(
export_generics: bool,
) -> Visibility {
let instance = match mono_item {
// This is pretty complicated, go below
// This is pretty complicated; see below.
MonoItem::Fn(instance) => instance,
// Misc handling for generics and such, but otherwise
// Misc handling for generics and such, but otherwise:
MonoItem::Static(def_id) => {
return if tcx.is_reachable_non_generic(*def_id) {
*can_be_internalized = false;
@ -358,11 +358,10 @@ fn mono_item_visibility(
let is_generic = instance.substs.non_erasable_generics().next().is_some();
// Upstream `DefId` instances get different handling than local ones
// Upstream `DefId` instances get different handling than local ones.
if !def_id.is_local() {
return if export_generics && is_generic {
// If it is a upstream monomorphization
// and we export generics, we must make
// If it is an upstream monomorphization and we export generics, we must make
// it available to downstream crates.
*can_be_internalized = false;
default_visibility(tcx, def_id, true)
@ -374,20 +373,16 @@ fn mono_item_visibility(
if is_generic {
if export_generics {
if tcx.is_unreachable_local_definition(def_id) {
// This instance cannot be used
// from another crate.
// This instance cannot be used from another crate.
Visibility::Hidden
} else {
// This instance might be useful in
// a downstream crate.
// This instance might be useful in a downstream crate.
*can_be_internalized = false;
default_visibility(tcx, def_id, true)
}
} else {
// We are not exporting generics or
// the definition is not reachable
// for downstream crates, we can
// internalize its instantiations.
// We are not exporting generics, or the definition is not reachable
// for downstream crates; either way, we can internalize its instantiations.
Visibility::Hidden
}
} else {
@ -449,19 +444,19 @@ fn default_visibility(tcx: TyCtxt<'_, '_, '_>, id: DefId, is_generic: bool) -> V
return Visibility::Default
}
// Generic functions never have export level C
// Generic functions never have export-level C.
if is_generic {
return Visibility::Hidden
}
// Things with export level C don't get instantiated in
// downstream crates
// downstream crates.
if !id.is_local() {
return Visibility::Hidden
}
// C-export level items remain at `Default`; all other internal
// items become `Hidden`
// items become `Hidden`.
match tcx.reachable_non_generics(id.krate).get(&id) {
Some(SymbolExportLevel::C) => Visibility::Default,
_ => Visibility::Hidden,
@ -519,7 +514,7 @@ fn place_inlined_mono_items<'tcx>(initial_partitioning: PreInliningPartitioning<
let single_codegen_unit = initial_cgus.len() == 1;
for old_codegen_unit in initial_cgus {
// Collect all items that need to be available in this codegen unit
// Collect all items that need to be available in this codegen unit.
let mut reachable = FxHashSet::default();
for root in old_codegen_unit.items().keys() {
follow_inlining(*root, inlining_map, &mut reachable);
@ -527,10 +522,10 @@ fn place_inlined_mono_items<'tcx>(initial_partitioning: PreInliningPartitioning<
let mut new_codegen_unit = CodegenUnit::new(old_codegen_unit.name().clone());
// Add all monomorphizations that are not already there
// Add all monomorphizations that are not already there.
for mono_item in reachable {
if let Some(linkage) = old_codegen_unit.items().get(&mono_item) {
// This is a root, just copy it over
// This is a root, just copy it over.
new_codegen_unit.items_mut().insert(mono_item, *linkage);
} else {
if roots.contains(&mono_item) {
@ -538,7 +533,7 @@ fn place_inlined_mono_items<'tcx>(initial_partitioning: PreInliningPartitioning<
{:?}", mono_item);
}
// This is a cgu-private copy
// This is a CGU-private copy.
new_codegen_unit.items_mut().insert(
mono_item,
(Linkage::Internal, Visibility::Default),
@ -547,7 +542,7 @@ fn place_inlined_mono_items<'tcx>(initial_partitioning: PreInliningPartitioning<
if !single_codegen_unit {
// If there is more than one codegen unit, we need to keep track
// in which codegen units each monomorphization is placed:
// in which codegen units each monomorphization is placed.
match mono_item_placements.entry(mono_item) {
Entry::Occupied(e) => {
let placement = e.into_mut();
@ -656,8 +651,8 @@ fn internalize_symbols<'a, 'tcx>(_tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
fn characteristic_def_id_of_mono_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
mono_item: MonoItem<'tcx>)
-> Option<DefId> {
mono_item: MonoItem<'tcx>)
-> Option<DefId> {
match mono_item {
MonoItem::Fn(instance) => {
let def_id = match instance.def {
@ -709,10 +704,10 @@ fn compute_codegen_unit_name(tcx: TyCtxt<'_, '_, '_>,
volatile: bool,
cache: &mut CguNameCache)
-> InternedString {
// Find the innermost module that is not nested within a function
// Find the innermost module that is not nested within a function.
let mut current_def_id = def_id;
let mut cgu_def_id = None;
// Walk backwards from the item we want to find the module for:
// Walk backwards from the item we want to find the module for.
loop {
if current_def_id.index == CRATE_DEF_INDEX {
if cgu_def_id.is_none() {

View File

@ -1,4 +1,4 @@
// Validate AST before lowering it to HIR
// Validate AST before lowering it to HIR.
//
// This pass is supposed to catch things that fit into AST data structures,
// but not permitted by the language. It runs after expansion when AST is frozen,
@ -56,7 +56,7 @@ struct AstValidator<'a> {
/// Used to ban nested `impl Trait`, e.g., `impl Into<impl Debug>`.
/// Nested `impl Trait` _is_ allowed in associated type position,
/// e.g `impl Iterator<Item=impl Debug>`
/// e.g., `impl Iterator<Item = impl Debug>`.
outer_impl_trait: Option<OuterImplTrait>,
/// Used to ban `impl Trait` in path projections like `<impl Iterator>::Item`
@ -94,9 +94,9 @@ impl<'a> AstValidator<'a> {
}
fn visit_assoc_type_binding_from_generic_args(&mut self, type_binding: &'a TypeBinding) {
// rust-lang/rust#57979: bug in old visit_generic_args called
// walk_ty rather than visit_ty, skipping outer `impl Trait`
// if it happened to occur at `type_binding.ty`
// rust-lang/rust#57979: bug in old `visit_generic_args` called
// `walk_ty` rather than `visit_ty`, skipping outer `impl Trait`
// if it happened to occur at `type_binding.ty`.
if let TyKind::ImplTrait(..) = type_binding.ty.node {
self.warning_period_57979_didnt_record_next_impl_trait = true;
}
@ -104,9 +104,9 @@ impl<'a> AstValidator<'a> {
}
fn visit_ty_from_generic_args(&mut self, ty: &'a Ty) {
// rust-lang/rust#57979: bug in old visit_generic_args called
// walk_ty rather than visit_ty, skippping outer `impl Trait`
// if it happened to occur at `ty`
// rust-lang/rust#57979: bug in old `visit_generic_args` called
// `walk_ty` rather than `visit_ty`, skipping outer `impl Trait`
// if it happened to occur at `ty`.
if let TyKind::ImplTrait(..) = ty.node {
self.warning_period_57979_didnt_record_next_impl_trait = true;
}
@ -117,10 +117,10 @@ impl<'a> AstValidator<'a> {
let only_recorded_since_pull_request_57730 =
self.warning_period_57979_didnt_record_next_impl_trait;
// (this flag is designed to be set to true and then only
// (This flag is designed to be set to `true`, and then only
// reach the construction point for the outer impl trait once,
// so it's safe and easiest to unconditionally reset it to
// false)
// false.)
self.warning_period_57979_didnt_record_next_impl_trait = false;
OuterImplTrait {
@ -128,7 +128,7 @@ impl<'a> AstValidator<'a> {
}
}
// Mirrors visit::walk_ty, but tracks relevant state
// Mirrors `visit::walk_ty`, but tracks relevant state.
fn walk_ty(&mut self, t: &'a Ty) {
match t.node {
TyKind::ImplTrait(..) => {
@ -619,15 +619,18 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
// Auto traits cannot have generics, super traits nor contain items.
if !generics.params.is_empty() {
struct_span_err!(self.session, item.span, E0567,
"auto traits cannot have generic parameters").emit();
"auto traits cannot have generic parameters"
).emit();
}
if !bounds.is_empty() {
struct_span_err!(self.session, item.span, E0568,
"auto traits cannot have super traits").emit();
"auto traits cannot have super traits"
).emit();
}
if !trait_items.is_empty() {
struct_span_err!(self.session, item.span, E0380,
"auto traits cannot have methods or associated items").emit();
"auto traits cannot have methods or associated items"
).emit();
}
}
self.no_questions_in_bounds(bounds, "supertraits", true);
@ -699,7 +702,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
visit::walk_foreign_item(self, fi)
}
// Mirrors visit::walk_generic_args, but tracks relevant state
// Mirrors `visit::walk_generic_args`, but tracks relevant state.
fn visit_generic_args(&mut self, _: Span, generic_args: &'a GenericArgs) {
match *generic_args {
GenericArgs::AngleBracketed(ref data) => {
@ -718,7 +721,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
generic_args.span(),
);
// Type bindings such as `Item=impl Debug` in `Iterator<Item=Debug>`
// Type bindings such as `Item = impl Debug` in `Iterator<Item = Debug>`
// are allowed to contain nested `impl Trait`.
self.with_impl_trait(None, |this| {
walk_list!(this, visit_assoc_type_binding_from_generic_args, &data.bindings);

View File

@ -2134,7 +2134,7 @@ impl<'a> Resolver<'a> {
record_used_id: Option<NodeId>,
path_span: Span)
-> Option<LexicalScopeBinding<'a>> {
assert!(ns == TypeNS || ns == ValueNS);
assert!(ns == TypeNS || ns == ValueNS);
if ident.name == kw::Invalid {
return Some(LexicalScopeBinding::Res(Res::Err));
}
@ -2530,10 +2530,12 @@ impl<'a> Resolver<'a> {
match item.node {
ItemKind::Ty(_, ref generics) |
ItemKind::Fn(_, _, ref generics, _) |
ItemKind::Existential(_, ref generics) => {
self.with_generic_param_rib(HasGenericParams(generics, ItemRibKind),
|this| visit::walk_item(this, item));
ItemKind::Existential(_, ref generics) |
ItemKind::Fn(_, _, ref generics, _) => {
self.with_generic_param_rib(
HasGenericParams(generics, ItemRibKind),
|this| visit::walk_item(this, item)
);
}
ItemKind::Enum(_, ref generics) |
@ -2967,7 +2969,7 @@ impl<'a> Resolver<'a> {
binding_map
}
// check that all of the arms in an or-pattern have exactly the
// Checks that all of the arms in an or-pattern have exactly the
// same set of bindings, with the same binding modes for each.
fn check_consistent_bindings(&mut self, pats: &[P<Pat>]) {
if pats.is_empty() {
@ -2987,7 +2989,7 @@ impl<'a> Resolver<'a> {
let map_j = self.binding_mode_map(&q);
for (&key, &binding_i) in &map_i {
if map_j.is_empty() { // Account for missing bindings when
let binding_error = missing_vars // map_j has none.
let binding_error = missing_vars // `map_j` has none.
.entry(key.name)
.or_insert(BindingError {
name: key.name,

View File

@ -47,14 +47,14 @@ pub trait AstConv<'gcx, 'tcx> {
fn get_type_parameter_bounds(&self, span: Span, def_id: DefId)
-> &'tcx ty::GenericPredicates<'tcx>;
/// What lifetime should we use when a lifetime is omitted (and not elided)?
/// Returns the lifetime to use when a lifetime is omitted (and not elided).
fn re_infer(&self, span: Span, _def: Option<&ty::GenericParamDef>)
-> Option<ty::Region<'tcx>>;
/// What type should we use when a type is omitted?
/// Returns the type to use when a type is omitted.
fn ty_infer(&self, span: Span) -> Ty<'tcx>;
/// Same as ty_infer, but with a known type parameter definition.
/// Same as `ty_infer`, but with a known type parameter definition.
fn ty_infer_for_def(&self,
_def: &ty::GenericParamDef,
span: Span) -> Ty<'tcx> {
@ -376,8 +376,10 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o {
}
err.emit();
(provided > required, // `suppress_error`
potential_assoc_types)
(
provided > required, // `suppress_error`
potential_assoc_types,
)
};
if reported_late_bound_region_err.is_none()
@ -556,7 +558,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o {
}
/// Given the type/lifetime/const arguments provided to some path (along with
/// an implicit `Self`, if this is a trait reference) returns the complete
/// an implicit `Self`, if this is a trait reference), returns the complete
/// set of substitutions. This may involve applying defaulted type parameters.
///
/// Note that the type listing given here is *exactly* what the user provided.
@ -708,8 +710,8 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o {
/// are disallowed. Otherwise, they are pushed onto the vector given.
pub fn instantiate_mono_trait_ref(&self,
trait_ref: &hir::TraitRef,
self_ty: Ty<'tcx>)
-> ty::TraitRef<'tcx>
self_ty: Ty<'tcx>
) -> ty::TraitRef<'tcx>
{
self.prohibit_generics(trait_ref.path.segments.split_last().unwrap().1);
@ -724,8 +726,8 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o {
trait_ref: &hir::TraitRef,
self_ty: Ty<'tcx>,
poly_projections: &mut Vec<(ty::PolyProjectionPredicate<'tcx>, Span)>,
speculative: bool)
-> (ty::PolyTraitRef<'tcx>, Option<Vec<Span>>)
speculative: bool,
) -> (ty::PolyTraitRef<'tcx>, Option<Vec<Span>>)
{
let trait_def_id = trait_ref.trait_def_id();
@ -851,13 +853,13 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o {
// trait SubTrait: SuperTrait<int> { }
// trait SuperTrait<A> { type T; }
//
// ... B : SubTrait<T=foo> ...
// ... B: SubTrait<T = foo> ...
// ```
//
// We want to produce `<B as SuperTrait<int>>::T == foo`.
// Find any late-bound regions declared in `ty` that are not
// declared in the trait-ref. These are not wellformed.
// declared in the trait-ref. These are not well-formed.
//
// Example:
//
@ -1716,7 +1718,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o {
let span = path.span;
match path.res {
Res::Def(DefKind::Existential, did) => {
// Check for desugared impl trait.
// Check for desugared `impl Trait`.
assert!(ty::is_impl_trait_defn(tcx, did).is_none());
let item_segment = path.segments.split_last().unwrap();
self.prohibit_generics(item_segment.1);
@ -1767,19 +1769,19 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o {
&tcx.hir().local_def_id_from_hir_id(hir_id)];
tcx.mk_ty_param(index, tcx.hir().name_by_hir_id(hir_id).as_interned_str())
}
Res::SelfTy(_, Some(def_id)) => {
// `Self` in impl (we know the concrete type).
assert_eq!(opt_self_ty, None);
self.prohibit_generics(&path.segments);
// Try to evaluate any array length constants
self.normalize_ty(span, tcx.at(span).type_of(def_id))
}
Res::SelfTy(Some(_), None) => {
// `Self` in trait.
assert_eq!(opt_self_ty, None);
self.prohibit_generics(&path.segments);
tcx.mk_self_type()
}
Res::SelfTy(_, Some(def_id)) => {
// `Self` in impl (we know the concrete type).
assert_eq!(opt_self_ty, None);
self.prohibit_generics(&path.segments);
// Try to evaluate any array length constants.
self.normalize_ty(span, tcx.at(span).type_of(def_id))
}
Res::Def(DefKind::AssocTy, def_id) => {
debug_assert!(path.segments.len() >= 2);
self.prohibit_generics(&path.segments[..path.segments.len() - 2]);
@ -1829,7 +1831,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o {
}
hir::TyKind::Rptr(ref region, ref mt) => {
let r = self.ast_region_to_region(region, None);
debug!("Ref r={:?}", r);
debug!("ast_ty_to_ty: r={:?}", r);
let t = self.ast_ty_to_ty(&mt.ty);
tcx.mk_ref(r, ty::TypeAndMut {ty: t, mutbl: mt.mutbl})
}
@ -1856,7 +1858,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o {
hir::TyKind::Def(item_id, ref lifetimes) => {
let did = tcx.hir().local_def_id_from_hir_id(item_id.id);
self.impl_trait_ty_to_ty(did, lifetimes)
},
}
hir::TyKind::Path(hir::QPath::TypeRelative(ref qself, ref segment)) => {
debug!("ast_ty_to_ty: qself={:?} segment={:?}", qself, segment);
let ty = self.ast_ty_to_ty(qself);
@ -1889,9 +1891,6 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o {
// handled specially and will not descend into this routine.
self.ty_infer(ast_ty.span)
}
hir::TyKind::Err => {
tcx.types.err
}
hir::TyKind::CVarArgs(lt) => {
let va_list_did = match tcx.lang_items().va_list() {
Some(did) => did,
@ -1901,6 +1900,9 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o {
let region = self.ast_region_to_region(&lt, None);
tcx.type_of(va_list_did).subst(tcx, &[region.into()])
}
hir::TyKind::Err => {
tcx.types.err
}
};
self.record_ty(ast_ty.hir_id, result_ty, ast_ty.span);
@ -1979,7 +1981,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o {
_ => bug!()
}
} else {
// Replace all parent lifetimes with 'static.
// Replace all parent lifetimes with `'static`.
match param.kind {
GenericParamDefKind::Lifetime => {
tcx.lifetimes.re_static.into()

View File

@ -173,7 +173,7 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
// here, we would coerce from `!` to `?T`.
let b = self.shallow_resolve(b);
return if self.shallow_resolve(b).is_ty_var() {
// micro-optimization: no need for this if `b` is
// Micro-optimization: no need for this if `b` is
// already resolved in some way.
let diverging_ty = self.next_diverging_ty_var(
TypeVariableOrigin::AdjustmentType(self.cause.span));

View File

@ -518,10 +518,10 @@ pub struct FnCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
/// eventually).
param_env: ty::ParamEnv<'tcx>,
// Number of errors that had been reported when we started
// checking this function. On exit, if we find that *more* errors
// have been reported, we will skip regionck and other work that
// expects the types within the function to be consistent.
/// Number of errors that had been reported when we started
/// checking this function. On exit, if we find that *more* errors
/// have been reported, we will skip regionck and other work that
/// expects the types within the function to be consistent.
err_count_on_creation: usize,
ret_coercion: Option<RefCell<DynamicCoerceMany<'gcx, 'tcx>>>,

View File

@ -450,38 +450,38 @@ impl<'cx, 'gcx, 'tcx> WritebackCx<'cx, 'gcx, 'tcx> {
let generics = self.tcx().generics_of(def_id);
let definition_ty = if generics.parent.is_some() {
// impl trait
// `impl Trait`
self.fcx.infer_opaque_definition_from_instantiation(
def_id,
opaque_defn,
instantiated_ty,
)
} else {
// prevent
// Prevent:
// * `fn foo<T>() -> Foo<T>`
// * `fn foo<T: Bound + Other>() -> Foo<T>`
// from being defining
// from being defining.
// Also replace all generic params with the ones from the existential type
// definition so
// definition so that
// ```rust
// existential type Foo<T>: 'static;
// fn foo<U>() -> Foo<U> { .. }
// ```
// figures out the concrete type with `U`, but the stored type is with `T`
// figures out the concrete type with `U`, but the stored type is with `T`.
instantiated_ty.fold_with(&mut BottomUpFolder {
tcx: self.tcx().global_tcx(),
ty_op: |ty| {
trace!("checking type {:?}", ty);
// find a type parameter
// Find a type parameter.
if let ty::Param(..) = ty.sty {
// look it up in the substitution list
// Look it up in the substitution list.
assert_eq!(opaque_defn.substs.len(), generics.params.len());
for (subst, param) in opaque_defn.substs.iter().zip(&generics.params) {
if let UnpackedKind::Type(subst) = subst.unpack() {
if subst == ty {
// found it in the substitution list, replace with the
// parameter from the existential type
// Found it in the substitution list; replace with the
// parameter from the existential type.
return self.tcx()
.global_tcx()
.mk_ty_param(param.index, param.name);
@ -505,16 +505,15 @@ impl<'cx, 'gcx, 'tcx> WritebackCx<'cx, 'gcx, 'tcx> {
},
lt_op: |region| {
match region {
// Skip static and bound regions: they don't
// require substitution.
// Skip static and bound regions: they don't require substitution.
ty::ReStatic | ty::ReLateBound(..) => region,
_ => {
trace!("checking {:?}", region);
for (subst, p) in opaque_defn.substs.iter().zip(&generics.params) {
if let UnpackedKind::Lifetime(subst) = subst.unpack() {
if subst == region {
// found it in the substitution list, replace with the
// parameter from the existential type
// Found it in the substitution list; replace with the
// parameter from the existential type.
let reg = ty::EarlyBoundRegion {
def_id: p.def_id,
index: p.index,
@ -586,8 +585,8 @@ impl<'cx, 'gcx, 'tcx> WritebackCx<'cx, 'gcx, 'tcx> {
if let ty::Opaque(defin_ty_def_id, _substs) = definition_ty.sty {
if def_id == defin_ty_def_id {
// Concrete type resolved to the existential type itself
// Force a cycle error
// Concrete type resolved to the existential type itself.
// Force a cycle error.
// FIXME(oli-obk): we could just not insert it into `concrete_existential_types`
// which simply would make this use not a defining use.
self.tcx().at(span).type_of(defin_ty_def_id);

View File

@ -1,6 +1,6 @@
//! "Collection" is the process of determining the type and other external
//! details of each item in Rust. Collection is specifically concerned
//! with *interprocedural* things -- for example, for a function
//! with *inter-procedural* things -- for example, for a function
//! definition, collection will figure out the type and signature of the
//! function, but it will not visit the *body* of the function in any way,
//! nor examine type annotations on local variables (that's the job of
@ -233,7 +233,7 @@ impl<'a, 'tcx> AstConv<'tcx, 'tcx> for ItemCtxt<'a, 'tcx> {
}
fn set_tainted_by_errors(&self) {
// no obvious place to track this, just let it go
// no obvious place to track this, so just let it go
}
fn record_ty(&self, _hir_id: hir::HirId, _ty: Ty<'tcx>, _span: Span) {
@ -447,7 +447,7 @@ fn convert_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_id: hir::HirId) {
}
}
// Desugared from `impl Trait` -> visited by the function's return type
// Desugared from `impl Trait`, so visited by the function's return type.
hir::ItemKind::Existential(hir::ExistTy {
impl_trait_fn: Some(_),
..
@ -1218,7 +1218,7 @@ pub fn checked_type_of<'a, 'tcx>(
impl_trait_fn: None,
..
}) => find_existential_constraints(tcx, def_id),
// existential types desugared from impl Trait
// Existential types desugared from `impl Trait`.
ItemKind::Existential(hir::ExistTy {
impl_trait_fn: Some(owner),
..
@ -1472,11 +1472,13 @@ fn find_existential_constraints<'a, 'tcx>(
) -> Ty<'tcx> {
use rustc::hir::{ImplItem, Item, TraitItem};
debug!("find_existential_constraints({:?})", def_id);
struct ConstraintLocator<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
// First found type span, actual type, mapping from the existential type's generic
// parameters to the concrete type's generic parameters
// (first found type span, actual type, mapping from the existential type's generic
// parameters to the concrete type's generic parameters)
//
// The mapping is an index for each use site of a generic parameter in the concrete type
//
@ -1502,18 +1504,21 @@ fn find_existential_constraints<'a, 'tcx>(
let span = self.tcx.def_span(def_id);
// used to quickly look up the position of a generic parameter
let mut index_map: FxHashMap<ty::ParamTy, usize> = FxHashMap::default();
// skip binder is ok, since we only use this to find generic parameters and their
// positions.
// Skipping binder is ok, since we only use this to find generic parameters and
// their positions.
for (idx, subst) in substs.iter().enumerate() {
if let UnpackedKind::Type(ty) = subst.unpack() {
if let ty::Param(p) = ty.sty {
if index_map.insert(p, idx).is_some() {
// there was already an entry for `p`, meaning a generic parameter
// was used twice
// There was already an entry for `p`, meaning a generic parameter
// was used twice.
self.tcx.sess.span_err(
span,
&format!("defining existential type use restricts existential \
type by using the generic parameter `{}` twice", p.name),
&format!(
"defining existential type use restricts existential \
type by using the generic parameter `{}` twice",
p.name
),
);
return;
}
@ -1528,8 +1533,8 @@ fn find_existential_constraints<'a, 'tcx>(
}
}
}
// compute the index within the existential type for each generic parameter used in
// the concrete type
// Compute the index within the existential type for each generic parameter used in
// the concrete type.
let indices = concrete_type
.subst(self.tcx, substs)
.walk()
@ -1607,7 +1612,7 @@ fn find_existential_constraints<'a, 'tcx>(
}
fn visit_item(&mut self, it: &'tcx Item) {
let def_id = self.tcx.hir().local_def_id_from_hir_id(it.hir_id);
// the existential type itself or its children are not within its reveal scope
// The existential type itself or its children are not within its reveal scope.
if def_id != self.def_id {
self.check(def_id);
intravisit::walk_item(self, it);
@ -1615,7 +1620,7 @@ fn find_existential_constraints<'a, 'tcx>(
}
fn visit_impl_item(&mut self, it: &'tcx ImplItem) {
let def_id = self.tcx.hir().local_def_id_from_hir_id(it.hir_id);
// the existential type itself or its children are not within its reveal scope
// The existential type itself or its children are not within its reveal scope.
if def_id != self.def_id {
self.check(def_id);
intravisit::walk_impl_item(self, it);
@ -1960,7 +1965,7 @@ fn explicit_predicates_of<'a, 'tcx>(
let substs = InternalSubsts::identity_for_item(tcx, def_id);
let opaque_ty = tcx.mk_opaque(def_id, substs);
// Collect the bounds, i.e., the `A+B+'c` in `impl A+B+'c`.
// Collect the bounds, i.e., the `A + B + 'c` in `impl A + B + 'c`.
let bounds = compute_bounds(
&icx,
opaque_ty,
@ -2006,7 +2011,7 @@ fn explicit_predicates_of<'a, 'tcx>(
let substs = InternalSubsts::identity_for_item(tcx, def_id);
let opaque_ty = tcx.mk_opaque(def_id, substs);
// Collect the bounds, i.e., the `A+B+'c` in `impl A+B+'c`.
// Collect the bounds, i.e., the `A + B + 'c` in `impl A + B + 'c`.
let bounds = compute_bounds(
&icx,
opaque_ty,
@ -2016,7 +2021,7 @@ fn explicit_predicates_of<'a, 'tcx>(
);
if impl_trait_fn.is_some() {
// impl Trait
// opaque types
return tcx.arena.alloc(ty::GenericPredicates {
parent: None,
predicates: bounds.predicates(tcx, opaque_ty),
@ -2093,7 +2098,7 @@ fn explicit_predicates_of<'a, 'tcx>(
}
// Collect the predicates that were written inline by the user on each
// type parameter (e.g., `<T:Foo>`).
// type parameter (e.g., `<T: Foo>`).
for param in &ast_generics.params {
if let GenericParamKind::Type { .. } = param.kind {
let name = param.name.ident().as_interned_str();
@ -2106,7 +2111,7 @@ fn explicit_predicates_of<'a, 'tcx>(
}
}
// Add in the bounds that appear in the where-clause
// Add in the bounds that appear in the where-clause.
let where_clause = &ast_generics.where_clause;
for predicate in &where_clause.predicates {
match predicate {
@ -2422,7 +2427,7 @@ fn from_target_feature(
continue;
}
// Must be of the form `enable = "..."` ( a string)
// Must be of the form `enable = "..."` (a string).
let value = match item.value_str() {
Some(value) => value,
None => {
@ -2545,7 +2550,7 @@ fn codegen_fn_attrs<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefId) -> Codegen
if tcx.is_foreign_item(id) {
codegen_fn_attrs.flags |= CodegenFnAttrFlags::FFI_RETURNS_TWICE;
} else {
// `#[ffi_returns_twice]` is only allowed `extern fn`s
// `#[ffi_returns_twice]` is only allowed on `extern fn`s.
struct_span_err!(
tcx.sess,
attr.span,

View File

@ -379,8 +379,8 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
/// A quasi-deprecated helper used in rustdoc and clippy to get
/// the type from a HIR node.
pub fn hir_ty_to_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, hir_ty: &hir::Ty) -> Ty<'tcx> {
// In case there are any projections etc, find the "environment"
// def-id that will be used to determine the traits/predicates in
// In case there are any projections, etc., find the "environment"
// def-ID that will be used to determine the traits/predicates in
// scope. This is derived from the enclosing item-like thing.
let env_node_id = tcx.hir().get_parent_item(hir_ty.hir_id);
let env_def_id = tcx.hir().local_def_id_from_hir_id(env_node_id);
@ -391,8 +391,8 @@ pub fn hir_ty_to_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, hir_ty: &hir::Ty) ->
pub fn hir_trait_to_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, hir_trait: &hir::TraitRef)
-> (ty::PolyTraitRef<'tcx>, Vec<(ty::PolyProjectionPredicate<'tcx>, Span)>) {
// In case there are any projections etc, find the "environment"
// def-id that will be used to determine the traits/predicates in
// In case there are any projections, etc., find the "environment"
// def-ID that will be used to determine the traits/predicates in
// scope. This is derived from the enclosing item-like thing.
let env_hir_id = tcx.hir().get_parent_item(hir_trait.hir_ref_id);
let env_def_id = tcx.hir().local_def_id_from_hir_id(env_hir_id);

View File

@ -2443,12 +2443,12 @@ pub struct PolyTrait {
pub generic_params: Vec<GenericParamDef>,
}
/// A representation of a Type suitable for hyperlinking purposes. Ideally one can get the original
/// type out of the AST/TyCtxt given one of these, if more information is needed. Most importantly
/// it does not preserve mutability or boxes.
/// A representation of a type suitable for hyperlinking purposes. Ideally, one can get the original
/// type out of the AST/`TyCtxt` given one of these, if more information is needed. Most
/// importantly, it does not preserve mutability or boxes.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum Type {
/// Structs/enums/traits (most that'd be an `hir::TyKind::Path`).
/// Structs/enums/traits (most that would be an `hir::TyKind::Path`).
ResolvedPath {
path: Path,
param_names: Option<Vec<GenericBound>>,
@ -2462,7 +2462,7 @@ pub enum Type {
/// Primitives are the fixed-size numeric types (plus int/usize/float), char,
/// arrays, slices, and tuples.
Primitive(PrimitiveType),
/// extern "ABI" fn
/// `extern "ABI" fn`
BareFunction(Box<BareFunctionDecl>),
Tuple(Vec<Type>),
Slice(Box<Type>),
@ -2477,17 +2477,17 @@ pub enum Type {
type_: Box<Type>,
},
// <Type as Trait>::Name
// `<Type as Trait>::Name`
QPath {
name: String,
self_type: Box<Type>,
trait_: Box<Type>
},
// _
// `_`
Infer,
// impl TraitA+TraitB
// `impl TraitA + TraitB + ...`
ImplTrait(Vec<GenericBound>),
}
@ -2747,7 +2747,6 @@ impl Clean<Type> for hir::Ty {
match self.node {
TyKind::Never => Never,
TyKind::CVarArgs(_) => CVarArgs,
TyKind::Ptr(ref m) => RawPointer(m.mutbl.clean(cx), box m.ty.clean(cx)),
TyKind::Rptr(ref l, ref m) => {
let lifetime = if l.is_elided() {
@ -2933,12 +2932,13 @@ impl Clean<Type> for hir::Ty {
}
ResolvedPath { path, param_names: Some(bounds), did, is_generic, }
}
_ => Infer // shouldn't happen
_ => Infer, // shouldn't happen
}
}
TyKind::BareFn(ref barefn) => BareFunction(box barefn.clean(cx)),
TyKind::Infer | TyKind::Err => Infer,
TyKind::Typeof(..) => panic!("Unimplemented type {:?}", self.node),
TyKind::CVarArgs(_) => CVarArgs,
}
}
}

View File

@ -18,7 +18,6 @@ use crate::core::DocAccessLevels;
use crate::html::item_type::ItemType;
use crate::html::render::{self, cache, CURRENT_LOCATION_KEY};
/// Helper to render an optional visibility with a space after it (if the
/// visibility is present)
#[derive(Copy, Clone)]
@ -561,7 +560,7 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter<'_>, use_absolute: bool) ->
if param_names.is_some() {
f.write_str("dyn ")?;
}
// Paths like T::Output and Self::Output should be rendered with all segments
// Paths like `T::Output` and `Self::Output` should be rendered with all segments.
resolved_path(f, did, path, is_generic, use_absolute)?;
tybounds(f, param_names)
}
@ -585,7 +584,7 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter<'_>, use_absolute: bool) ->
&[] => primitive_link(f, PrimitiveType::Unit, "()"),
&[ref one] => {
primitive_link(f, PrimitiveType::Tuple, "(")?;
//carry f.alternate() into this display w/o branching manually
// Carry `f.alternate()` into this display w/o branching manually.
fmt::Display::fmt(one, f)?;
primitive_link(f, PrimitiveType::Tuple, ",)")
}
@ -638,7 +637,7 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter<'_>, use_absolute: bool) ->
"&amp;".to_string()
};
match **ty {
clean::Slice(ref bt) => { // BorrowedRef{ ... Slice(T) } is &[T]
clean::Slice(ref bt) => { // `BorrowedRef{ ... Slice(T) }` is `&[T]`
match **bt {
clean::Generic(_) => {
if f.alternate() {

View File

@ -171,7 +171,8 @@ fn default_hook(info: &PanicInfo<'_>) {
}
};
let location = info.location().unwrap(); // The current implementation always returns Some
// The current implementation always returns `Some`.
let location = info.location().unwrap();
let msg = match info.payload().downcast_ref::<&'static str>() {
Some(s) => *s,
@ -196,7 +197,7 @@ fn default_hook(info: &PanicInfo<'_>) {
if let Some(format) = log_backtrace {
let _ = backtrace::print(err, format);
} else if FIRST_PANIC.compare_and_swap(true, false, Ordering::SeqCst) {
let _ = writeln!(err, "note: Run with `RUST_BACKTRACE=1` \
let _ = writeln!(err, "note: run with `RUST_BACKTRACE=1` \
environment variable to display a backtrace.");
}
}

View File

@ -173,7 +173,7 @@ impl<'a, 'b> Printer<'a, 'b> {
Some(symbol) => {
match self.format {
PrintFormat::Full => write!(self.out, "{}", symbol)?,
// strip the trailing hash if short mode
// Strip the trailing hash if short mode.
PrintFormat::Short => write!(self.out, "{:#}", symbol)?,
}
}

View File

@ -213,7 +213,7 @@ pub struct ParenthesizedArgs {
/// Overall span
pub span: Span,
/// `(A,B)`
/// `(A, B)`
pub inputs: Vec<P<Ty>>,
/// `C`
@ -1840,7 +1840,7 @@ impl Arg {
}
}
/// Header (not the body) of a function declaration.
/// A header (not the body) of a function declaration.
///
/// E.g., `fn foo(bar: baz)`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]

View File

@ -10,7 +10,7 @@ use rustc_target::spec::abi::Abi;
use syntax_pos::{Pos, Span};
pub trait AstBuilder {
// paths
// Paths
fn path(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path;
fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path;
fn path_global(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path;
@ -69,7 +69,7 @@ pub trait AstBuilder {
bounds: ast::GenericBounds)
-> ast::GenericParam;
// statements
// Statements
fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt;
fn stmt_semi(&self, expr: P<ast::Expr>) -> ast::Stmt;
fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident, ex: P<ast::Expr>) -> ast::Stmt;
@ -83,11 +83,11 @@ pub trait AstBuilder {
fn stmt_let_type_only(&self, span: Span, ty: P<ast::Ty>) -> ast::Stmt;
fn stmt_item(&self, sp: Span, item: P<ast::Item>) -> ast::Stmt;
// blocks
// Blocks
fn block(&self, span: Span, stmts: Vec<ast::Stmt>) -> P<ast::Block>;
fn block_expr(&self, expr: P<ast::Expr>) -> P<ast::Block>;
// expressions
// Expressions
fn expr(&self, span: Span, node: ast::ExprKind) -> P<ast::Expr>;
fn expr_path(&self, path: ast::Path) -> P<ast::Expr>;
fn expr_qpath(&self, span: Span, qself: ast::QSelf, path: ast::Path) -> P<ast::Expr>;
@ -194,12 +194,12 @@ pub trait AstBuilder {
fn lambda_stmts_1(&self, span: Span, stmts: Vec<ast::Stmt>,
ident: ast::Ident) -> P<ast::Expr>;
// items
// Items
fn item(&self, span: Span,
name: Ident, attrs: Vec<ast::Attribute> , node: ast::ItemKind) -> P<ast::Item>;
fn arg(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::Arg;
// FIXME unused self
// FIXME: unused `self`
fn fn_decl(&self, inputs: Vec<ast::Arg> , output: ast::FunctionRetTy) -> P<ast::FnDecl>;
fn item_fn_poly(&self,
@ -552,7 +552,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
}
}
// Generate `let _: Type;`, usually used for type assertions.
// Generates `let _: Type;`, which is usually used for type assertions.
fn stmt_let_type_only(&self, span: Span, ty: P<ast::Ty>) -> ast::Stmt {
let local = P(ast::Local {
pat: self.pat_wild(span),
@ -606,7 +606,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
self.expr(path.span, ast::ExprKind::Path(None, path))
}
/// Constructs a QPath expression.
/// Constructs a `QPath` expression.
fn expr_qpath(&self, span: Span, qself: ast::QSelf, path: ast::Path) -> P<ast::Expr> {
self.expr(span, ast::ExprKind::Path(Some(qself), path))
}
@ -736,7 +736,6 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
self.expr(sp, ast::ExprKind::Cast(expr, ty))
}
fn expr_some(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
let some = self.std_path(&[sym::option, sym::Option, sym::Some]);
self.expr_call_global(sp, some, vec![expr])
@ -748,12 +747,10 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
self.expr_path(none)
}
fn expr_break(&self, sp: Span) -> P<ast::Expr> {
self.expr(sp, ast::ExprKind::Break(None, None))
}
fn expr_tuple(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
self.expr(sp, ast::ExprKind::Tup(exprs))
}
@ -797,22 +794,22 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
let binding_pat = self.pat_ident(sp, binding_variable);
let binding_expr = self.expr_ident(sp, binding_variable);
// Ok(__try_var) pattern
// `Ok(__try_var)` pattern
let ok_pat = self.pat_tuple_struct(sp, ok_path, vec![binding_pat.clone()]);
// Err(__try_var) (pattern and expression resp.)
// `Err(__try_var)` (pattern and expression respectively)
let err_pat = self.pat_tuple_struct(sp, err_path.clone(), vec![binding_pat]);
let err_inner_expr = self.expr_call(sp, self.expr_path(err_path),
vec![binding_expr.clone()]);
// return Err(__try_var)
// `return Err(__try_var)`
let err_expr = self.expr(sp, ast::ExprKind::Ret(Some(err_inner_expr)));
// Ok(__try_var) => __try_var
// `Ok(__try_var) => __try_var`
let ok_arm = self.arm(sp, vec![ok_pat], binding_expr);
// Err(__try_var) => return Err(__try_var)
// `Err(__try_var) => return Err(__try_var)`
let err_arm = self.arm(sp, vec![err_pat], err_expr);
// match head { Ok() => ..., Err() => ... }
// `match head { Ok() => ..., Err() => ... }`
self.expr_match(sp, head, vec![ok_arm, err_arm])
}
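// A hand-written approximation of the `match` that the builder calls above assemble
// (the enclosing function is only scaffolding to make the sketch self-contained;
// `expansion_sketch` and `unwrapped` are illustrative names, not part of this code):
fn expansion_sketch<T, E>(head: Result<T, E>) -> Result<T, E> {
    let unwrapped = match head {
        Ok(__try_var) => __try_var,
        Err(__try_var) => return Err(__try_var),
    };
    Ok(unwrapped)
}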
@ -972,7 +969,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
}
}
// FIXME unused self
// FIXME: unused `self`
fn fn_decl(&self, inputs: Vec<ast::Arg>, output: ast::FunctionRetTy) -> P<ast::FnDecl> {
P(ast::FnDecl {
inputs,

View File

@ -191,24 +191,24 @@ enum PrevTokenKind {
Other,
}
/* ident is handled by common.rs */
// NOTE: `Ident`s are handled by `common.rs`.
#[derive(Clone)]
pub struct Parser<'a> {
pub sess: &'a ParseSess,
/// the current token:
/// The current token.
pub token: token::Token,
/// the span of the current token:
/// The span of the current token.
pub span: Span,
/// the span of the previous token:
meta_var_span: Option<Span>,
/// The span of the previous token.
pub prev_span: Span,
/// the previous token kind
/// The kind of the previous token.
prev_token_kind: PrevTokenKind,
restrictions: Restrictions,
/// Used to determine the path to externally loaded source files
/// Used to determine the path to externally loaded source files.
crate directory: Directory<'a>,
/// Whether to parse sub-modules in other files.
/// `true` to parse sub-modules in other files.
pub recurse_into_file_modules: bool,
/// Name of the root module this parser originated from. If `None`, then the
/// name is not known. This does not change while the parser is descending
@ -217,7 +217,7 @@ pub struct Parser<'a> {
crate expected_tokens: Vec<TokenType>,
crate token_cursor: TokenCursor,
desugar_doc_comments: bool,
/// Whether we should configure out of line modules as we parse.
/// `true` if we should configure out-of-line modules as we parse.
pub cfg_mods: bool,
/// This field is used to keep track of how many left angle brackets we have seen. This is
/// required in order to detect extra leading left angle brackets (`<` characters) and error
@ -2680,8 +2680,7 @@ impl<'a> Parser<'a> {
}
}
// parse a stream of tokens into a list of TokenTree's,
// up to EOF.
/// Parses a stream of tokens into a list of `TokenTree`s, up to EOF.
pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
let mut tts = Vec::new();
while self.token != token::Eof {
@ -5344,9 +5343,10 @@ impl<'a> Parser<'a> {
// Parse optional `for<'a, 'b>`.
// This `for` is parsed greedily and applies to the whole predicate,
// the bounded type can have its own `for` applying only to it.
// Example 1: for<'a> Trait1<'a>: Trait2<'a /*ok*/>
// Example 2: (for<'a> Trait1<'a>): Trait2<'a /*not ok*/>
// Example 3: for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /*ok*/, 'b /*not ok*/>
// Examples:
// * `for<'a> Trait1<'a>: Trait2<'a /* ok */>`
// * `(for<'a> Trait1<'a>): Trait2<'a /* not ok */>`
// * `for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /* ok */, 'b /* not ok */>`
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
// Parse type with mandatory colon and (possibly empty) bounds,
@ -5478,17 +5478,17 @@ impl<'a> Parser<'a> {
this.look_ahead(n + 1, |t| t != &token::ModSep)
};
// Parse optional self parameter of a method.
// Only a limited set of initial token sequences is considered self parameters, anything
// Parse optional `self` parameter of a method.
// Only a limited set of initial token sequences is treated as a `self` parameter; anything
// else is parsed as a normal function parameter list, so some lookahead is required.
let eself_lo = self.span;
let (eself, eself_ident, eself_hi) = match self.token {
token::BinOp(token::And) => {
// &self
// &mut self
// &'lt self
// &'lt mut self
// &not_self
// `&self`
// `&mut self`
// `&'lt self`
// `&'lt mut self`
// `&not_self`
(if isolated_self(self, 1) {
self.bump();
SelfKind::Region(None, Mutability::Immutable)
@ -5514,10 +5514,10 @@ impl<'a> Parser<'a> {
}, expect_ident(self), self.prev_span)
}
token::BinOp(token::Star) => {
// *self
// *const self
// *mut self
// *not_self
// `*self`
// `*const self`
// `*mut self`
// `*not_self`
// Emit special error for `self` cases.
let msg = "cannot pass `self` by raw pointer";
(if isolated_self(self, 1) {
@ -5540,8 +5540,8 @@ impl<'a> Parser<'a> {
}
token::Ident(..) => {
if isolated_self(self, 0) {
// self
// self: TYPE
// `self`
// `self: TYPE`
let eself_ident = expect_ident(self);
let eself_hi = self.prev_span;
(if self.eat(&token::Colon) {
@ -5552,8 +5552,8 @@ impl<'a> Parser<'a> {
}, eself_ident, eself_hi)
} else if self.token.is_keyword(kw::Mut) &&
isolated_self(self, 1) {
// mut self
// mut self: TYPE
// `mut self`
// `mut self: TYPE`
self.bump();
let eself_ident = expect_ident(self);
let eself_hi = self.prev_span;
@ -5580,7 +5580,7 @@ impl<'a> Parser<'a> {
{
self.expect(&token::OpenDelim(token::Paren))?;
// Parse optional self argument
// Parse optional self argument.
let self_arg = self.parse_self_arg()?;
// Parse the rest of the function parameter list.

View File

@ -1715,7 +1715,7 @@ impl<'a> State<'a> {
match els {
Some(_else) => {
match _else.node {
// "another else-if"
// Another `else if` block.
ast::ExprKind::If(ref i, ref then, ref e) => {
self.cbox(INDENT_UNIT - 1)?;
self.ibox(0)?;
@ -1725,7 +1725,7 @@ impl<'a> State<'a> {
self.print_block(then)?;
self.print_else(e.as_ref().map(|e| &**e))
}
// "another else-if-let"
// Another `else if let` block.
ast::ExprKind::IfLet(ref pats, ref expr, ref then, ref e) => {
self.cbox(INDENT_UNIT - 1)?;
self.ibox(0)?;
@ -1738,14 +1738,14 @@ impl<'a> State<'a> {
self.print_block(then)?;
self.print_else(e.as_ref().map(|e| &**e))
}
// "final else"
// Final `else` block.
ast::ExprKind::Block(ref b, _) => {
self.cbox(INDENT_UNIT - 1)?;
self.ibox(0)?;
self.s.word(" else ")?;
self.print_block(b)
}
// BLEAH, constraints would be great here
// Constraints would be great here!
_ => {
panic!("print_if saw if with weird alternative");
}

View File

@ -57,7 +57,8 @@ impl<T: 'static> P<T> {
{
f(*self.ptr)
}
/// Equivalent to and_then(|x| x)
/// Equivalent to `and_then(|x| x)`.
pub fn into_inner(self) -> T {
*self.ptr
}

View File

@ -499,7 +499,7 @@ pub fn walk_generic_param<'a, V: Visitor<'a>>(visitor: &mut V, param: &'a Generi
walk_list!(visitor, visit_attribute, param.attrs.iter());
walk_list!(visitor, visit_param_bound, &param.bounds);
match param.kind {
GenericParamKind::Lifetime => {}
GenericParamKind::Lifetime => (),
GenericParamKind::Type { ref default } => walk_list!(visitor, visit_ty, default),
GenericParamKind::Const { ref ty, .. } => visitor.visit_ty(ty),
}

View File

@ -922,8 +922,7 @@ impl<'a> MethodDef<'a> {
arg_types: Vec<(Ident, P<ast::Ty>)>,
body: P<Expr>)
-> ast::ImplItem {
// create the generics that aren't for Self
// Create the generics that aren't for `Self`.
let fn_generics = self.generics.to_generics(cx, trait_.span, type_ident, generics);
let args = {

View File

@ -245,8 +245,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> {
// First up, make sure we're checking a bare function. If we're not then
// we're just not interested in this item.
//
// If we find one, try to locate a `#[proc_macro_derive]` attribute on
// it.
// If we find one, try to locate a `#[proc_macro_derive]` attribute on it.
let is_fn = match item.node {
ast::ItemKind::Fn(..) => true,
_ => false,
@ -259,7 +258,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> {
if let Some(prev_attr) = found_attr {
let msg = if attr.path.segments[0].ident.name ==
prev_attr.path.segments[0].ident.name {
format!("Only one `#[{}]` attribute is allowed on any given function",
format!("only one `#[{}]` attribute is allowed on any given function",
attr.path)
} else {
format!("`#[{}]` and `#[{}]` attributes cannot both be applied \
@ -267,7 +266,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> {
};
self.handler.struct_span_err(attr.span, &msg)
.span_note(prev_attr.span, "Previous attribute here")
.span_note(prev_attr.span, "previous attribute here")
.emit();
return;

View File

@ -1,23 +1,21 @@
// ignore-cross-compile
// The general idea of this test is to enumerate all "interesting" expressions and check that
// `parse(print(e)) == e` for all `e`. Here's what's interesting, for the purposes of this test:
// `parse(print(e)) == e` for all `e`. Here's what's interesting, for the purposes of this test:
//
// 1. The test focuses on expression nesting, because interactions between different expression
// types are harder to test manually than single expression types in isolation.
// 1. The test focuses on expression nesting, because interactions between different expression
// types are harder to test manually than single expression types in isolation.
//
// 2. The test only considers expressions of at most two nontrivial nodes. So it will check `x +
// x` and `x + (x - x)` but not `(x * x) + (x - x)`. The assumption here is that the correct
// handling of an expression might depend on the expression's parent, but doesn't depend on its
// siblings or any more distant ancestors.
// 2. The test only considers expressions of at most two nontrivial nodes. So it will check `x +
// x` and `x + (x - x)` but not `(x * x) + (x - x)`. The assumption here is that the correct
// handling of an expression might depend on the expression's parent, but doesn't depend on its
// siblings or any more distant ancestors.
//
// 3. The test only checks certain expression kinds. The assumption is that similar expression
// types, such as `if` and `while` or `+` and `-`, will be handled identically in the printer
// and parser. So if all combinations of exprs involving `if` work correctly, then combinations
// 3. The test only checks certain expression kinds. The assumption is that similar expression
// types, such as `if` and `while` or `+` and `-`, will be handled identically in the printer
// and parser. So if all combinations of exprs involving `if` work correctly, then combinations
// using `while`, `if let`, and so on will likely work as well.
#![feature(rustc_private)]
extern crate rustc_data_structures;
@ -155,9 +153,9 @@ fn iter_exprs(depth: usize, f: &mut dyn FnMut(P<Expr>)) {
}
// Folders for manipulating the placement of `Paren` nodes. See below for why this is needed.
// Folders for manipulating the placement of `Paren` nodes. See below for why this is needed.
/// MutVisitor that removes all `ExprKind::Paren` nodes.
/// `MutVisitor` that removes all `ExprKind::Paren` nodes.
struct RemoveParens;
impl MutVisitor for RemoveParens {
@ -171,7 +169,7 @@ impl MutVisitor for RemoveParens {
}
/// MutVisitor that inserts `ExprKind::Paren` nodes around every `Expr`.
/// `MutVisitor` that inserts `ExprKind::Paren` nodes around every `Expr`.
struct AddParens;
impl MutVisitor for AddParens {
@ -205,8 +203,8 @@ fn run() {
// We want to know if `parsed` is structurally identical to `e`, ignoring trivial
// differences like placement of `Paren`s or the exact ranges of node spans.
// Unfortunately, there is no easy way to make this comparison. Instead, we add `Paren`s
// everywhere we can, then pretty-print. This should give an unambiguous representation of
// Unfortunately, there is no easy way to make this comparison. Instead, we add `Paren`s
// everywhere we can, then pretty-print. This should give an unambiguous representation of
// each `Expr`, and it bypasses nearly all of the parenthesization logic, so we aren't
// relying on the correctness of the very thing we're testing.
RemoveParens.visit_expr(&mut e);
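A self-contained sketch of that normalization idea on a toy expression type (the tiny grammar and names below are illustrative, not the test's real visitors):

```rust
// Toy expression tree: numbers, addition, and explicit parens.
enum Expr {
    Num(i32),
    Add(Box<Expr>, Box<Expr>),
    Paren(Box<Expr>),
}

// Strip every `Paren` node.
fn remove_parens(e: Expr) -> Expr {
    match e {
        Expr::Num(n) => Expr::Num(n),
        Expr::Add(a, b) => Expr::Add(Box::new(remove_parens(*a)), Box::new(remove_parens(*b))),
        Expr::Paren(inner) => remove_parens(*inner),
    }
}

// Wrap every node in a `Paren`, so printing is unambiguous.
fn add_parens(e: Expr) -> Expr {
    let wrapped = match e {
        Expr::Num(n) => Expr::Num(n),
        Expr::Add(a, b) => Expr::Add(Box::new(add_parens(*a)), Box::new(add_parens(*b))),
        Expr::Paren(inner) => return add_parens(*inner),
    };
    Expr::Paren(Box::new(wrapped))
}

fn print(e: &Expr) -> String {
    match e {
        Expr::Num(n) => n.to_string(),
        Expr::Add(a, b) => format!("{} + {}", print(a), print(b)),
        Expr::Paren(inner) => format!("({})", print(inner)),
    }
}

fn main() {
    // Structurally equal modulo parens: `1 + 2` vs. `(1 + (2))`.
    let a = Expr::Add(Box::new(Expr::Num(1)), Box::new(Expr::Num(2)));
    let b = Expr::Paren(Box::new(Expr::Add(
        Box::new(Expr::Num(1)),
        Box::new(Expr::Paren(Box::new(Expr::Num(2)))),
    )));
    let normalize = |e: Expr| print(&add_parens(remove_parens(e)));
    assert_eq!(normalize(a), normalize(b));
}
```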

View File

@ -1,14 +1,15 @@
// run-pass
#![allow(unused_assignments)]
// Test that duplicate auto trait bounds in trait objects don't create new types.
#[allow(unused_assignments)]
use std::marker::Send as SendAlias;
// A dummy trait for the non-auto trait.
trait Trait {}
// A dummy struct to implement Trait, Send, and .
// A dummy struct to implement `Trait` and `Send`.
struct Struct;
impl Trait for Struct {}
@ -23,12 +24,12 @@ impl dyn Trait + Send + Send {
}
fn main() {
// 1. Moving into a variable with more Sends and back.
// 1. Moving into a variable with more `Send`s and back.
let mut dyn_trait_send = Box::new(Struct) as Box<dyn Trait + Send>;
let dyn_trait_send_send: Box<dyn Trait + Send + Send> = dyn_trait_send;
dyn_trait_send = dyn_trait_send_send;
// 2. Calling methods with different number of Sends.
// 2. Calling methods with different number of `Send`s.
let dyn_trait_send = Box::new(Struct) as Box<dyn Trait + Send>;
takes_dyn_trait_send_send(dyn_trait_send);

View File

@ -9,7 +9,7 @@ mod boo {
}
}
// don't actually know the type here
// We don't actually know the type here.
fn bomp2() {
let _: &str = bomp(); //~ ERROR mismatched types

View File

@ -1,44 +1,44 @@
// Test ignored_generic_bounds lint warning about bounds in type aliases
// Test the `type_alias_bounds` lint, which warns about bounds in type aliases.
// compile-pass
#![allow(dead_code)]
use std::rc::Rc;
type SVec<T: Send+Send> = Vec<T>;
type SVec<T: Send + Send> = Vec<T>;
//~^ WARN bounds on generic parameters are not enforced in type aliases [type_alias_bounds]
type S2Vec<T> where T: Send = Vec<T>;
//~^ WARN where clauses are not enforced in type aliases [type_alias_bounds]
type VVec<'b, 'a: 'b+'b> = (&'b u32, Vec<&'a i32>);
type VVec<'b, 'a: 'b + 'b> = (&'b u32, Vec<&'a i32>);
//~^ WARN bounds on generic parameters are not enforced in type aliases [type_alias_bounds]
type WVec<'b, T: 'b+'b> = (&'b u32, Vec<T>);
type WVec<'b, T: 'b + 'b> = (&'b u32, Vec<T>);
//~^ WARN bounds on generic parameters are not enforced in type aliases [type_alias_bounds]
type W2Vec<'b, T> where T: 'b, T: 'b = (&'b u32, Vec<T>);
//~^ WARN where clauses are not enforced in type aliases [type_alias_bounds]
static STATIC : u32 = 0;
static STATIC: u32 = 0;
fn foo<'a>(y: &'a i32) {
// If any of the bounds above would matter, the code below would be rejected.
// This can be seen when replacing the type aliases above by newtype structs.
// (The type aliases have no unused parameters to make that a valid transformation.)
let mut x : SVec<_> = Vec::new();
let mut x: SVec<_> = Vec::new();
x.push(Rc::new(42)); // is not send
let mut x : S2Vec<_> = Vec::new();
x.push(Rc::new(42)); // is not send
let mut x: S2Vec<_> = Vec::new();
x.push(Rc::new(42)); // is not `Send`
let mut x : VVec<'static, 'a> = (&STATIC, Vec::new());
x.1.push(y); // 'a: 'static does not hold
let mut x: VVec<'static, 'a> = (&STATIC, Vec::new());
x.1.push(y); // `'a: 'static` does not hold
let mut x : WVec<'static, &'a i32> = (&STATIC, Vec::new());
x.1.push(y); // &'a i32: 'static does not hold
let mut x: WVec<'static, &'a i32> = (&STATIC, Vec::new());
x.1.push(y); // `&'a i32: 'static` does not hold
let mut x : W2Vec<'static, &'a i32> = (&STATIC, Vec::new());
x.1.push(y); // &'a i32: 'static does not hold
let mut x: W2Vec<'static, &'a i32> = (&STATIC, Vec::new());
x.1.push(y); // `&'a i32: 'static` does not hold
}
// Bounds are not checked either, i.e., the definition is not necessarily well-formed
// Bounds are not checked either; i.e., the definition is not necessarily well-formed.
struct Sendable<T: Send>(T);
type MySendable<T> = Sendable<T>; // no error here!
@ -47,9 +47,9 @@ trait Bound { type Assoc; }
type T1<U: Bound> = U::Assoc; //~ WARN not enforced in type aliases
type T2<U> where U: Bound = U::Assoc; //~ WARN not enforced in type aliases
// This errors
// type T3<U> = U::Assoc;
// Do this instead
// This errors:
// `type T3<U> = U::Assoc;`
// Do this instead:
type T4<U> = <U as Bound>::Assoc;
// Make sure the help about associated types is not shown incorrectly.

View File

@ -1,8 +1,18 @@
warning: duplicate auto trait `::marker[0]::Send[0]` found in type parameter bounds
--> $DIR/type-alias-bounds.rs:8:14
|
LL | type SVec<T: Send + Send> = Vec<T>;
| ^^^^ ^^^^ subsequent use of auto trait
| |
| first use of auto trait
|
= note: #[warn(duplicate_auto_traits_in_bounds)] on by default
warning: bounds on generic parameters are not enforced in type aliases
--> $DIR/type-alias-bounds.rs:8:14
|
LL | type SVec<T: Send+Send> = Vec<T>;
| ^^^^ ^^^^
LL | type SVec<T: Send + Send> = Vec<T>;
| ^^^^ ^^^^
|
= note: #[warn(type_alias_bounds)] on by default
= help: the bound will not be checked when the type alias is used, and should be removed
@ -18,16 +28,16 @@ LL | type S2Vec<T> where T: Send = Vec<T>;
warning: bounds on generic parameters are not enforced in type aliases
--> $DIR/type-alias-bounds.rs:12:19
|
LL | type VVec<'b, 'a: 'b+'b> = (&'b u32, Vec<&'a i32>);
| ^^ ^^
LL | type VVec<'b, 'a: 'b + 'b> = (&'b u32, Vec<&'a i32>);
| ^^ ^^
|
= help: the bound will not be checked when the type alias is used, and should be removed
warning: bounds on generic parameters are not enforced in type aliases
--> $DIR/type-alias-bounds.rs:14:18
|
LL | type WVec<'b, T: 'b+'b> = (&'b u32, Vec<T>);
| ^^ ^^
LL | type WVec<'b, T: 'b + 'b> = (&'b u32, Vec<T>);
| ^^ ^^
|
= help: the bound will not be checked when the type alias is used, and should be removed