Auto merge of #73235 - Dylan-DPC:rollup-zp8oxhg, r=Dylan-DPC

Rollup of 11 pull requests

Successful merges:

 - #72380 (Fix `is_const_context`, update `check_for_cast`)
 - #72941 (Ensure stack when building MIR for matches)
 - #72976 (Clean up E0642 explanation)
 - #73080 (doc/rustdoc: Fix incorrect external_doc feature flag)
 - #73155 (save_analysis: better handle paths and functions signature)
 - #73164 (Add new E0762 error code)
 - #73172 (Fix more clippy warnings)
 - #73181 (Automatically prioritize unsoundness issues)
 - #73183 (Support proc macros in intra doc link resolution)
 - #73208 (Fix doctest template)
 - #73219 (x.py: with --json-output, forward cargo's JSON)

Failed merges:

r? @ghost
Committed by bors on 2020-06-11 11:17:37 +00:00 (commit 50c0192c64)
44 changed files with 5815 additions and 423 deletions

View File

@ -983,7 +983,13 @@ pub fn stream_cargo(
for line in stdout.lines() {
let line = t!(line);
match serde_json::from_str::<CargoMessage<'_>>(&line) {
Ok(msg) => cb(msg),
Ok(msg) => {
if builder.config.json_output {
// Forward JSON to stdout.
println!("{}", line);
}
cb(msg)
}
// If this was informational, just print it out and continue
Err(_) => println!("{}", line),
}
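The change above tees each line of cargo's JSON output back to stdout when `--json-output` is passed to x.py, in addition to parsing it for the build system. A minimal sketch of that tee-and-parse pattern, assuming the `serde_json` crate and a local `json_output` flag standing in for `builder.config.json_output`:

```rust
use std::io::{self, BufRead};

fn main() -> io::Result<()> {
    // Hypothetical stand-in for `builder.config.json_output`.
    let json_output = true;

    let stdin = io::stdin();
    for line in stdin.lock().lines() {
        let line = line?;
        match serde_json::from_str::<serde_json::Value>(&line) {
            Ok(_msg) => {
                if json_output {
                    // Forward the raw JSON line downstream before acting on it.
                    println!("{}", line);
                }
                // ... handle the parsed message here ...
            }
            // Informational, non-JSON output is passed through unchanged.
            Err(_) => println!("{}", line),
        }
    }
    Ok(())
}
```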

View File

@ -416,7 +416,7 @@ without including it in your main documentation. For example, you could write th
`lib.rs` to test your README as part of your doctests:
```rust,ignore
#![feature(extern_doc)]
#![feature(external_doc)]
#[doc(include="../README.md")]
#[cfg(doctest)]
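// A hedged completion of the truncated example above: the `doc(include)` and
// `cfg(doctest)` attributes need an item to attach to, typically an empty
// struct with an illustrative name such as the one below.
pub struct ReadmeDoctests;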

View File

@ -3309,7 +3309,8 @@ Basic usage:
```
", $Feature, "assert_eq!(100", stringify!($SelfT), ".saturating_add(1), 101);
assert_eq!(200u8.saturating_add(127), 255);", $EndFeature, "
assert_eq!(", stringify!($SelfT), "::MAX.saturating_add(127), ", stringify!($SelfT), "::MAX);",
$EndFeature, "
```"),
#[stable(feature = "rust1", since = "1.0.0")]
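The added doctest line documents that saturating addition clamps at the type's maximum instead of overflowing. A standalone version of the behaviour being documented, using `i8` as an arbitrary example type:

```rust
fn main() {
    // Saturating addition clamps at the numeric bounds instead of overflowing.
    assert_eq!(100i8.saturating_add(1), 101);
    assert_eq!(200u8.saturating_add(127), 255);
    assert_eq!(i8::MAX.saturating_add(127), i8::MAX);
}
```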

View File

@ -392,7 +392,7 @@ impl TokenStream {
break;
}
}
token_trees = out.into_iter().map(|t| TokenTree::Token(t)).collect();
token_trees = out.into_iter().map(TokenTree::Token).collect();
if token_trees.len() != 1 {
debug!("break_tokens: broke {:?} to {:?}", tree, token_trees);
}
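The change above replaces a redundant closure with the tuple-struct constructor itself, which is already a function. A small self-contained illustration of the same clippy-style cleanup (the `Wrapper` type is made up for the example):

```rust
#[derive(Debug)]
struct Wrapper(u32);

fn main() {
    // A tuple-struct constructor is a plain function, so it can replace
    // the redundant closure `|v| Wrapper(v)` in the `map` call.
    let wrapped: Vec<Wrapper> = (1..=3).map(Wrapper).collect();
    println!("{:?}", wrapped);
}
```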

View File

@ -1237,10 +1237,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
) => {
assert!(!*late);
let out_op_sp = if input { op_sp2 } else { op_sp };
let msg = &format!(
"use `lateout` instead of \
`out` to avoid conflict"
);
let msg = "use `lateout` instead of \
`out` to avoid conflict";
err.span_help(out_op_sp, msg);
}
_ => {}

View File

@ -457,7 +457,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, sp: Span, args: AsmArgs) -> P<ast
let mut chars = arg.format.ty.chars();
let mut modifier = chars.next();
if !chars.next().is_none() {
if chars.next().is_some() {
let span = arg
.format
.ty_span

View File

@ -63,7 +63,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
.tcx()
.destructure_const(ty::ParamEnv::reveal_all().and(&c))
.fields
.into_iter()
.iter()
.map(|field| {
if let Some(prim) = field.val.try_to_scalar() {
let layout = bx.layout_of(field_ty);
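Calling `into_iter()` through a reference iterates by reference anyway, so the explicit `iter()` states the intent directly and satisfies clippy's `into_iter_on_ref` lint. A minimal illustration outside of rustc:

```rust
fn main() {
    let fields: &[i32] = &[1, 2, 3];
    // On a shared slice, `into_iter()` would just iterate by reference,
    // so `iter()` says the same thing more clearly.
    let doubled: Vec<i32> = fields.iter().map(|f| f * 2).collect();
    assert_eq!(doubled, vec![2, 4, 6]);
}
```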

View File

@ -440,6 +440,7 @@ E0754: include_str!("./error_codes/E0754.md"),
E0758: include_str!("./error_codes/E0758.md"),
E0760: include_str!("./error_codes/E0760.md"),
E0761: include_str!("./error_codes/E0761.md"),
E0762: include_str!("./error_codes/E0762.md"),
;
// E0006, // merged with E0005
// E0008, // cannot bind by-move into a pattern guard

View File

@ -1,6 +1,6 @@
Trait methods currently cannot take patterns as arguments.
Example of erroneous code:
Erroneous code example:
```compile_fail,E0642
trait Foo {
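E0642 fires because trait method signatures (methods without bodies) may not use patterns as parameters. A hedged sketch of the usual workaround: keep a plain parameter in the trait declaration and destructure in the implementation, where patterns are allowed (the names below are illustrative):

```rust
trait Transform {
    // Patterns such as `(x, y)` are rejected here, so take a plain parameter.
    fn apply(&self, point: (i32, i32)) -> i32;
}

struct Sum;

impl Transform for Sum {
    // Patterns are fine in the impl, where the method has a body.
    fn apply(&self, (x, y): (i32, i32)) -> i32 {
        x + y
    }
}

fn main() {
    assert_eq!(Sum.apply((2, 3)), 5);
}
```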

View File

@ -0,0 +1,13 @@
A character literal wasn't ended with a quote.
Erroneous code example:
```compile_fail,E0762
static C: char = '●; // error!
```
To fix this error, add the missing quote:
```
static C: char = '●'; // ok!
```

View File

@ -159,14 +159,10 @@ impl AnnotateSnippetEmitterWriter {
// FIXME(#59346): Not really sure when `fold` should be true or false
fold: false,
annotations: annotations
.into_iter()
.iter()
.map(|annotation| SourceAnnotation {
range: (annotation.start_col, annotation.end_col),
label: annotation
.label
.as_ref()
.map(|s| s.as_str())
.unwrap_or_default(),
label: annotation.label.as_deref().unwrap_or_default(),
annotation_type: annotation_type_for_level(*level),
})
.collect(),
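`Option::as_deref` collapses the `as_ref().map(|s| s.as_str())` chain into one call, turning an `Option<String>` into an `Option<&str>`. A standalone example of the same simplification:

```rust
fn main() {
    let label: Option<String> = Some(String::from("expected `i32`"));
    // `as_deref` borrows through the `Option`, replacing
    // `label.as_ref().map(|s| s.as_str())`.
    let text: &str = label.as_deref().unwrap_or_default();
    assert_eq!(text, "expected `i32`");
}
```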

View File

@ -227,6 +227,28 @@ pub fn path_to_string(segment: &hir::Path<'_>) -> String {
to_string(NO_ANN, |s| s.print_path(segment, false))
}
pub fn fn_to_string(
decl: &hir::FnDecl<'_>,
header: hir::FnHeader,
name: Option<Symbol>,
generics: &hir::Generics<'_>,
vis: &hir::Visibility<'_>,
arg_names: &[Ident],
body_id: Option<hir::BodyId>,
) -> String {
to_string(NO_ANN, |s| s.print_fn(decl, header, name, generics, vis, arg_names, body_id))
}
pub fn enum_def_to_string(
enum_definition: &hir::EnumDef<'_>,
generics: &hir::Generics<'_>,
name: Symbol,
span: rustc_span::Span,
visibility: &hir::Visibility<'_>,
) -> String {
to_string(NO_ANN, |s| s.print_enum_def(enum_definition, generics, name, span, visibility))
}
impl<'a> State<'a> {
pub fn cbox(&mut self, u: usize) {
self.s.cbox(u);

View File

@ -550,7 +550,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
let error_code = error_code.into();
let mut err = self.tcx.sess.struct_span_err_with_code(
local_visitor.target_span,
&format!("type annotations needed"),
"type annotations needed",
error_code,
);

View File

@ -77,8 +77,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
}
_ => {}
}
let mut type_param_span: MultiSpan =
visitor.types.iter().cloned().collect::<Vec<_>>().into();
let mut type_param_span: MultiSpan = visitor.types.to_vec().into();
for &span in &visitor.types {
type_param_span.push_span_label(
span,

View File

@ -187,9 +187,9 @@ pub fn strip_shebang(input: &str) -> Option<usize> {
// Ok, this is a shebang but if the next non-whitespace token is `[` or maybe
// a doc comment (due to `TokenKind::(Line,Block)Comment` ambiguity at lexer level),
// then it may be valid Rust code, so consider it Rust code.
let next_non_whitespace_token = tokenize(input_tail).map(|tok| tok.kind).filter(|tok|
let next_non_whitespace_token = tokenize(input_tail).map(|tok| tok.kind).find(|tok|
!matches!(tok, TokenKind::Whitespace | TokenKind::LineComment | TokenKind::BlockComment { .. })
).next();
);
if next_non_whitespace_token != Some(TokenKind::OpenBracket) {
// No other choice than to consider this a shebang.
return Some(2 + first_line_tail.len());
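`Iterator::find` returns the first element matching a predicate, so it replaces the `.filter(..).next()` chain flagged by clippy without changing behaviour. A small illustration:

```rust
fn main() {
    let tokens = ["  ", "// comment", "[", "fn"];
    // `find` stops at the first match, exactly like `.filter(..).next()`.
    let first_non_trivia =
        tokens.iter().find(|t| !t.trim().is_empty() && !t.starts_with("//"));
    assert_eq!(first_non_trivia, Some(&"["));
}
```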

View File

@ -335,6 +335,16 @@ impl<'hir> Map<'hir> {
}
}
pub fn enclosing_body_owner(&self, hir_id: HirId) -> HirId {
for (parent, _) in self.parent_iter(hir_id) {
if let Some(body) = self.maybe_body_owned_by(parent) {
return self.body_owner(body);
}
}
bug!("no `enclosing_body_owner` for hir_id `{}`", hir_id);
}
/// Returns the `HirId` that corresponds to the definition of
/// which this is the body of, i.e., a `fn`, `const` or `static`
/// item (possibly associated), a closure, or a `hir::AnonConst`.
@ -537,18 +547,8 @@ impl<'hir> Map<'hir> {
/// Whether the expression pointed at by `hir_id` belongs to a `const` evaluation context.
/// Used exclusively for diagnostics, to avoid suggestion function calls.
pub fn is_const_context(&self, hir_id: HirId) -> bool {
let parent_id = self.get_parent_item(hir_id);
match self.get(parent_id) {
Node::Item(&Item { kind: ItemKind::Const(..) | ItemKind::Static(..), .. })
| Node::TraitItem(&TraitItem { kind: TraitItemKind::Const(..), .. })
| Node::ImplItem(&ImplItem { kind: ImplItemKind::Const(..), .. })
| Node::AnonConst(_) => true,
Node::Item(&Item { kind: ItemKind::Fn(ref sig, ..), .. }) => {
sig.header.constness == Constness::Const
}
_ => false,
}
pub fn is_inside_const_context(&self, hir_id: HirId) -> bool {
self.body_const_context(self.local_def_id(self.enclosing_body_owner(hir_id))).is_some()
}
/// Whether `hir_id` corresponds to a `mod` or a crate.

View File

@ -495,7 +495,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
let closure_id = hir.as_local_hir_id(self.mir_def_id);
let fn_call_id = hir.get_parent_node(closure_id);
let node = hir.get(fn_call_id);
let item_id = hir.get_parent_item(fn_call_id);
let item_id = hir.enclosing_body_owner(fn_call_id);
let mut look_at_return = true;
// If we can detect the expression to be an `fn` call where the closure was an argument,
// we point at the `fn` definition argument...

View File

@ -309,9 +309,7 @@ pub fn const_eval_raw_provider<'tcx>(
let res = ecx.load_mir(cid.instance.def, cid.promoted);
res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, &body))
.and_then(|place| {
Ok(RawConst { alloc_id: place.ptr.assert_ptr().alloc_id, ty: place.layout.ty })
})
.map(|place| RawConst { alloc_id: place.ptr.assert_ptr().alloc_id, ty: place.layout.ty })
.map_err(|error| {
let err = error_to_const_error(&ecx, error);
// errors in statics are always emitted as fatal errors
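`and_then(|x| Ok(..))` can never introduce a new error, so clippy (`bind_instead_of_map`) prefers the plainer `map`, as in the change above. A minimal standalone version of the rewrite:

```rust
fn doubled(s: &str) -> Result<usize, std::num::ParseIntError> {
    // Before: s.trim().parse::<usize>().and_then(|v| Ok(v * 2))
    // After:  the closure never fails, so `map` says the same thing.
    s.trim().parse::<usize>().map(|v| v * 2)
}

fn main() {
    assert_eq!(doubled(" 21 "), Ok(42));
    assert!(doubled("x").is_err());
}
```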

View File

@ -51,7 +51,7 @@ impl<'a, 'tcx> Visitor<'tcx> for PackedRefChecker<'a, 'tcx> {
lint_root,
source_info.span,
|lint| {
lint.build(&format!("reference to packed field is unaligned",))
lint.build("reference to packed field is unaligned")
.note(
"fields of packed structs are not properly aligned, and creating \
a misaligned reference is undefined behavior (even if that \

View File

@ -111,7 +111,7 @@ fn local_eligible_for_nrvo(body: &mut mir::Body<'_>) -> Option<Local> {
copied_to_return_place = Some(returned_local);
}
return copied_to_return_place;
copied_to_return_place
}
fn find_local_assigned_to_return_place(
@ -136,7 +136,7 @@ fn find_local_assigned_to_return_place(
}
}
return None;
None
}
// If this statement is an assignment of an unprojected local to the return place,

View File

@ -99,7 +99,7 @@ fn get_arm_identity_info<'a, 'tcx>(stmts: &'a [Statement<'tcx>]) -> Option<ArmId
fn try_eat<'a, 'tcx>(
stmt_iter: &mut StmtIter<'a, 'tcx>,
test: impl Fn(&'a Statement<'tcx>) -> bool,
mut action: impl FnMut(usize, &'a Statement<'tcx>) -> (),
mut action: impl FnMut(usize, &'a Statement<'tcx>),
) {
while stmt_iter.peek().map(|(_, stmt)| test(stmt)).unwrap_or(false) {
let (idx, stmt) = stmt_iter.next().unwrap();
@ -271,7 +271,7 @@ fn optimization_applies<'tcx>(
}
// Verify the assignment chain consists of the form b = a; c = b; d = c; etc...
if opt_info.field_tmp_assignments.len() == 0 {
if opt_info.field_tmp_assignments.is_empty() {
trace!("NO: no assignments found");
}
let mut last_assigned_to = opt_info.field_tmp_assignments[0].1;

View File

@ -10,7 +10,7 @@ use crate::build::ForGuard::{self, OutsideGuard, RefWithinGuard};
use crate::build::{BlockAnd, BlockAndExtension, Builder};
use crate::build::{GuardFrame, GuardFrameLocal, LocalsForNode};
use crate::hair::{self, *};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::{fx::{FxHashMap, FxHashSet}, stack::ensure_sufficient_stack};
use rustc_hir::HirId;
use rustc_index::bit_set::BitSet;
use rustc_middle::middle::region;
@ -909,30 +909,32 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
split_or_candidate |= self.simplify_candidate(candidate);
}
if split_or_candidate {
// At least one of the candidates has been split into subcandidates.
// We need to change the candidate list to include those.
let mut new_candidates = Vec::new();
ensure_sufficient_stack(|| {
if split_or_candidate {
// At least one of the candidates has been split into subcandidates.
// We need to change the candidate list to include those.
let mut new_candidates = Vec::new();
for candidate in candidates {
candidate.visit_leaves(|leaf_candidate| new_candidates.push(leaf_candidate));
for candidate in candidates {
candidate.visit_leaves(|leaf_candidate| new_candidates.push(leaf_candidate));
}
self.match_simplified_candidates(
span,
start_block,
otherwise_block,
&mut *new_candidates,
fake_borrows,
);
} else {
self.match_simplified_candidates(
span,
start_block,
otherwise_block,
candidates,
fake_borrows,
);
}
self.match_simplified_candidates(
span,
start_block,
otherwise_block,
&mut *new_candidates,
fake_borrows,
);
} else {
self.match_simplified_candidates(
span,
start_block,
otherwise_block,
candidates,
fake_borrows,
);
};
});
}
fn match_simplified_candidates(
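Wrapping the candidate handling in `ensure_sufficient_stack` lets deeply nested match lowering grow the stack on demand instead of overflowing it. A hedged sketch of what such a helper looks like, assuming the `stacker` crate; the red-zone and growth sizes here are illustrative, not necessarily the values rustc uses:

```rust
fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R {
    // If fewer than RED_ZONE bytes of stack remain, run `f` on a freshly
    // allocated segment of STACK_PER_RECURSION bytes.
    const RED_ZONE: usize = 100 * 1024;
    const STACK_PER_RECURSION: usize = 1024 * 1024;
    stacker::maybe_grow(RED_ZONE, STACK_PER_RECURSION, f)
}

fn depth(n: u64) -> u64 {
    // Deep recursion (analogous to deeply nested match candidates) stays safe.
    ensure_sufficient_stack(|| if n == 0 { 0 } else { 1 + depth(n - 1) })
}

fn main() {
    println!("{}", depth(200_000));
}
```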

View File

@ -325,7 +325,15 @@ impl<'a> StringReader<'a> {
let (lit_kind, mode, prefix_len, postfix_len) = match kind {
rustc_lexer::LiteralKind::Char { terminated } => {
if !terminated {
self.fatal_span_(start, suffix_start, "unterminated character literal").raise()
self.sess
.span_diagnostic
.struct_span_fatal_with_code(
self.mk_sp(start, suffix_start),
"unterminated character literal",
error_code!(E0762),
)
.emit();
FatalError.raise();
}
(token::Char, Mode::Char, 1, 1) // ' '
}
@ -401,7 +409,7 @@ impl<'a> StringReader<'a> {
let content_end = suffix_start - BytePos(postfix_len);
let id = self.symbol_from_to(content_start, content_end);
self.validate_literal_escape(mode, content_start, content_end);
return (lit_kind, id);
(lit_kind, id)
}
pub fn pos(&self) -> BytePos {

View File

@ -936,7 +936,7 @@ impl<'a> Parser<'a> {
} else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
// The current token is in the same line as the prior token, not recoverable.
} else if [token::Comma, token::Colon].contains(&self.token.kind)
&& &self.prev_token.kind == &token::CloseDelim(token::Paren)
&& self.prev_token.kind == token::CloseDelim(token::Paren)
{
// Likely typo: The current token is on a new line and is expected to be
// `.`, `;`, `?`, or an operator after a close delimiter token.

View File

@ -193,7 +193,7 @@ impl TokenCursor {
tree,
self.stack.len()
);
collecting.buf.push(tree.clone().into())
collecting.buf.push(tree.clone())
}
}
@ -675,7 +675,7 @@ impl<'a> Parser<'a> {
// If this was a missing `@` in a binding pattern
// bail with a suggestion
// https://github.com/rust-lang/rust/issues/72373
if self.prev_token.is_ident() && &self.token.kind == &token::DotDot {
if self.prev_token.is_ident() && self.token.kind == token::DotDot {
let msg = format!(
"if you meant to bind the contents of \
the rest of the array pattern into `{}`, use `@`",
@ -1193,7 +1193,7 @@ impl<'a> Parser<'a> {
let mut collected_tokens = if let Some(collecting) = self.token_cursor.collecting.take() {
collecting.buf
} else {
let msg = format!("our vector went away?");
let msg = "our vector went away?";
debug!("collect_tokens: {}", msg);
self.sess.span_diagnostic.delay_span_bug(self.token.span, &msg);
// This can happen due to a bad interaction of two unrelated recovery mechanisms

View File

@ -232,7 +232,7 @@ impl ExprVisitor<'tcx> {
// size).
if let Some((in_expr, Some(in_asm_ty))) = tied_input {
if in_asm_ty != asm_ty {
let msg = &format!("incompatible types for asm inout argument");
let msg = "incompatible types for asm inout argument";
let mut err = self.tcx.sess.struct_span_err(vec![in_expr.span, expr.span], msg);
err.span_label(
in_expr.span,

View File

@ -20,7 +20,7 @@ use rustc_hir as hir;
use rustc_hir::def::{DefKind as HirDefKind, Res};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir_pretty::{bounds_to_string, generic_params_to_string, ty_to_string};
use rustc_hir_pretty::{bounds_to_string, fn_to_string, generic_params_to_string, ty_to_string};
use rustc_middle::hir::map::Map;
use rustc_middle::span_bug;
use rustc_middle::ty::{self, DefIdTree, TyCtxt};
@ -199,23 +199,23 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> {
self.dumper.compilation_opts(data);
}
fn write_sub_paths(&mut self, path: &'tcx hir::Path<'tcx>) {
for seg in path.segments {
fn write_segments(&mut self, segments: impl IntoIterator<Item = &'tcx hir::PathSegment<'tcx>>) {
for seg in segments {
if let Some(data) = self.save_ctxt.get_path_segment_data(seg) {
self.dumper.dump_ref(data);
}
}
}
fn write_sub_paths(&mut self, path: &'tcx hir::Path<'tcx>) {
self.write_segments(path.segments)
}
// As write_sub_paths, but does not process the last ident in the path (assuming it
// will be processed elsewhere). See note on write_sub_paths about global.
fn write_sub_paths_truncated(&mut self, path: &'tcx hir::Path<'tcx>) {
if let [segments @ .., _] = path.segments {
for seg in segments {
if let Some(data) = self.save_ctxt.get_path_segment_data(seg) {
self.dumper.dump_ref(data);
}
}
self.write_segments(segments)
}
}
@ -276,7 +276,8 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> {
}
v.process_generic_params(&generics, &method_data.qualname, hir_id);
method_data.value = crate::make_signature(&sig.decl, &generics);
method_data.value =
fn_to_string(sig.decl, sig.header, Some(ident.name), generics, vis, &[], None);
method_data.sig = sig::method_signature(hir_id, ident, generics, sig, &v.save_ctxt);
v.dumper.dump_def(&access_from_vis!(v.save_ctxt, vis, hir_id), method_data);
@ -643,7 +644,7 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> {
self.nest_tables(map.local_def_id(item.hir_id), |v| {
v.visit_ty(&typ);
if let &Some(ref trait_ref) = trait_ref {
v.process_path(trait_ref.hir_ref_id, &trait_ref.path);
v.process_path(trait_ref.hir_ref_id, &hir::QPath::Resolved(None, &trait_ref.path));
}
v.process_generic_params(generics, "", item.hir_id);
for impl_item in impl_items {
@ -746,7 +747,7 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> {
}
}
fn dump_path_ref(&mut self, id: hir::HirId, path: &hir::Path<'tcx>) {
fn dump_path_ref(&mut self, id: hir::HirId, path: &hir::QPath<'tcx>) {
let path_data = self.save_ctxt.get_path_data(id, path);
if let Some(path_data) = path_data {
self.dumper.dump_ref(path_data);
@ -760,14 +761,30 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> {
}
}
fn process_path(&mut self, id: hir::HirId, path: &'tcx hir::Path<'tcx>) {
if self.span.filter_generated(path.span) {
fn process_path(&mut self, id: hir::HirId, path: &hir::QPath<'tcx>) {
let span = match path {
hir::QPath::Resolved(_, path) => path.span,
hir::QPath::TypeRelative(_, segment) => segment.ident.span,
};
if self.span.filter_generated(span) {
return;
}
self.dump_path_ref(id, path);
// Type arguments
for seg in path.segments {
let segments = match path {
hir::QPath::Resolved(ty, path) => {
if let Some(ty) = ty {
self.visit_ty(ty);
}
path.segments
}
hir::QPath::TypeRelative(ty, segment) => {
self.visit_ty(ty);
std::slice::from_ref(*segment)
}
};
for seg in segments {
if let Some(ref generic_args) = seg.args {
for arg in generic_args.args {
if let hir::GenericArg::Type(ref ty) = arg {
@ -777,7 +794,9 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> {
}
}
self.write_sub_paths_truncated(path);
if let hir::QPath::Resolved(_, path) = path {
self.write_sub_paths_truncated(path);
}
}
fn process_struct_lit(
@ -931,9 +950,7 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> {
}
for (id, ref path) in collector.collected_paths {
if let hir::QPath::Resolved(_, path) = path {
self.process_path(id, path);
}
self.process_path(id, path);
}
}
@ -1135,7 +1152,10 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> {
fn process_bounds(&mut self, bounds: hir::GenericBounds<'tcx>) {
for bound in bounds {
if let hir::GenericBound::Trait(ref trait_ref, _) = *bound {
self.process_path(trait_ref.trait_ref.hir_ref_id, &trait_ref.trait_ref.path)
self.process_path(
trait_ref.trait_ref.hir_ref_id,
&hir::QPath::Resolved(None, &trait_ref.trait_ref.path),
)
}
}
}
@ -1330,13 +1350,16 @@ impl<'l, 'tcx> Visitor<'tcx> for DumpVisitor<'l, 'tcx> {
fn visit_ty(&mut self, t: &'tcx hir::Ty<'tcx>) {
self.process_macro_use(t.span);
match t.kind {
hir::TyKind::Path(hir::QPath::Resolved(_, path)) => {
hir::TyKind::Path(ref path) => {
if generated_code(t.span) {
return;
}
if let Some(id) = self.lookup_def_id(t.hir_id) {
let sub_span = path.segments.last().unwrap().ident.span;
let sub_span = match path {
hir::QPath::Resolved(_, path) => path.segments.last().unwrap().ident.span,
hir::QPath::TypeRelative(_, segment) => segment.ident.span,
};
let span = self.span_from_span(sub_span);
self.dumper.dump_ref(Ref {
kind: RefKind::Type,
@ -1345,8 +1368,10 @@ impl<'l, 'tcx> Visitor<'tcx> for DumpVisitor<'l, 'tcx> {
});
}
self.write_sub_paths_truncated(path);
intravisit::walk_path(self, path);
if let hir::QPath::Resolved(_, path) = path {
self.write_sub_paths_truncated(path);
}
intravisit::walk_qpath(self, path, t.hir_id, t.span);
}
hir::TyKind::Array(ref ty, ref anon_const) => {
self.visit_ty(ty);
@ -1355,6 +1380,10 @@ impl<'l, 'tcx> Visitor<'tcx> for DumpVisitor<'l, 'tcx> {
v.visit_expr(&map.body(anon_const.body).value)
});
}
hir::TyKind::Def(item_id, _) => {
let item = self.tcx.hir().item(item_id.id);
self.nest_tables(self.tcx.hir().local_def_id(item_id.id), |v| v.visit_item(item));
}
_ => intravisit::walk_ty(self, t),
}
}
@ -1432,8 +1461,8 @@ impl<'l, 'tcx> Visitor<'tcx> for DumpVisitor<'l, 'tcx> {
self.visit_expr(&arm.body);
}
fn visit_path(&mut self, p: &'tcx hir::Path<'tcx>, id: hir::HirId) {
self.process_path(id, p);
fn visit_qpath(&mut self, path: &'tcx hir::QPath<'tcx>, id: hir::HirId, _: Span) {
self.process_path(id, path);
}
fn visit_stmt(&mut self, s: &'tcx hir::Stmt<'tcx>) {

View File

@ -13,11 +13,11 @@ use rustc_ast::ast::{self};
use rustc_ast::util::comments::strip_doc_comment_decoration;
use rustc_ast_pretty::pprust::attribute_to_string;
use rustc_hir as hir;
use rustc_hir::def::{CtorOf, DefKind as HirDefKind, Res};
use rustc_hir::def::{DefKind as HirDefKind, Res};
use rustc_hir::def_id::{DefId, LOCAL_CRATE};
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::Node;
use rustc_hir_pretty::ty_to_string;
use rustc_hir_pretty::{enum_def_to_string, fn_to_string, ty_to_string};
use rustc_middle::hir::map::Map;
use rustc_middle::middle::cstore::ExternCrate;
use rustc_middle::middle::privacy::AccessLevels;
@ -135,7 +135,7 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> {
let def_id = self.tcx.hir().local_def_id(item.hir_id).to_def_id();
let qualname = format!("::{}", self.tcx.def_path_str(def_id));
match item.kind {
hir::ForeignItemKind::Fn(ref decl, _, ref generics) => {
hir::ForeignItemKind::Fn(ref decl, arg_names, ref generics) => {
filter!(self.span_utils, item.ident.span);
Some(Data::DefData(Def {
@ -144,7 +144,23 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> {
span: self.span_from_span(item.ident.span),
name: item.ident.to_string(),
qualname,
value: make_signature(decl, generics),
value: fn_to_string(
decl,
hir::FnHeader {
// functions in extern block are implicitly unsafe
unsafety: hir::Unsafety::Unsafe,
// functions in extern block cannot be const
constness: hir::Constness::NotConst,
abi: self.tcx.hir().get_foreign_abi(item.hir_id),
// functions in extern block cannot be async
asyncness: hir::IsAsync::NotAsync,
},
Some(item.ident.name),
generics,
&item.vis,
arg_names,
None,
),
parent: None,
children: vec![],
decl_id: None,
@ -191,7 +207,15 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> {
span: self.span_from_span(item.ident.span),
name: item.ident.to_string(),
qualname,
value: make_signature(&sig.decl, generics),
value: fn_to_string(
sig.decl,
sig.header,
Some(item.ident.name),
generics,
&item.vis,
&[],
None,
),
parent: None,
children: vec![],
decl_id: None,
@ -268,13 +292,12 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> {
attributes: lower_attributes(item.attrs.to_vec(), self),
}))
}
hir::ItemKind::Enum(ref def, _) => {
hir::ItemKind::Enum(ref def, ref generics) => {
let name = item.ident.to_string();
let qualname = format!("::{}", self.tcx.def_path_str(def_id));
filter!(self.span_utils, item.ident.span);
let variants_str =
def.variants.iter().map(|v| v.ident.to_string()).collect::<Vec<_>>().join(", ");
let value = format!("{}::{{{}}}", name, variants_str);
let value =
enum_def_to_string(def, generics, item.ident.name, item.span, &item.vis);
Some(Data::DefData(Def {
kind: DefKind::Enum,
id: id_from_def_id(def_id),
@ -579,7 +602,7 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> {
ref_id: def_id.or(decl_id).map(id_from_def_id).unwrap_or_else(null_id),
}))
}
hir::ExprKind::Path(hir::QPath::Resolved(_, path)) => {
hir::ExprKind::Path(ref path) => {
self.get_path_data(expr.hir_id, path).map(Data::RefData)
}
_ => {
@ -631,8 +654,12 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> {
}
}
pub fn get_path_data(&self, id: hir::HirId, path: &hir::Path<'_>) -> Option<Ref> {
path.segments.last().and_then(|seg| {
pub fn get_path_data(&self, id: hir::HirId, path: &hir::QPath<'_>) -> Option<Ref> {
let segment = match path {
hir::QPath::Resolved(_, path) => path.segments.last(),
hir::QPath::TypeRelative(_, segment) => Some(*segment),
};
segment.and_then(|seg| {
self.get_path_segment_data(seg).or_else(|| self.get_path_segment_data_with_id(seg, id))
})
}
@ -681,20 +708,16 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> {
Res::Def(HirDefKind::ConstParam, def_id) => {
Some(Ref { kind: RefKind::Variable, span, ref_id: id_from_def_id(def_id) })
}
Res::Def(HirDefKind::Ctor(CtorOf::Struct, ..), def_id) => {
// This is a reference to a tuple struct where the def_id points
Res::Def(HirDefKind::Ctor(_, ..), def_id) => {
// This is a reference to a tuple struct or an enum variant where the def_id points
// to an invisible constructor function. That is not a very useful
// def, so adjust to point to the tuple struct itself.
// def, so adjust to point to the tuple struct or enum variant itself.
let parent_def_id = self.tcx.parent(def_id).unwrap();
Some(Ref { kind: RefKind::Type, span, ref_id: id_from_def_id(parent_def_id) })
}
Res::Def(
HirDefKind::Static
| HirDefKind::Const
| HirDefKind::AssocConst
| HirDefKind::Ctor(..),
_,
) => Some(Ref { kind: RefKind::Variable, span, ref_id: id_from_def_id(res.def_id()) }),
Res::Def(HirDefKind::Static | HirDefKind::Const | HirDefKind::AssocConst, _) => {
Some(Ref { kind: RefKind::Variable, span, ref_id: id_from_def_id(res.def_id()) })
}
Res::Def(HirDefKind::AssocFn, decl_id) => {
let def_id = if decl_id.is_local() {
let ti = self.tcx.associated_item(decl_id);
@ -844,31 +867,6 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> {
}
}
fn make_signature(decl: &hir::FnDecl<'_>, generics: &hir::Generics<'_>) -> String {
let mut sig = "fn ".to_owned();
if !generics.params.is_empty() {
sig.push('<');
sig.push_str(
&generics
.params
.iter()
.map(|param| param.name.ident().to_string())
.collect::<Vec<_>>()
.join(", "),
);
sig.push_str("> ");
}
sig.push('(');
sig.push_str(&decl.inputs.iter().map(ty_to_string).collect::<Vec<_>>().join(", "));
sig.push(')');
match decl.output {
hir::FnRetTy::DefaultReturn(_) => sig.push_str(" -> ()"),
hir::FnRetTy::Return(ref t) => sig.push_str(&format!(" -> {}", ty_to_string(t))),
}
sig
}
// An AST visitor for collecting paths (e.g., the names of structs) and formal
// variables (idents) from patterns.
struct PathCollector<'l> {

View File

@ -281,6 +281,22 @@ impl<'hir> Sig for hir::Ty<'hir> {
})
}
}
hir::TyKind::Path(hir::QPath::TypeRelative(ty, segment)) => {
let nested_ty = ty.make(offset + 1, id, scx)?;
let prefix = format!("<{}>::", nested_ty.text,);
let name = path_segment_to_string(segment);
let res = scx.get_path_res(id.ok_or("Missing id for Path")?);
let id = id_from_def_id(res.def_id());
let start = offset + prefix.len();
let end = start + name.len();
Ok(Signature {
text: prefix + &name,
defs: vec![],
refs: vec![SigElement { id, start, end }],
})
}
hir::TyKind::TraitObject(bounds, ..) => {
// FIXME recurse into bounds
let bounds: Vec<hir::GenericBound<'_>> = bounds
@ -308,11 +324,11 @@ impl<'hir> Sig for hir::Ty<'hir> {
let text = format!("[{}; {}]", nested_ty.text, expr);
Ok(replace_text(nested_ty, text))
}
hir::TyKind::Typeof(_)
| hir::TyKind::Infer
| hir::TyKind::Def(..)
| hir::TyKind::Path(..)
| hir::TyKind::Err => Err("Ty"),
hir::TyKind::Def(item_id, _) => {
let item = scx.tcx.hir().item(item_id.id);
item.make(offset, Some(item_id.id), scx)
}
hir::TyKind::Typeof(_) | hir::TyKind::Infer | hir::TyKind::Err => Err("Ty"),
}
}
}

View File

@ -15,6 +15,8 @@ use rustc_span::Span;
use super::method::probe;
use std::fmt;
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub fn emit_coerce_suggestions(
&self,
@ -670,16 +672,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
checked_ty: Ty<'tcx>,
expected_ty: Ty<'tcx>,
) -> bool {
if self.tcx.hir().is_const_context(expr.hir_id) {
// Shouldn't suggest `.into()` on `const`s.
// FIXME(estebank): modify once we decide to suggest `as` casts
return false;
}
if self.tcx.sess.source_map().is_imported(expr.span) {
// Ignore if span is from within a macro.
return false;
}
let src = if let Ok(src) = self.tcx.sess.source_map().span_to_snippet(expr.span) {
src
} else {
return false;
};
// If casting this expression to a given numeric type would be appropriate in case of a type
// mismatch.
//
@ -708,6 +711,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
} else {
String::new()
};
if let hir::ExprKind::Call(path, args) = &expr.kind {
if let (hir::ExprKind::Path(hir::QPath::TypeRelative(base_ty, path_segment)), 1) =
(&path.kind, args.len())
@ -749,222 +753,200 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
checked_ty, expected_ty,
);
let needs_paren = expr.precedence().order() < (PREC_POSTFIX as i8);
if let Ok(src) = self.tcx.sess.source_map().span_to_snippet(expr.span) {
let cast_suggestion = format!(
"{}{}{}{} as {}",
prefix,
if needs_paren { "(" } else { "" },
src,
if needs_paren { ")" } else { "" },
expected_ty,
);
let try_into_suggestion = format!(
"{}{}{}{}.try_into().unwrap()",
prefix,
if needs_paren { "(" } else { "" },
src,
if needs_paren { ")" } else { "" },
);
let into_suggestion = format!(
"{}{}{}{}.into()",
prefix,
if needs_paren { "(" } else { "" },
src,
if needs_paren { ")" } else { "" },
);
let suffix_suggestion = format!(
"{}{}{}{}",
if needs_paren { "(" } else { "" },
if let (ty::Int(_) | ty::Uint(_), ty::Float(_)) =
(&expected_ty.kind, &checked_ty.kind,)
{
// Remove fractional part from literal, for example `42.0f32` into `42`
let src = src.trim_end_matches(&checked_ty.to_string());
src.split('.').next().unwrap()
} else {
src.trim_end_matches(&checked_ty.to_string())
},
expected_ty,
if needs_paren { ")" } else { "" },
);
let literal_is_ty_suffixed = |expr: &hir::Expr<'_>| {
if let hir::ExprKind::Lit(lit) = &expr.kind {
lit.node.is_suffixed()
} else {
false
}
let with_opt_paren: fn(&dyn fmt::Display) -> String =
if expr.precedence().order() < PREC_POSTFIX {
|s| format!("({})", s)
} else {
|s| s.to_string()
};
let suggest_to_change_suffix_or_into =
|err: &mut DiagnosticBuilder<'_>, is_fallible: bool| {
let cast_suggestion = format!("{}{} as {}", prefix, with_opt_paren(&src), expected_ty);
let try_into_suggestion = format!("{}{}.try_into().unwrap()", prefix, with_opt_paren(&src));
let into_suggestion = format!("{}{}.into()", prefix, with_opt_paren(&src));
let suffix_suggestion = with_opt_paren(&format_args!(
"{}{}",
if matches!(
(&expected_ty.kind, &checked_ty.kind),
(ty::Int(_) | ty::Uint(_), ty::Float(_))
) {
// Remove fractional part from literal, for example `42.0f32` into `42`
let src = src.trim_end_matches(&checked_ty.to_string());
src.split('.').next().unwrap()
} else {
src.trim_end_matches(&checked_ty.to_string())
},
expected_ty,
));
let literal_is_ty_suffixed = |expr: &hir::Expr<'_>| {
if let hir::ExprKind::Lit(lit) = &expr.kind { lit.node.is_suffixed() } else { false }
};
let in_const_context = self.tcx.hir().is_inside_const_context(expr.hir_id);
let suggest_to_change_suffix_or_into =
|err: &mut DiagnosticBuilder<'_>, is_fallible: bool| {
let msg = if literal_is_ty_suffixed(expr) {
&lit_msg
} else if in_const_context {
// Do not recommend `into` or `try_into` in const contexts.
return;
} else if is_fallible {
&try_msg
} else {
&msg
};
let suggestion = if literal_is_ty_suffixed(expr) {
suffix_suggestion.clone()
} else if is_fallible {
try_into_suggestion
} else {
into_suggestion.clone()
};
err.span_suggestion(expr.span, msg, suggestion, Applicability::MachineApplicable);
};
match (&expected_ty.kind, &checked_ty.kind) {
(&ty::Int(ref exp), &ty::Int(ref found)) => {
let is_fallible = match (exp.bit_width(), found.bit_width()) {
(Some(exp), Some(found)) if exp < found => true,
(None, Some(8 | 16)) => false,
(None, _) | (_, None) => true,
_ => false,
};
suggest_to_change_suffix_or_into(err, is_fallible);
true
}
(&ty::Uint(ref exp), &ty::Uint(ref found)) => {
let is_fallible = match (exp.bit_width(), found.bit_width()) {
(Some(exp), Some(found)) if exp < found => true,
(None, Some(8 | 16)) => false,
(None, _) | (_, None) => true,
_ => false,
};
suggest_to_change_suffix_or_into(err, is_fallible);
true
}
(&ty::Int(exp), &ty::Uint(found)) => {
let is_fallible = match (exp.bit_width(), found.bit_width()) {
(Some(exp), Some(found)) if found < exp => false,
(None, Some(8)) => false,
_ => true,
};
suggest_to_change_suffix_or_into(err, is_fallible);
true
}
(&ty::Uint(_), &ty::Int(_)) => {
suggest_to_change_suffix_or_into(err, true);
true
}
(&ty::Float(ref exp), &ty::Float(ref found)) => {
if found.bit_width() < exp.bit_width() {
suggest_to_change_suffix_or_into(err, false);
} else if literal_is_ty_suffixed(expr) {
err.span_suggestion(
expr.span,
if literal_is_ty_suffixed(expr) {
&lit_msg
} else if is_fallible {
&try_msg
} else {
&msg
},
if literal_is_ty_suffixed(expr) {
suffix_suggestion.clone()
} else if is_fallible {
try_into_suggestion
} else {
into_suggestion.clone()
},
&lit_msg,
suffix_suggestion,
Applicability::MachineApplicable,
);
};
match (&expected_ty.kind, &checked_ty.kind) {
(&ty::Int(ref exp), &ty::Int(ref found)) => {
let is_fallible = match (exp.bit_width(), found.bit_width()) {
(Some(exp), Some(found)) if exp < found => true,
(None, Some(8 | 16)) => false,
(None, _) | (_, None) => true,
_ => false,
};
suggest_to_change_suffix_or_into(err, is_fallible);
true
} else if can_cast {
// Missing try_into implementation for `f64` to `f32`
err.span_suggestion(
expr.span,
&format!("{}, producing the closest possible value", cast_msg),
cast_suggestion,
Applicability::MaybeIncorrect, // lossy conversion
);
}
(&ty::Uint(ref exp), &ty::Uint(ref found)) => {
let is_fallible = match (exp.bit_width(), found.bit_width()) {
(Some(exp), Some(found)) if exp < found => true,
(None, Some(8 | 16)) => false,
(None, _) | (_, None) => true,
_ => false,
};
suggest_to_change_suffix_or_into(err, is_fallible);
true
}
(&ty::Int(exp), &ty::Uint(found)) => {
let is_fallible = match (exp.bit_width(), found.bit_width()) {
(Some(exp), Some(found)) if found < exp => false,
(None, Some(8)) => false,
_ => true,
};
suggest_to_change_suffix_or_into(err, is_fallible);
true
}
(&ty::Uint(_), &ty::Int(_)) => {
suggest_to_change_suffix_or_into(err, true);
true
}
(&ty::Float(ref exp), &ty::Float(ref found)) => {
if found.bit_width() < exp.bit_width() {
suggest_to_change_suffix_or_into(err, false);
} else if literal_is_ty_suffixed(expr) {
err.span_suggestion(
expr.span,
&lit_msg,
suffix_suggestion,
Applicability::MachineApplicable,
);
} else if can_cast {
// Missing try_into implementation for `f64` to `f32`
err.span_suggestion(
expr.span,
&format!("{}, producing the closest possible value", cast_msg),
cast_suggestion,
Applicability::MaybeIncorrect, // lossy conversion
);
}
true
}
(&ty::Uint(_) | &ty::Int(_), &ty::Float(_)) => {
if literal_is_ty_suffixed(expr) {
err.span_suggestion(
expr.span,
&lit_msg,
suffix_suggestion,
Applicability::MachineApplicable,
);
} else if can_cast {
// Missing try_into implementation for `{float}` to `{integer}`
err.span_suggestion(
expr.span,
&format!("{}, rounding the float towards zero", msg),
cast_suggestion,
Applicability::MaybeIncorrect, // lossy conversion
);
}
true
}
(&ty::Float(ref exp), &ty::Uint(ref found)) => {
// if `found` is `None` (meaning found is `usize`), don't suggest `.into()`
if exp.bit_width() > found.bit_width().unwrap_or(256) {
err.span_suggestion(
expr.span,
&format!(
"{}, producing the floating point representation of the integer",
msg,
),
into_suggestion,
Applicability::MachineApplicable,
);
} else if literal_is_ty_suffixed(expr) {
err.span_suggestion(
expr.span,
&lit_msg,
suffix_suggestion,
Applicability::MachineApplicable,
);
} else {
// Missing try_into implementation for `{integer}` to `{float}`
err.span_suggestion(
expr.span,
&format!(
"{}, producing the floating point representation of the integer,
rounded if necessary",
cast_msg,
),
cast_suggestion,
Applicability::MaybeIncorrect, // lossy conversion
);
}
true
}
(&ty::Float(ref exp), &ty::Int(ref found)) => {
// if `found` is `None` (meaning found is `isize`), don't suggest `.into()`
if exp.bit_width() > found.bit_width().unwrap_or(256) {
err.span_suggestion(
expr.span,
&format!(
"{}, producing the floating point representation of the integer",
&msg,
),
into_suggestion,
Applicability::MachineApplicable,
);
} else if literal_is_ty_suffixed(expr) {
err.span_suggestion(
expr.span,
&lit_msg,
suffix_suggestion,
Applicability::MachineApplicable,
);
} else {
// Missing try_into implementation for `{integer}` to `{float}`
err.span_suggestion(
expr.span,
&format!(
"{}, producing the floating point representation of the integer, \
rounded if necessary",
&msg,
),
cast_suggestion,
Applicability::MaybeIncorrect, // lossy conversion
);
}
true
}
_ => false,
true
}
} else {
false
(&ty::Uint(_) | &ty::Int(_), &ty::Float(_)) => {
if literal_is_ty_suffixed(expr) {
err.span_suggestion(
expr.span,
&lit_msg,
suffix_suggestion,
Applicability::MachineApplicable,
);
} else if can_cast {
// Missing try_into implementation for `{float}` to `{integer}`
err.span_suggestion(
expr.span,
&format!("{}, rounding the float towards zero", msg),
cast_suggestion,
Applicability::MaybeIncorrect, // lossy conversion
);
}
true
}
(&ty::Float(ref exp), &ty::Uint(ref found)) => {
// if `found` is `None` (meaning found is `usize`), don't suggest `.into()`
if exp.bit_width() > found.bit_width().unwrap_or(256) {
err.span_suggestion(
expr.span,
&format!(
"{}, producing the floating point representation of the integer",
msg,
),
into_suggestion,
Applicability::MachineApplicable,
);
} else if literal_is_ty_suffixed(expr) {
err.span_suggestion(
expr.span,
&lit_msg,
suffix_suggestion,
Applicability::MachineApplicable,
);
} else {
// Missing try_into implementation for `{integer}` to `{float}`
err.span_suggestion(
expr.span,
&format!(
"{}, producing the floating point representation of the integer,
rounded if necessary",
cast_msg,
),
cast_suggestion,
Applicability::MaybeIncorrect, // lossy conversion
);
}
true
}
(&ty::Float(ref exp), &ty::Int(ref found)) => {
// if `found` is `None` (meaning found is `isize`), don't suggest `.into()`
if exp.bit_width() > found.bit_width().unwrap_or(256) {
err.span_suggestion(
expr.span,
&format!(
"{}, producing the floating point representation of the integer",
&msg,
),
into_suggestion,
Applicability::MachineApplicable,
);
} else if literal_is_ty_suffixed(expr) {
err.span_suggestion(
expr.span,
&lit_msg,
suffix_suggestion,
Applicability::MachineApplicable,
);
} else {
// Missing try_into implementation for `{integer}` to `{float}`
err.span_suggestion(
expr.span,
&format!(
"{}, producing the floating point representation of the integer, \
rounded if necessary",
&msg,
),
cast_suggestion,
Applicability::MaybeIncorrect, // lossy conversion
);
}
true
}
_ => false,
}
}
}
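The refactor above replaces the repeated `if needs_paren { "(" } else { "" }` fragments with a single `with_opt_paren` function pointer chosen once per expression. A standalone illustration of that pattern (the variables here are stand-ins for the suggestion-building context):

```rust
use std::fmt;

fn main() {
    let src = "1 + 2";
    let needs_paren = true;
    // Non-capturing closures coerce to `fn` pointers, so each branch can
    // supply a different formatting strategy behind one name.
    let with_opt_paren: fn(&dyn fmt::Display) -> String =
        if needs_paren { |s| format!("({})", s) } else { |s| s.to_string() };
    assert_eq!(format!("{} as i64", with_opt_paren(&src)), "(1 + 2) as i64");
}
```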

View File

@ -5091,7 +5091,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
expected: Ty<'tcx>,
found: Ty<'tcx>,
) {
if self.tcx.hir().is_const_context(expr.hir_id) {
if self.tcx.hir().is_inside_const_context(expr.hir_id) {
// Do not suggest `Box::new` in const context.
return;
}
@ -5128,7 +5128,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
) -> bool {
// Handle #68197.
if self.tcx.hir().is_const_context(expr.hir_id) {
if self.tcx.hir().is_inside_const_context(expr.hir_id) {
// Do not suggest `Box::new` in const context.
return false;
}

View File

@ -126,7 +126,7 @@ impl<'a> SourceCollector<'a> {
&self.scx.themes,
);
self.scx.fs.write(&cur, v.as_bytes())?;
self.scx.local_sources.insert(p.clone(), href);
self.scx.local_sources.insert(p, href);
Ok(())
}
}

View File

@ -12,6 +12,7 @@ use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_middle::ty;
use rustc_resolve::ParentScope;
use rustc_session::lint;
use rustc_span::hygiene::MacroKind;
use rustc_span::symbol::Ident;
use rustc_span::symbol::Symbol;
use rustc_span::DUMMY_SP;
@ -122,6 +123,42 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
}
}
/// Resolves a string as a macro.
fn macro_resolve(&self, path_str: &str, parent_id: Option<hir::HirId>) -> Option<Res> {
let cx = self.cx;
let path = ast::Path::from_ident(Ident::from_str(path_str));
cx.enter_resolver(|resolver| {
if let Ok((Some(ext), res)) = resolver.resolve_macro_path(
&path,
None,
&ParentScope::module(resolver.graph_root()),
false,
false,
) {
if let SyntaxExtensionKind::LegacyBang { .. } = ext.kind {
return Some(res.map_id(|_| panic!("unexpected id")));
}
}
if let Some(res) = resolver.all_macros().get(&Symbol::intern(path_str)) {
return Some(res.map_id(|_| panic!("unexpected id")));
}
if let Some(module_id) = parent_id.or(self.mod_ids.last().cloned()) {
let module_id = cx.tcx.hir().local_def_id(module_id);
if let Ok((_, res)) =
resolver.resolve_str_path_error(DUMMY_SP, path_str, MacroNS, module_id)
{
// don't resolve builtins like `#[derive]`
if let Res::Def(..) = res {
let res = res.map_id(|_| panic!("unexpected node_id"));
return Some(res);
}
}
} else {
debug!("attempting to resolve item without parent module: {}", path_str);
}
None
})
}
/// Resolves a string as a path within a particular namespace. Also returns an optional
/// URL fragment in the case of variants and methods.
fn resolve(
@ -371,6 +408,22 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
}
}
/// Check for resolve collisions between a trait and its derive
///
/// These are common and we should just resolve to the trait in that case
fn is_derive_trait_collision<T>(ns: &PerNS<Option<(Res, T)>>) -> bool {
if let PerNS {
type_ns: Some((Res::Def(DefKind::Trait, _), _)),
macro_ns: Some((Res::Def(DefKind::Macro(MacroKind::Derive), _), _)),
..
} = *ns
{
true
} else {
false
}
}
impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
fn fold_item(&mut self, mut item: Item) -> Option<Item> {
let item_hir_id = if item.is_mod() {
@ -451,7 +504,7 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
..
},
..
})) => segments.first().and_then(|seg| Some(seg.ident.to_string())),
})) => segments.first().map(|seg| seg.ident.to_string()),
Some(hir::Node::Item(hir::Item {
ident, kind: hir::ItemKind::Enum(..), ..
}))
@ -532,6 +585,9 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
} else if link.starts_with("macro@") {
kind = Some(MacroNS);
link.trim_start_matches("macro@")
} else if link.starts_with("derive@") {
kind = Some(MacroNS);
link.trim_start_matches("derive@")
} else if link.ends_with('!') {
kind = Some(MacroNS);
link.trim_end_matches('!')
@ -614,8 +670,9 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
}
None => {
// Try everything!
let candidates = PerNS {
macro_ns: macro_resolve(cx, path_str)
let mut candidates = PerNS {
macro_ns: self
.macro_resolve(path_str, base_node)
.map(|res| (res, extra_fragment.clone())),
type_ns: match self.resolve(
path_str,
@ -668,10 +725,16 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
continue;
}
let is_unambiguous = candidates.clone().present_items().count() == 1;
if is_unambiguous {
let len = candidates.clone().present_items().count();
if len == 1 {
candidates.present_items().next().unwrap()
} else if len == 2 && is_derive_trait_collision(&candidates) {
candidates.type_ns.unwrap()
} else {
if is_derive_trait_collision(&candidates) {
candidates.macro_ns = None;
}
ambiguity_error(
cx,
&item,
@ -684,7 +747,7 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
}
}
Some(MacroNS) => {
if let Some(res) = macro_resolve(cx, path_str) {
if let Some(res) = self.macro_resolve(path_str, base_node) {
(res, extra_fragment)
} else {
resolution_failure(cx, &item, path_str, &dox, link_range);
@ -727,28 +790,6 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
}
}
/// Resolves a string as a macro.
fn macro_resolve(cx: &DocContext<'_>, path_str: &str) -> Option<Res> {
let path = ast::Path::from_ident(Ident::from_str(path_str));
cx.enter_resolver(|resolver| {
if let Ok((Some(ext), res)) = resolver.resolve_macro_path(
&path,
None,
&ParentScope::module(resolver.graph_root()),
false,
false,
) {
if let SyntaxExtensionKind::LegacyBang { .. } = ext.kind {
return Some(res.map_id(|_| panic!("unexpected id")));
}
}
if let Some(res) = resolver.all_macros().get(&Symbol::intern(path_str)) {
return Some(res.map_id(|_| panic!("unexpected id")));
}
None
})
}
fn build_diagnostic(
cx: &DocContext<'_>,
item: &Item,
@ -916,7 +957,7 @@ fn ambiguity_error(
Res::Def(DefKind::AssocFn | DefKind::Fn, _) => {
("add parentheses", format!("{}()", path_str))
}
Res::Def(DefKind::Macro(..), _) => {
Res::Def(DefKind::Macro(MacroKind::Bang), _) => {
("add an exclamation mark", format!("{}!", path_str))
}
_ => {
@ -930,6 +971,9 @@ fn ambiguity_error(
(Res::Def(DefKind::Mod, _), _) => "module",
(_, TypeNS) => "type",
(_, ValueNS) => "value",
(Res::Def(DefKind::Macro(MacroKind::Derive), _), MacroNS) => {
"derive"
}
(_, MacroNS) => "macro",
};

View File

@ -0,0 +1,35 @@
// force-host
// no-prefer-dynamic
// compile-flags: --crate-type proc-macro
#![crate_type="proc-macro"]
#![crate_name="intra_link_proc_macro_macro"]
extern crate proc_macro;
use proc_macro::TokenStream;
#[proc_macro_derive(DeriveA)]
pub fn a_derive(input: TokenStream) -> TokenStream {
input
}
#[proc_macro_derive(DeriveB)]
pub fn b_derive(input: TokenStream) -> TokenStream {
input
}
#[proc_macro_derive(DeriveTrait)]
pub fn trait_derive(input: TokenStream) -> TokenStream {
input
}
#[proc_macro_attribute]
pub fn attr_a(input: TokenStream, _args: TokenStream) -> TokenStream {
input
}
#[proc_macro_attribute]
pub fn attr_b(input: TokenStream, _args: TokenStream) -> TokenStream {
input
}

View File

@ -0,0 +1,27 @@
// aux-build:intra-link-proc-macro-macro.rs
// build-aux-docs
#![deny(intra_doc_link_resolution_failure)]
extern crate intra_link_proc_macro_macro;
pub use intra_link_proc_macro_macro::{DeriveA, attr_a};
use intra_link_proc_macro_macro::{DeriveB, attr_b};
// @has intra_link_proc_macro/struct.Foo.html
// @has - '//a/@href' '../intra_link_proc_macro/derive.DeriveA.html'
// @has - '//a/@href' '../intra_link_proc_macro/attr.attr_a.html'
// @has - '//a/@href' '../intra_link_proc_macro/trait.DeriveTrait.html'
// @has - '//a/@href' '../intra_link_proc_macro_macro/derive.DeriveB.html'
// @has - '//a/@href' '../intra_link_proc_macro_macro/attr.attr_b.html'
/// Link to [DeriveA], [attr_a], [DeriveB], [attr_b], [DeriveTrait]
pub struct Foo;
// @has intra_link_proc_macro/struct.Bar.html
// @has - '//a/@href' '../intra_link_proc_macro/derive.DeriveA.html'
// @has - '//a/@href' '../intra_link_proc_macro/attr.attr_a.html'
/// Link to [deriveA](derive@DeriveA) [attr](macro@attr_a)
pub struct Bar;
// this should not cause ambiguity errors
pub trait DeriveTrait {}

View File

@ -11,10 +11,6 @@ LL | | }
| |_- in this macro invocation
|
= note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
help: you can convert an `i32` to `isize` and panic if the converted value wouldn't fit
|
LL | $( $v = $s::V.try_into().unwrap(), )*
| ^^^^^^^^^^^^^^^^^^^^^^^^^
error[E0308]: mismatched types
--> $DIR/enum-discr-type-err.rs:18:21
@ -29,10 +25,6 @@ LL | | }
| |_- in this macro invocation
|
= note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
help: you can convert an `i32` to `isize` and panic if the converted value wouldn't fit
|
LL | $( $v = $s::V.try_into().unwrap(), )*
| ^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to 2 previous errors

View File

@ -3,11 +3,6 @@ error[E0308]: mismatched types
|
LL | X = Trait::Number,
| ^^^^^^^^^^^^^ expected `isize`, found `i32`
|
help: you can convert an `i32` to `isize` and panic if the converted value wouldn't fit
|
LL | X = Trait::Number.try_into().unwrap(),
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to previous error

File diff suppressed because it is too large.

View File

@ -3,6 +3,11 @@ error[E0308]: mismatched types
|
LL | const C: i32 = 1i8;
| ^^^ expected `i32`, found `i8`
|
help: change the type of the numeric literal from `i8` to `i32`
|
LL | const C: i32 = 1i32;
| ^^^^
error[E0308]: mismatched types
--> $DIR/const-scope.rs:2:15
@ -17,6 +22,11 @@ LL | let c: i32 = 1i8;
| --- ^^^ expected `i32`, found `i8`
| |
| expected due to this
|
help: change the type of the numeric literal from `i8` to `i32`
|
LL | let c: i32 = 1i32;
| ^^^^
error[E0308]: mismatched types
--> $DIR/const-scope.rs:6:17

View File

@ -1,4 +1,4 @@
error: unterminated character literal
error[E0762]: unterminated character literal
--> $DIR/lex-bad-char-literals-4.rs:4:5
|
LL | '●
@ -6,3 +6,4 @@ LL | '●
error: aborting due to previous error
For more information about this error, try `rustc --explain E0762`.

View File

@ -10,7 +10,7 @@ error: empty unicode escape (must have at least 1 hex digit)
LL | let _: char = '\u{}';
| ^^^^
error: unterminated character literal
error[E0762]: unterminated character literal
--> $DIR/lex-bad-char-literals-7.rs:11:13
|
LL | let _ = ' hello // here's a comment
@ -18,3 +18,4 @@ LL | let _ = ' hello // here's a comment
error: aborting due to 3 previous errors
For more information about this error, try `rustc --explain E0762`.

View File

@ -22,6 +22,9 @@ fn main() {
let f = [0_usize; -1_isize];
//~^ ERROR mismatched types
//~| expected `usize`, found `isize`
let f = [0; 4u8];
//~^ ERROR mismatched types
//~| expected `usize`, found `u8`
struct G {
g: (),
}

View File

@ -29,7 +29,7 @@ LL | let e = [0; "foo"];
| ^^^^^ expected `usize`, found `&str`
error[E0308]: mismatched types
--> $DIR/repeat_count.rs:28:17
--> $DIR/repeat_count.rs:31:17
|
LL | let g = [0; G { g: () }];
| ^^^^^^^^^^^ expected `usize`, found struct `main::G`
@ -39,24 +39,25 @@ error[E0308]: mismatched types
|
LL | let f = [0; -4_isize];
| ^^^^^^^^ expected `usize`, found `isize`
|
help: you can convert an `isize` to `usize` and panic if the converted value wouldn't fit
|
LL | let f = [0; (-4_isize).try_into().unwrap()];
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error[E0308]: mismatched types
--> $DIR/repeat_count.rs:22:23
|
LL | let f = [0_usize; -1_isize];
| ^^^^^^^^ expected `usize`, found `isize`
|
help: you can convert an `isize` to `usize` and panic if the converted value wouldn't fit
|
LL | let f = [0_usize; (-1_isize).try_into().unwrap()];
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to 8 previous errors
error[E0308]: mismatched types
--> $DIR/repeat_count.rs:25:17
|
LL | let f = [0; 4u8];
| ^^^ expected `usize`, found `u8`
|
help: change the type of the numeric literal from `u8` to `usize`
|
LL | let f = [0; 4usize];
| ^^^^^^
error: aborting due to 9 previous errors
Some errors have detailed explanations: E0308, E0435.
For more information about an error, try `rustc --explain E0308`.

View File

@ -58,10 +58,16 @@ label = "O-ARM"
[prioritize]
label = "I-prioritize"
prioritize_on = ["regression-from-stable-to-stable", "regression-from-stable-to-beta", "regression-from-stable-to-nightly"]
prioritize_on = [
"regression-from-stable-to-stable",
"regression-from-stable-to-beta",
"regression-from-stable-to-nightly",
"I-unsound 💥",
]
exclude_labels = [
"P-*",
"T-infra",
"T-release",
"requires-nightly",
]
zulip_stream = 227806