diff --git a/RELEASES.md b/RELEASES.md index 4185961187b..91e3c5f7219 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -157,7 +157,7 @@ Libraries produce a warning if their returning type is unused. - [The methods `checked_pow`, `saturating_pow`, `wrapping_pow`, and `overflowing_pow` are now available for all numeric types.][57873] These are - equivalvent to methods such as `wrapping_add` for the `pow` operation. + equivalent to methods such as `wrapping_add` for the `pow` operation. Stabilized APIs diff --git a/src/liballoc/tests/vec.rs b/src/liballoc/tests/vec.rs index 3307bdf94f9..5ddac673c9f 100644 --- a/src/liballoc/tests/vec.rs +++ b/src/liballoc/tests/vec.rs @@ -1152,3 +1152,24 @@ fn test_try_reserve_exact() { } } + +#[test] +fn test_stable_push_pop() { + // Test that, if we reserved enough space, adding and removing elements does not + // invalidate references into the vector (such as `v0`). This test also + // runs in Miri, which would detect such problems. + let mut v = Vec::with_capacity(10); + v.push(13); + + // laundering the lifetime -- we take care that `v` does not reallocate, so that's okay. + let v0 = unsafe { &*(&v[0] as *const _) }; + + // Now do a bunch of things and occasionally use `v0` again to assert it is still valid. + v.push(1); + v.push(2); + v.insert(1, 1); + assert_eq!(*v0, 13); + v.remove(1); + v.pop().unwrap(); + assert_eq!(*v0, 13); +} diff --git a/src/liballoc/vec.rs b/src/liballoc/vec.rs index dc661a267e2..5cb91395b7b 100644 --- a/src/liballoc/vec.rs +++ b/src/liballoc/vec.rs @@ -735,6 +735,75 @@ impl Vec { self } + /// Returns a raw pointer to the vector's buffer. + /// + /// The caller must ensure that the vector outlives the pointer this + /// function returns, or else it will end up pointing to garbage. + /// Modifying the vector may cause its buffer to be reallocated, + /// which would also make any pointers to it invalid. + /// + /// The caller must also ensure that the memory the pointer (non-transitively) points to + /// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer + /// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`]. + /// + /// # Examples + /// + /// ``` + /// let x = vec![1, 2, 4]; + /// let x_ptr = x.as_ptr(); + /// + /// unsafe { + /// for i in 0..x.len() { + /// assert_eq!(*x_ptr.add(i), 1 << i); + /// } + /// } + /// ``` + /// + /// [`as_mut_ptr`]: #method.as_mut_ptr + #[stable(feature = "vec_as_ptr", since = "1.37.0")] + #[inline] + pub fn as_ptr(&self) -> *const T { + // We shadow the slice method of the same name to avoid going through + // `deref`, which creates an intermediate reference. + let ptr = self.buf.ptr(); + unsafe { assume(!ptr.is_null()); } + ptr + } + + /// Returns an unsafe mutable pointer to the vector's buffer. + /// + /// The caller must ensure that the vector outlives the pointer this + /// function returns, or else it will end up pointing to garbage. + /// Modifying the vector may cause its buffer to be reallocated, + /// which would also make any pointers to it invalid. + /// + /// # Examples + /// + /// ``` + /// // Allocate vector big enough for 4 elements. + /// let size = 4; + /// let mut x: Vec = Vec::with_capacity(size); + /// let x_ptr = x.as_mut_ptr(); + /// + /// // Initialize elements via raw pointer writes, then set length. 
+ /// unsafe { + /// for i in 0..size { + /// *x_ptr.add(i) = i as i32; + /// } + /// x.set_len(size); + /// } + /// assert_eq!(&*x, &[0,1,2,3]); + /// ``` + #[stable(feature = "vec_as_ptr", since = "1.37.0")] + #[inline] + pub fn as_mut_ptr(&mut self) -> *mut T { + // We shadow the slice method of the same name to avoid going through + // `deref_mut`, which creates an intermediate reference. + let ptr = self.buf.ptr(); + unsafe { assume(!ptr.is_null()); } + ptr + } + /// Forces the length of the vector to `new_len`. /// /// This is a low-level operation that maintains none of the normal @@ -1706,9 +1775,7 @@ impl ops::Deref for Vec { fn deref(&self) -> &[T] { unsafe { - let p = self.buf.ptr(); - assume(!p.is_null()); - slice::from_raw_parts(p, self.len) + slice::from_raw_parts(self.as_ptr(), self.len) } } } @@ -1717,9 +1784,7 @@ impl ops::Deref for Vec { impl ops::DerefMut for Vec { fn deref_mut(&mut self) -> &mut [T] { unsafe { - let ptr = self.buf.ptr(); - assume(!ptr.is_null()); - slice::from_raw_parts_mut(ptr, self.len) + slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) } } } @@ -1754,7 +1819,6 @@ impl IntoIterator for Vec { fn into_iter(mut self) -> IntoIter { unsafe { let begin = self.as_mut_ptr(); - assume(!begin.is_null()); let end = if mem::size_of::() == 0 { arith_offset(begin as *const i8, self.len() as isize) as *const T } else { diff --git a/src/libcore/mem.rs b/src/libcore/mem.rs index 56869f38a4f..ce4aee7ebc5 100644 --- a/src/libcore/mem.rs +++ b/src/libcore/mem.rs @@ -982,7 +982,7 @@ impl DerefMut for ManuallyDrop { /// out.write(vec![1, 2, 3]); /// } /// -/// let mut v: MaybeUninit> = MaybeUninit::uninit(); +/// let mut v = MaybeUninit::uninit(); /// unsafe { make_vec(v.as_mut_ptr()); } /// // Now we know `v` is initialized! This also makes sure the vector gets /// // properly dropped. @@ -1071,7 +1071,7 @@ impl DerefMut for ManuallyDrop { /// optimizations, potentially resulting in a larger size: /// /// ```rust -/// # use std::mem::{MaybeUninit, size_of, align_of}; +/// # use std::mem::{MaybeUninit, size_of}; /// assert_eq!(size_of::>(), 1); /// assert_eq!(size_of::>>(), 2); /// ``` diff --git a/src/librustc/hir/map/definitions.rs b/src/librustc/hir/map/definitions.rs index 1cc9a2c0e8a..2324c3f0428 100644 --- a/src/librustc/hir/map/definitions.rs +++ b/src/librustc/hir/map/definitions.rs @@ -10,7 +10,6 @@ use crate::ich::Fingerprint; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::indexed_vec::{IndexVec}; use rustc_data_structures::stable_hasher::StableHasher; -use serialize::{Encodable, Decodable, Encoder, Decoder}; use crate::session::CrateDisambiguator; use std::borrow::Borrow; use std::fmt::Write; @@ -25,14 +24,13 @@ use crate::util::nodemap::NodeMap; /// Internally the DefPathTable holds a tree of DefKeys, where each DefKey /// stores the DefIndex of its parent. /// There is one DefPathTable for each crate. 
-#[derive(Clone, Default)] +#[derive(Clone, Default, RustcDecodable, RustcEncodable)] pub struct DefPathTable { index_to_key: Vec, def_path_hashes: Vec, } impl DefPathTable { - fn allocate(&mut self, key: DefKey, def_path_hash: DefPathHash) @@ -86,28 +84,6 @@ impl DefPathTable { } } - -impl Encodable for DefPathTable { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - // Index to key - self.index_to_key.encode(s)?; - - // DefPath hashes - self.def_path_hashes.encode(s)?; - - Ok(()) - } -} - -impl Decodable for DefPathTable { - fn decode(d: &mut D) -> Result { - Ok(DefPathTable { - index_to_key: Decodable::decode(d)?, - def_path_hashes : Decodable::decode(d)?, - }) - } -} - /// The definition table containing node definitions. /// It holds the `DefPathTable` for local `DefId`s/`DefPath`s and it also stores a /// mapping from `NodeId`s to local `DefId`s. diff --git a/src/librustc_mir/borrow_check/conflict_errors.rs b/src/librustc_mir/borrow_check/conflict_errors.rs index 4253962f144..8022d1f0c73 100644 --- a/src/librustc_mir/borrow_check/conflict_errors.rs +++ b/src/librustc_mir/borrow_check/conflict_errors.rs @@ -158,18 +158,6 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> { span, format!("value moved{} here, in previous iteration of loop", move_msg), ); - if Some(CompilerDesugaringKind::ForLoop) == span.compiler_desugaring_kind() { - if let Ok(snippet) = self.infcx.tcx.sess.source_map() - .span_to_snippet(span) - { - err.span_suggestion( - move_span, - "consider borrowing this to avoid moving it into the for loop", - format!("&{}", snippet), - Applicability::MaybeIncorrect, - ); - } - } is_loop_move = true; } else if move_site.traversed_back_edge { err.span_label( @@ -185,7 +173,17 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> { &mut err, format!("variable moved due to use{}", move_spans.describe()), ); - }; + } + if Some(CompilerDesugaringKind::ForLoop) == move_span.compiler_desugaring_kind() { + if let Ok(snippet) = self.infcx.tcx.sess.source_map().span_to_snippet(span) { + err.span_suggestion( + move_span, + "consider borrowing to avoid moving into the for loop", + format!("&{}", snippet), + Applicability::MaybeIncorrect, + ); + } + } } use_spans.var_span_label( diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs index e26b147ea58..de788a22886 100644 --- a/src/librustc_mir/interpret/operand.rs +++ b/src/librustc_mir/interpret/operand.rs @@ -475,6 +475,8 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> PlaceBase::Local(mir::RETURN_PLACE) => return err!(ReadFromReturnPointer), PlaceBase::Local(local) => { // FIXME use place_projection.is_empty() when is available + // Do not use the layout passed in as argument if the base we are looking at + // here is not the entire place. 
let layout = if let Place::Base(_) = mir_place { layout } else { diff --git a/src/librustc_mir/transform/const_prop.rs b/src/librustc_mir/transform/const_prop.rs index 2f6793e0494..b2976cd501e 100644 --- a/src/librustc_mir/transform/const_prop.rs +++ b/src/librustc_mir/transform/const_prop.rs @@ -295,6 +295,7 @@ impl<'a, 'mir, 'tcx> ConstPropagator<'a, 'mir, 'tcx> { } fn eval_place(&mut self, place: &Place<'tcx>, source_info: SourceInfo) -> Option> { + trace!("eval_place(place={:?})", place); match *place { Place::Base(PlaceBase::Local(loc)) => self.places[loc].clone(), Place::Projection(ref proj) => match proj.elem { @@ -516,6 +517,7 @@ impl<'a, 'mir, 'tcx> ConstPropagator<'a, 'mir, 'tcx> { } fn replace_with_const(&self, rval: &mut Rvalue<'tcx>, value: Const<'tcx>, span: Span) { + trace!("attepting to replace {:?} with {:?}", rval, value); self.ecx.validate_operand( value, vec![], @@ -579,6 +581,10 @@ impl CanConstProp { // FIXME(oli-obk): lint variables until they are used in a condition // FIXME(oli-obk): lint if return value is constant *val = mir.local_kind(local) == LocalKind::Temp; + + if !*val { + trace!("local {:?} can't be propagated because it's not a temporary", local); + } } cpv.visit_mir(mir); cpv.can_const_prop @@ -598,6 +604,7 @@ impl<'tcx> Visitor<'tcx> for CanConstProp { // FIXME(oli-obk): we could be more powerful here, if the multiple writes // only occur in independent execution paths MutatingUse(MutatingUseContext::Store) => if self.found_assignment[local] { + trace!("local {:?} can't be propagated because of multiple assignments", local); self.can_const_prop[local] = false; } else { self.found_assignment[local] = true @@ -609,7 +616,10 @@ impl<'tcx> Visitor<'tcx> for CanConstProp { NonMutatingUse(NonMutatingUseContext::Projection) | MutatingUse(MutatingUseContext::Projection) | NonUse(_) => {}, - _ => self.can_const_prop[local] = false, + _ => { + trace!("local {:?} can't be propagaged because it's used: {:?}", local, context); + self.can_const_prop[local] = false; + }, } } } diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index b5bb62a0f46..b9713e844d6 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -5194,7 +5194,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { &self, res: Res, span: Span, - ) -> Result<(DefKind, DefId, Ty<'tcx>), ErrorReported> { + ) -> Result { let tcx = self.tcx; if let Res::SelfCtor(impl_def_id) = res { let ty = self.impl_self_ty(span, impl_def_id).ty; @@ -5204,11 +5204,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { Some(adt_def) if adt_def.has_ctor() => { let variant = adt_def.non_enum_variant(); let ctor_def_id = variant.ctor_def_id.unwrap(); - Ok(( - DefKind::Ctor(CtorOf::Struct, variant.ctor_kind), - ctor_def_id, - tcx.type_of(ctor_def_id), - )) + Ok(Res::Def(DefKind::Ctor(CtorOf::Struct, variant.ctor_kind), ctor_def_id)) } _ => { let mut err = tcx.sess.struct_span_err(span, @@ -5235,15 +5231,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } } } else { - match res { - Res::Def(kind, def_id) => { - // The things we are substituting into the type should not contain - // escaping late-bound regions, and nor should the base type scheme. - let ty = tcx.type_of(def_id); - Ok((kind, def_id, ty)) - } - _ => span_bug!(span, "unexpected res in rewrite_self_ctor: {:?}", res), - } + Ok(res) } } @@ -5266,27 +5254,21 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let tcx = self.tcx; - match res { - Res::Local(hid) | Res::Upvar(hid, ..) 
=> { - let ty = self.local_ty(span, hid).decl_ty; - let ty = self.normalize_associated_types_in(span, &ty); - self.write_ty(hir_id, ty); - return (ty, res); - } - _ => {} - } - - let (kind, def_id, ty) = match self.rewrite_self_ctor(res, span) { - Ok(result) => result, + let res = match self.rewrite_self_ctor(res, span) { + Ok(res) => res, Err(ErrorReported) => return (tcx.types.err, res), }; - let path_segs = - AstConv::def_ids_for_value_path_segments(self, segments, self_ty, kind, def_id); + let path_segs = match res { + Res::Local(_) | Res::Upvar(..) => Vec::new(), + Res::Def(kind, def_id) => + AstConv::def_ids_for_value_path_segments(self, segments, self_ty, kind, def_id), + _ => bug!("instantiate_value_path on {:?}", res), + }; let mut user_self_ty = None; let mut is_alias_variant_ctor = false; - match kind { - DefKind::Ctor(CtorOf::Variant, _) => { + match res { + Res::Def(DefKind::Ctor(CtorOf::Variant, _), _) => { if let Some(self_ty) = self_ty { let adt_def = self_ty.ty_adt_def().unwrap(); user_self_ty = Some(UserSelfTy { @@ -5296,8 +5278,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { is_alias_variant_ctor = true; } } - DefKind::Method - | DefKind::AssociatedConst => { + Res::Def(DefKind::Method, def_id) + | Res::Def(DefKind::AssociatedConst, def_id) => { let container = tcx.associated_item(def_id).container; debug!("instantiate_value_path: def_id={:?} container={:?}", def_id, container); match container { @@ -5337,6 +5319,17 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { None } })); + + match res { + Res::Local(hid) | Res::Upvar(hid, ..) => { + let ty = self.local_ty(span, hid).decl_ty; + let ty = self.normalize_associated_types_in(span, &ty); + self.write_ty(hir_id, ty); + return (ty, res); + } + _ => {} + } + if generics_has_err { // Don't try to infer type parameters when prohibited generic arguments were given. user_self_ty = None; @@ -5374,6 +5367,12 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { tcx.generics_of(*def_id).has_self }).unwrap_or(false); + let def_id = res.def_id(); + + // The things we are substituting into the type should not contain + // escaping late-bound regions, and nor should the base type scheme. 
+ let ty = tcx.type_of(def_id); + let substs = AstConv::create_substs_for_generic_args( tcx, def_id, @@ -5490,7 +5489,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { ty_substituted); self.write_substs(hir_id, substs); - (ty_substituted, Res::Def(kind, def_id)) + (ty_substituted, res) } fn check_rustc_args_require_const(&self, diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs index 810acc9cc92..9431b559da5 100644 --- a/src/libsyntax/parse/diagnostics.rs +++ b/src/libsyntax/parse/diagnostics.rs @@ -1,19 +1,101 @@ use crate::ast; use crate::ast::{ - BlockCheckMode, Expr, ExprKind, Item, ItemKind, Pat, PatKind, QSelf, Ty, TyKind, VariantData, + BlockCheckMode, BinOpKind, Expr, ExprKind, Item, ItemKind, Pat, PatKind, PathSegment, QSelf, + Ty, TyKind, VariantData, }; -use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType}; -use crate::parse::token; -use crate::parse::PResult; -use crate::parse::Parser; +use crate::parse::{SeqSep, token, PResult, Parser}; +use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType}; use crate::print::pprust; use crate::ptr::P; use crate::source_map::Spanned; use crate::symbol::kw; use crate::ThinVec; -use errors::{Applicability, DiagnosticBuilder}; -use log::debug; -use syntax_pos::{Span, DUMMY_SP}; +use crate::util::parser::AssocOp; +use errors::{Applicability, DiagnosticBuilder, DiagnosticId}; +use syntax_pos::{Span, DUMMY_SP, MultiSpan}; +use log::{debug, trace}; + +pub enum Error { + FileNotFoundForModule { + mod_name: String, + default_path: String, + secondary_path: String, + dir_path: String, + }, + DuplicatePaths { + mod_name: String, + default_path: String, + secondary_path: String, + }, + UselessDocComment, + InclusiveRangeWithNoEnd, +} + +impl Error { + fn span_err>( + self, + sp: S, + handler: &errors::Handler, + ) -> DiagnosticBuilder<'_> { + match self { + Error::FileNotFoundForModule { + ref mod_name, + ref default_path, + ref secondary_path, + ref dir_path, + } => { + let mut err = struct_span_err!( + handler, + sp, + E0583, + "file not found for module `{}`", + mod_name, + ); + err.help(&format!( + "name the file either {} or {} inside the directory \"{}\"", + default_path, + secondary_path, + dir_path, + )); + err + } + Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => { + let mut err = struct_span_err!( + handler, + sp, + E0584, + "file for module `{}` found at both {} and {}", + mod_name, + default_path, + secondary_path, + ); + err.help("delete or rename one of them to remove the ambiguity"); + err + } + Error::UselessDocComment => { + let mut err = struct_span_err!( + handler, + sp, + E0585, + "found a documentation comment that doesn't document anything", + ); + err.help("doc comments must come before what they document, maybe a comment was \ + intended with `//`?"); + err + } + Error::InclusiveRangeWithNoEnd => { + let mut err = struct_span_err!( + handler, + sp, + E0586, + "inclusive range with no end", + ); + err.help("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)"); + err + } + } + } +} pub trait RecoverQPath: Sized + 'static { const PATH_STYLE: PathStyle = PathStyle::Expr; @@ -63,6 +145,364 @@ impl RecoverQPath for Expr { } impl<'a> Parser<'a> { + pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> { + self.span_fatal(self.span, m) + } + + pub fn span_fatal>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { + self.sess.span_diagnostic.struct_span_fatal(sp, m) + } + + pub fn span_fatal_err>(&self, 
sp: S, err: Error) -> DiagnosticBuilder<'a> { + err.span_err(sp, self.diagnostic()) + } + + pub fn bug(&self, m: &str) -> ! { + self.sess.span_diagnostic.span_bug(self.span, m) + } + + pub fn span_err>(&self, sp: S, m: &str) { + self.sess.span_diagnostic.span_err(sp, m) + } + + crate fn struct_span_err>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { + self.sess.span_diagnostic.struct_span_err(sp, m) + } + + crate fn span_bug>(&self, sp: S, m: &str) -> ! { + self.sess.span_diagnostic.span_bug(sp, m) + } + + crate fn cancel(&self, err: &mut DiagnosticBuilder<'_>) { + self.sess.span_diagnostic.cancel(err) + } + + crate fn diagnostic(&self) -> &'a errors::Handler { + &self.sess.span_diagnostic + } + + crate fn expected_ident_found(&self) -> DiagnosticBuilder<'a> { + let mut err = self.struct_span_err( + self.span, + &format!("expected identifier, found {}", self.this_token_descr()), + ); + if let token::Ident(ident, false) = &self.token { + if ident.is_raw_guess() { + err.span_suggestion( + self.span, + "you can escape reserved keywords to use them as identifiers", + format!("r#{}", ident), + Applicability::MaybeIncorrect, + ); + } + } + if let Some(token_descr) = self.token_descr() { + err.span_label(self.span, format!("expected identifier, found {}", token_descr)); + } else { + err.span_label(self.span, "expected identifier"); + if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) { + err.span_suggestion( + self.span, + "remove this comma", + String::new(), + Applicability::MachineApplicable, + ); + } + } + err + } + + pub fn expected_one_of_not_found( + &mut self, + edible: &[token::Token], + inedible: &[token::Token], + ) -> PResult<'a, bool /* recovered */> { + fn tokens_to_string(tokens: &[TokenType]) -> String { + let mut i = tokens.iter(); + // This might be a sign we need a connect method on Iterator. 
+ let b = i.next() + .map_or(String::new(), |t| t.to_string()); + i.enumerate().fold(b, |mut b, (i, a)| { + if tokens.len() > 2 && i == tokens.len() - 2 { + b.push_str(", or "); + } else if tokens.len() == 2 && i == tokens.len() - 2 { + b.push_str(" or "); + } else { + b.push_str(", "); + } + b.push_str(&a.to_string()); + b + }) + } + + let mut expected = edible.iter() + .map(|x| TokenType::Token(x.clone())) + .chain(inedible.iter().map(|x| TokenType::Token(x.clone()))) + .chain(self.expected_tokens.iter().cloned()) + .collect::>(); + expected.sort_by_cached_key(|x| x.to_string()); + expected.dedup(); + let expect = tokens_to_string(&expected[..]); + let actual = self.this_token_to_string(); + let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 { + let short_expect = if expected.len() > 6 { + format!("{} possible tokens", expected.len()) + } else { + expect.clone() + }; + (format!("expected one of {}, found `{}`", expect, actual), + (self.sess.source_map().next_point(self.prev_span), + format!("expected one of {} here", short_expect))) + } else if expected.is_empty() { + (format!("unexpected token: `{}`", actual), + (self.prev_span, "unexpected token after this".to_string())) + } else { + (format!("expected {}, found `{}`", expect, actual), + (self.sess.source_map().next_point(self.prev_span), + format!("expected {} here", expect))) + }; + self.last_unexpected_token_span = Some(self.span); + let mut err = self.fatal(&msg_exp); + if self.token.is_ident_named("and") { + err.span_suggestion_short( + self.span, + "use `&&` instead of `and` for the boolean operator", + "&&".to_string(), + Applicability::MaybeIncorrect, + ); + } + if self.token.is_ident_named("or") { + err.span_suggestion_short( + self.span, + "use `||` instead of `or` for the boolean operator", + "||".to_string(), + Applicability::MaybeIncorrect, + ); + } + let sp = if self.token == token::Token::Eof { + // This is EOF, don't want to point at the following char, but rather the last token + self.prev_span + } else { + label_sp + }; + match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt { + TokenType::Token(t) => Some(t.clone()), + _ => None, + }).collect::>(), err) { + Err(e) => err = e, + Ok(recovered) => { + return Ok(recovered); + } + } + + let is_semi_suggestable = expected.iter().any(|t| match t { + TokenType::Token(token::Semi) => true, // we expect a `;` here + _ => false, + }) && ( // a `;` would be expected before the current keyword + self.token.is_keyword(kw::Break) || + self.token.is_keyword(kw::Continue) || + self.token.is_keyword(kw::For) || + self.token.is_keyword(kw::If) || + self.token.is_keyword(kw::Let) || + self.token.is_keyword(kw::Loop) || + self.token.is_keyword(kw::Match) || + self.token.is_keyword(kw::Return) || + self.token.is_keyword(kw::While) + ); + let cm = self.sess.source_map(); + match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { + (Ok(ref a), Ok(ref b)) if a.line != b.line && is_semi_suggestable => { + // The spans are in different lines, expected `;` and found `let` or `return`. + // High likelihood that it is only a missing `;`. 
+ err.span_suggestion_short( + label_sp, + "a semicolon may be missing here", + ";".to_string(), + Applicability::MaybeIncorrect, + ); + err.emit(); + return Ok(true); + } + (Ok(ref a), Ok(ref b)) if a.line == b.line => { + // When the spans are in the same line, it means that the only content between + // them is whitespace, point at the found token in that case: + // + // X | () => { syntax error }; + // | ^^^^^ expected one of 8 possible tokens here + // + // instead of having: + // + // X | () => { syntax error }; + // | -^^^^^ unexpected token + // | | + // | expected one of 8 possible tokens here + err.span_label(self.span, label_exp); + } + _ if self.prev_span == syntax_pos::DUMMY_SP => { + // Account for macro context where the previous span might not be + // available to avoid incorrect output (#54841). + err.span_label(self.span, "unexpected token"); + } + _ => { + err.span_label(sp, label_exp); + err.span_label(self.span, "unexpected token"); + } + } + Err(err) + } + + /// Eats and discards tokens until one of `kets` is encountered. Respects token trees, + /// passes through any errors encountered. Used for error recovery. + crate fn eat_to_tokens(&mut self, kets: &[&token::Token]) { + let handler = self.diagnostic(); + + if let Err(ref mut err) = self.parse_seq_to_before_tokens( + kets, + SeqSep::none(), + TokenExpectType::Expect, + |p| Ok(p.parse_token_tree()), + ) { + handler.cancel(err); + } + } + + /// This function checks if there are trailing angle brackets and produces + /// a diagnostic to suggest removing them. + /// + /// ```ignore (diagnostic) + /// let _ = vec![1, 2, 3].into_iter().collect::>>>(); + /// ^^ help: remove extra angle brackets + /// ``` + crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::Token) { + // This function is intended to be invoked after parsing a path segment where there are two + // cases: + // + // 1. A specific token is expected after the path segment. + // eg. `x.foo(`, `x.foo::(` (parenthesis - method call), + // `Foo::`, or `Foo::::` (mod sep - continued path). + // 2. No specific token is expected after the path segment. + // eg. `x.foo` (field access) + // + // This function is called after parsing `.foo` and before parsing the token `end` (if + // present). This includes any angle bracket arguments, such as `.foo::` or + // `Foo::`. + + // We only care about trailing angle brackets if we previously parsed angle bracket + // arguments. This helps stop us incorrectly suggesting that extra angle brackets be + // removed in this case: + // + // `x.foo >> (3)` (where `x.foo` is a `u32` for example) + // + // This case is particularly tricky as we won't notice it just looking at the tokens - + // it will appear the same (in terms of upcoming tokens) as below (since the `::` will + // have already been parsed): + // + // `x.foo::>>(3)` + let parsed_angle_bracket_args = segment.args + .as_ref() + .map(|args| args.is_angle_bracketed()) + .unwrap_or(false); + + debug!( + "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}", + parsed_angle_bracket_args, + ); + if !parsed_angle_bracket_args { + return; + } + + // Keep the span at the start so we can highlight the sequence of `>` characters to be + // removed. + let lo = self.span; + + // We need to look-ahead to see if we have `>` characters without moving the cursor forward + // (since we might have the field access case and the characters we're eating are + // actual operators and not trailing characters - ie `x.foo >> 3`). 
+ let mut position = 0; + + // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how + // many of each (so we can correctly pluralize our error messages) and continue to + // advance. + let mut number_of_shr = 0; + let mut number_of_gt = 0; + while self.look_ahead(position, |t| { + trace!("check_trailing_angle_brackets: t={:?}", t); + if *t == token::BinOp(token::BinOpToken::Shr) { + number_of_shr += 1; + true + } else if *t == token::Gt { + number_of_gt += 1; + true + } else { + false + } + }) { + position += 1; + } + + // If we didn't find any trailing `>` characters, then we have nothing to error about. + debug!( + "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}", + number_of_gt, number_of_shr, + ); + if number_of_gt < 1 && number_of_shr < 1 { + return; + } + + // Finally, double check that we have our end token as otherwise this is the + // second case. + if self.look_ahead(position, |t| { + trace!("check_trailing_angle_brackets: t={:?}", t); + *t == end + }) { + // Eat from where we started until the end token so that parsing can continue + // as if we didn't have those extra angle brackets. + self.eat_to_tokens(&[&end]); + let span = lo.until(self.span); + + let plural = number_of_gt > 1 || number_of_shr >= 1; + self.diagnostic() + .struct_span_err( + span, + &format!("unmatched angle bracket{}", if plural { "s" } else { "" }), + ) + .span_suggestion( + span, + &format!("remove extra angle bracket{}", if plural { "s" } else { "" }), + String::new(), + Applicability::MachineApplicable, + ) + .emit(); + } + } + + /// Produce an error if comparison operators are chained (RFC #558). + /// We only need to check lhs, not rhs, because all comparison ops + /// have same precedence and are left-associative + crate fn check_no_chained_comparison(&self, lhs: &Expr, outer_op: &AssocOp) { + debug_assert!(outer_op.is_comparison(), + "check_no_chained_comparison: {:?} is not comparison", + outer_op); + match lhs.node { + ExprKind::Binary(op, _, _) if op.node.is_comparison() => { + // respan to include both operators + let op_span = op.span.to(self.span); + let mut err = self.diagnostic().struct_span_err(op_span, + "chained comparison operators require parentheses"); + if op.node == BinOpKind::Lt && + *outer_op == AssocOp::Less || // Include `<` to provide this recommendation + *outer_op == AssocOp::Greater // even in a case like the following: + { // Foo>> + err.help( + "use `::<...>` instead of `<...>` if you meant to specify type arguments"); + err.help("or use `(...)` if you meant to specify fn arguments"); + } + err.emit(); + } + _ => {} + } + } + crate fn maybe_report_ambiguous_plus( &mut self, allow_plus: bool, @@ -594,6 +1034,138 @@ impl<'a> Parser<'a> { } } + crate fn check_for_for_in_in_typo(&mut self, in_span: Span) { + if self.eat_keyword(kw::In) { + // a common typo: `for _ in in bar {}` + let mut err = self.sess.span_diagnostic.struct_span_err( + self.prev_span, + "expected iterable, found keyword `in`", + ); + err.span_suggestion_short( + in_span.until(self.prev_span), + "remove the duplicated `in`", + String::new(), + Applicability::MachineApplicable, + ); + err.emit(); + } + } + + crate fn expected_semi_or_open_brace(&mut self) -> PResult<'a, ast::TraitItem> { + let token_str = self.this_token_descr(); + let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", token_str)); + err.span_label(self.span, "expected `;` or `{`"); + Err(err) + } + + crate fn eat_incorrect_doc_comment(&mut self, applied_to: &str) { + if let 
token::DocComment(_) = self.token { + let mut err = self.diagnostic().struct_span_err( + self.span, + &format!("documentation comments cannot be applied to {}", applied_to), + ); + err.span_label(self.span, "doc comments are not allowed here"); + err.emit(); + self.bump(); + } else if self.token == token::Pound && self.look_ahead(1, |t| { + *t == token::OpenDelim(token::Bracket) + }) { + let lo = self.span; + // Skip every token until next possible arg. + while self.token != token::CloseDelim(token::Bracket) { + self.bump(); + } + let sp = lo.to(self.span); + self.bump(); + let mut err = self.diagnostic().struct_span_err( + sp, + &format!("attributes cannot be applied to {}", applied_to), + ); + err.span_label(sp, "attributes are not allowed here"); + err.emit(); + } + } + + crate fn argument_without_type( + &mut self, + err: &mut DiagnosticBuilder<'_>, + pat: P, + require_name: bool, + is_trait_item: bool, + ) { + // If we find a pattern followed by an identifier, it could be an (incorrect) + // C-style parameter declaration. + if self.check_ident() && self.look_ahead(1, |t| { + *t == token::Comma || *t == token::CloseDelim(token::Paren) + }) { + let ident = self.parse_ident().unwrap(); + let span = pat.span.with_hi(ident.span.hi()); + + err.span_suggestion( + span, + "declare the type after the parameter binding", + String::from(": "), + Applicability::HasPlaceholders, + ); + } else if require_name && is_trait_item { + if let PatKind::Ident(_, ident, _) = pat.node { + err.span_suggestion( + pat.span, + "explicitly ignore parameter", + format!("_: {}", ident), + Applicability::MachineApplicable, + ); + } + + err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)"); + } + } + + crate fn recover_arg_parse(&mut self) -> PResult<'a, (P, P)> { + let pat = self.parse_pat(Some("argument name"))?; + self.expect(&token::Colon)?; + let ty = self.parse_ty()?; + + let mut err = self.diagnostic().struct_span_err_with_code( + pat.span, + "patterns aren't allowed in methods without bodies", + DiagnosticId::Error("E0642".into()), + ); + err.span_suggestion_short( + pat.span, + "give this argument a name or use an underscore to ignore it", + "_".to_owned(), + Applicability::MachineApplicable, + ); + err.emit(); + + // Pretend the pattern is `_`, to avoid duplicate errors from AST validation. 
+ let pat = P(Pat { + node: PatKind::Wild, + span: pat.span, + id: ast::DUMMY_NODE_ID + }); + Ok((pat, ty)) + } + + crate fn recover_bad_self_arg( + &mut self, + mut arg: ast::Arg, + is_trait_item: bool, + ) -> PResult<'a, ast::Arg> { + let sp = arg.pat.span; + arg.ty.node = TyKind::Err; + let mut err = self.struct_span_err(sp, "unexpected `self` parameter in function"); + if is_trait_item { + err.span_label(sp, "must be the first associated function parameter"); + } else { + err.span_label(sp, "not valid as function parameter"); + err.note("`self` is only valid as the first parameter of an associated function"); + } + err.emit(); + Ok(arg) + } + crate fn consume_block(&mut self, delim: token::DelimToken) { let mut brace_depth = 0; loop { diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 56951ae0801..6c29437362c 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -47,14 +47,12 @@ use crate::parse::PResult; use crate::ThinVec; use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint}; use crate::symbol::{kw, sym, Symbol}; +use crate::parse::diagnostics::Error; use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError}; use rustc_target::spec::abi::{self, Abi}; -use syntax_pos::{ - BytePos, DUMMY_SP, FileName, MultiSpan, Span, - hygiene::CompilerDesugaringKind, -}; -use log::{debug, trace}; +use syntax_pos::{Span, BytePos, DUMMY_SP, FileName, hygiene::CompilerDesugaringKind}; +use log::debug; use std::borrow::Cow; use std::cmp; @@ -217,7 +215,7 @@ pub struct Parser<'a> { /// into modules, and sub-parsers have new values for this name. pub root_module_name: Option, crate expected_tokens: Vec, - token_cursor: TokenCursor, + crate token_cursor: TokenCursor, desugar_doc_comments: bool, /// Whether we should configure out of line modules as we parse. pub cfg_mods: bool, @@ -232,7 +230,7 @@ pub struct Parser<'a> { /// it gets removed from here. Every entry left at the end gets emitted as an independent /// error. crate unclosed_delims: Vec, - last_unexpected_token_span: Option, + crate last_unexpected_token_span: Option, /// If present, this `Parser` is not parsing Rust code but rather a macro call. crate subparser_name: Option<&'static str>, } @@ -245,19 +243,19 @@ impl<'a> Drop for Parser<'a> { } #[derive(Clone)] -struct TokenCursor { - frame: TokenCursorFrame, - stack: Vec, +crate struct TokenCursor { + crate frame: TokenCursorFrame, + crate stack: Vec, } #[derive(Clone)] -struct TokenCursorFrame { - delim: token::DelimToken, - span: DelimSpan, - open_delim: bool, - tree_cursor: tokenstream::Cursor, - close_delim: bool, - last_token: LastToken, +crate struct TokenCursorFrame { + crate delim: token::DelimToken, + crate span: DelimSpan, + crate open_delim: bool, + crate tree_cursor: tokenstream::Cursor, + crate close_delim: bool, + crate last_token: LastToken, } /// This is used in `TokenCursorFrame` above to track tokens that are consumed @@ -278,7 +276,7 @@ struct TokenCursorFrame { /// You can find some more example usage of this in the `collect_tokens` method /// on the parser. 
#[derive(Clone)] -enum LastToken { +crate enum LastToken { Collecting(Vec), Was(Option), } @@ -430,65 +428,6 @@ pub struct ModulePathSuccess { warn: bool, } -pub enum Error { - FileNotFoundForModule { - mod_name: String, - default_path: String, - secondary_path: String, - dir_path: String, - }, - DuplicatePaths { - mod_name: String, - default_path: String, - secondary_path: String, - }, - UselessDocComment, - InclusiveRangeWithNoEnd, -} - -impl Error { - fn span_err>(self, - sp: S, - handler: &errors::Handler) -> DiagnosticBuilder<'_> { - match self { - Error::FileNotFoundForModule { ref mod_name, - ref default_path, - ref secondary_path, - ref dir_path } => { - let mut err = struct_span_err!(handler, sp, E0583, - "file not found for module `{}`", mod_name); - err.help(&format!("name the file either {} or {} inside the directory \"{}\"", - default_path, - secondary_path, - dir_path)); - err - } - Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => { - let mut err = struct_span_err!(handler, sp, E0584, - "file for module `{}` found at both {} and {}", - mod_name, - default_path, - secondary_path); - err.help("delete or rename one of them to remove the ambiguity"); - err - } - Error::UselessDocComment => { - let mut err = struct_span_err!(handler, sp, E0585, - "found a documentation comment that doesn't document anything"); - err.help("doc comments must come before what they document, maybe a comment was \ - intended with `//`?"); - err - } - Error::InclusiveRangeWithNoEnd => { - let mut err = struct_span_err!(handler, sp, E0586, - "inclusive range with no end"); - err.help("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)"); - err - } - } - } -} - #[derive(Debug)] enum LhsExpr { NotYetParsed, @@ -529,7 +468,7 @@ fn dummy_arg(span: Span) -> Arg { } #[derive(Copy, Clone, Debug)] -enum TokenExpectType { +crate enum TokenExpectType { Expect, NoExpect, } @@ -610,7 +549,7 @@ impl<'a> Parser<'a> { pprust::token_to_string(&self.token) } - fn token_descr(&self) -> Option<&'static str> { + crate fn token_descr(&self) -> Option<&'static str> { Some(match &self.token { t if t.is_special_ident() => "reserved identifier", t if t.is_used_keyword() => "keyword", @@ -657,23 +596,6 @@ impl<'a> Parser<'a> { edible: &[token::Token], inedible: &[token::Token], ) -> PResult<'a, bool /* recovered */> { - fn tokens_to_string(tokens: &[TokenType]) -> String { - let mut i = tokens.iter(); - // This might be a sign we need a connect method on Iterator. 
- let b = i.next() - .map_or(String::new(), |t| t.to_string()); - i.enumerate().fold(b, |mut b, (i, a)| { - if tokens.len() > 2 && i == tokens.len() - 2 { - b.push_str(", or "); - } else if tokens.len() == 2 && i == tokens.len() - 2 { - b.push_str(" or "); - } else { - b.push_str(", "); - } - b.push_str(&a.to_string()); - b - }) - } if edible.contains(&self.token) { self.bump(); Ok(false) @@ -683,127 +605,15 @@ impl<'a> Parser<'a> { } else if self.last_unexpected_token_span == Some(self.span) { FatalError.raise(); } else { - let mut expected = edible.iter() - .map(|x| TokenType::Token(x.clone())) - .chain(inedible.iter().map(|x| TokenType::Token(x.clone()))) - .chain(self.expected_tokens.iter().cloned()) - .collect::>(); - expected.sort_by_cached_key(|x| x.to_string()); - expected.dedup(); - let expect = tokens_to_string(&expected[..]); - let actual = self.this_token_to_string(); - let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 { - let short_expect = if expected.len() > 6 { - format!("{} possible tokens", expected.len()) - } else { - expect.clone() - }; - (format!("expected one of {}, found `{}`", expect, actual), - (self.sess.source_map().next_point(self.prev_span), - format!("expected one of {} here", short_expect))) - } else if expected.is_empty() { - (format!("unexpected token: `{}`", actual), - (self.prev_span, "unexpected token after this".to_string())) - } else { - (format!("expected {}, found `{}`", expect, actual), - (self.sess.source_map().next_point(self.prev_span), - format!("expected {} here", expect))) - }; - self.last_unexpected_token_span = Some(self.span); - let mut err = self.fatal(&msg_exp); - if self.token.is_ident_named("and") { - err.span_suggestion_short( - self.span, - "use `&&` instead of `and` for the boolean operator", - "&&".to_string(), - Applicability::MaybeIncorrect, - ); - } - if self.token.is_ident_named("or") { - err.span_suggestion_short( - self.span, - "use `||` instead of `or` for the boolean operator", - "||".to_string(), - Applicability::MaybeIncorrect, - ); - } - let sp = if self.token == token::Token::Eof { - // This is EOF, don't want to point at the following char, but rather the last token - self.prev_span - } else { - label_sp - }; - match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt { - TokenType::Token(t) => Some(t.clone()), - _ => None, - }).collect::>(), err) { - Err(e) => err = e, - Ok(recovered) => { - return Ok(recovered); - } - } - - let is_semi_suggestable = expected.iter().any(|t| match t { - TokenType::Token(token::Semi) => true, // we expect a `;` here - _ => false, - }) && ( // a `;` would be expected before the current keyword - self.token.is_keyword(kw::Break) || - self.token.is_keyword(kw::Continue) || - self.token.is_keyword(kw::For) || - self.token.is_keyword(kw::If) || - self.token.is_keyword(kw::Let) || - self.token.is_keyword(kw::Loop) || - self.token.is_keyword(kw::Match) || - self.token.is_keyword(kw::Return) || - self.token.is_keyword(kw::While) - ); - let cm = self.sess.source_map(); - match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { - (Ok(ref a), Ok(ref b)) if a.line != b.line && is_semi_suggestable => { - // The spans are in different lines, expected `;` and found `let` or `return`. - // High likelihood that it is only a missing `;`. 
- err.span_suggestion_short( - label_sp, - "a semicolon may be missing here", - ";".to_string(), - Applicability::MaybeIncorrect, - ); - err.emit(); - return Ok(true); - } - (Ok(ref a), Ok(ref b)) if a.line == b.line => { - // When the spans are in the same line, it means that the only content between - // them is whitespace, point at the found token in that case: - // - // X | () => { syntax error }; - // | ^^^^^ expected one of 8 possible tokens here - // - // instead of having: - // - // X | () => { syntax error }; - // | -^^^^^ unexpected token - // | | - // | expected one of 8 possible tokens here - err.span_label(self.span, label_exp); - } - _ if self.prev_span == DUMMY_SP => { - // Account for macro context where the previous span might not be - // available to avoid incorrect output (#54841). - err.span_label(self.span, "unexpected token"); - } - _ => { - err.span_label(sp, label_exp); - err.span_label(self.span, "unexpected token"); - } - } - Err(err) + self.expected_one_of_not_found(edible, inedible) } } /// Returns the span of expr, if it was not interpolated or the span of the interpolated token. - fn interpolated_or_expr_span(&self, - expr: PResult<'a, P>) - -> PResult<'a, (Span, P)> { + fn interpolated_or_expr_span( + &self, + expr: PResult<'a, P>, + ) -> PResult<'a, (Span, P)> { expr.map(|e| { if self.prev_token_kind == PrevTokenKind::Interpolated { (self.prev_span, e) @@ -813,36 +623,6 @@ impl<'a> Parser<'a> { }) } - fn expected_ident_found(&self) -> DiagnosticBuilder<'a> { - let mut err = self.struct_span_err(self.span, - &format!("expected identifier, found {}", - self.this_token_descr())); - if let token::Ident(ident, false) = &self.token { - if ident.is_raw_guess() { - err.span_suggestion( - self.span, - "you can escape reserved keywords to use them as identifiers", - format!("r#{}", ident), - Applicability::MaybeIncorrect, - ); - } - } - if let Some(token_descr) = self.token_descr() { - err.span_label(self.span, format!("expected identifier, found {}", token_descr)); - } else { - err.span_label(self.span, "expected identifier"); - if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) { - err.span_suggestion( - self.span, - "remove this comma", - String::new(), - Applicability::MachineApplicable, - ); - } - } - err - } - pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> { self.parse_ident_common(true) } @@ -925,7 +705,7 @@ impl<'a> Parser<'a> { } } - fn check_ident(&mut self) -> bool { + crate fn check_ident(&mut self) -> bool { if self.token.is_ident() { true } else { @@ -1115,19 +895,6 @@ impl<'a> Parser<'a> { } } - /// Eats and discards tokens until one of `kets` is encountered. Respects token trees, - /// passes through any errors encountered. Used for error recovery. - fn eat_to_tokens(&mut self, kets: &[&token::Token]) { - let handler = self.diagnostic(); - - if let Err(ref mut err) = self.parse_seq_to_before_tokens(kets, - SeqSep::none(), - TokenExpectType::Expect, - |p| Ok(p.parse_token_tree())) { - handler.cancel(err); - } - } - /// Parses a sequence, including the closing delimiter. The function /// `f` must consume tokens until reaching the next separator or /// closing bracket. 
@@ -1159,7 +926,7 @@ impl<'a> Parser<'a> { self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) } - fn parse_seq_to_before_tokens( + crate fn parse_seq_to_before_tokens( &mut self, kets: &[&token::Token], sep: SeqSep, @@ -1319,35 +1086,6 @@ impl<'a> Parser<'a> { None => self.look_ahead_span(dist - 1), } } - pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> { - self.sess.span_diagnostic.struct_span_fatal(self.span, m) - } - pub fn span_fatal>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { - self.sess.span_diagnostic.struct_span_fatal(sp, m) - } - fn span_fatal_err>(&self, sp: S, err: Error) -> DiagnosticBuilder<'a> { - err.span_err(sp, self.diagnostic()) - } - fn bug(&self, m: &str) -> ! { - self.sess.span_diagnostic.span_bug(self.span, m) - } - fn span_err>(&self, sp: S, m: &str) { - self.sess.span_diagnostic.span_err(sp, m) - } - crate fn struct_span_err>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { - self.sess.span_diagnostic.struct_span_err(sp, m) - } - crate fn span_bug>(&self, sp: S, m: &str) -> ! { - self.sess.span_diagnostic.span_bug(sp, m) - } - - fn cancel(&self, err: &mut DiagnosticBuilder<'_>) { - self.sess.span_diagnostic.cancel(err) - } - - crate fn diagnostic(&self) -> &'a errors::Handler { - &self.sess.span_diagnostic - } /// Is the current token one of the keywords that signals a bare function type? fn token_is_bare_fn_keyword(&mut self) -> bool { @@ -1507,20 +1245,12 @@ impl<'a> Parser<'a> { Some(body) } _ => { - let token_str = self.this_token_descr(); - let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", - token_str)); - err.span_label(self.span, "expected `;` or `{`"); - return Err(err); + return self.expected_semi_or_open_brace(); } } } _ => { - let token_str = self.this_token_descr(); - let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", - token_str)); - err.span_label(self.span, "expected `;` or `{`"); - return Err(err); + return self.expected_semi_or_open_brace(); } }; (ident, ast::TraitItemKind::Method(sig, body), generics) @@ -1776,79 +1506,24 @@ impl<'a> Parser<'a> { /// Skips unexpected attributes and doc comments in this position and emits an appropriate /// error. - fn eat_incorrect_doc_comment(&mut self, applied_to: &str) { - if let token::DocComment(_) = self.token { - let mut err = self.diagnostic().struct_span_err( - self.span, - &format!("documentation comments cannot be applied to {}", applied_to), - ); - err.span_label(self.span, "doc comments are not allowed here"); - err.emit(); - self.bump(); - } else if self.token == token::Pound && self.look_ahead(1, |t| { - *t == token::OpenDelim(token::Bracket) - }) { - let lo = self.span; - // Skip every token until next possible arg. - while self.token != token::CloseDelim(token::Bracket) { - self.bump(); - } - let sp = lo.to(self.span); - self.bump(); - let mut err = self.diagnostic().struct_span_err( - sp, - &format!("attributes cannot be applied to {}", applied_to), - ); - err.span_label(sp, "attributes are not allowed here"); - err.emit(); - } - } - /// This version of parse arg doesn't necessarily require identifier names. 
- fn parse_arg_general(&mut self, require_name: bool, is_trait_item: bool, - allow_c_variadic: bool) -> PResult<'a, Arg> { - if let Ok(Some(_)) = self.parse_self_arg() { - let mut err = self.struct_span_err(self.prev_span, - "unexpected `self` argument in function"); - err.span_label(self.prev_span, - "`self` is only valid as the first argument of an associated function"); - return Err(err); + fn parse_arg_general( + &mut self, + require_name: bool, + is_trait_item: bool, + allow_c_variadic: bool, + ) -> PResult<'a, Arg> { + if let Ok(Some(arg)) = self.parse_self_arg() { + return self.recover_bad_self_arg(arg, is_trait_item); } let (pat, ty) = if require_name || self.is_named_argument() { - debug!("parse_arg_general parse_pat (require_name:{})", - require_name); + debug!("parse_arg_general parse_pat (require_name:{})", require_name); self.eat_incorrect_doc_comment("method arguments"); let pat = self.parse_pat(Some("argument name"))?; if let Err(mut err) = self.expect(&token::Colon) { - // If we find a pattern followed by an identifier, it could be an (incorrect) - // C-style parameter declaration. - if self.check_ident() && self.look_ahead(1, |t| { - *t == token::Comma || *t == token::CloseDelim(token::Paren) - }) { - let ident = self.parse_ident().unwrap(); - let span = pat.span.with_hi(ident.span.hi()); - - err.span_suggestion( - span, - "declare the type after the parameter binding", - String::from(": "), - Applicability::HasPlaceholders, - ); - } else if require_name && is_trait_item { - if let PatKind::Ident(_, ident, _) = pat.node { - err.span_suggestion( - pat.span, - "explicitly ignore parameter", - format!("_: {}", ident), - Applicability::MachineApplicable, - ); - } - - err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)"); - } - + self.argument_without_type(&mut err, pat, require_name, is_trait_item); return Err(err); } @@ -1885,30 +1560,7 @@ impl<'a> Parser<'a> { // Recover from attempting to parse the argument as a type without pattern. err.cancel(); mem::replace(self, parser_snapshot_before_ty); - let pat = self.parse_pat(Some("argument name"))?; - self.expect(&token::Colon)?; - let ty = self.parse_ty()?; - - let mut err = self.diagnostic().struct_span_err_with_code( - pat.span, - "patterns aren't allowed in methods without bodies", - DiagnosticId::Error("E0642".into()), - ); - err.span_suggestion_short( - pat.span, - "give this argument a name or use an underscore to ignore it", - "_".to_owned(), - Applicability::MachineApplicable, - ); - err.emit(); - - // Pretend the pattern is `_`, to avoid duplicate errors from AST validation. - let pat = P(Pat { - node: PatKind::Wild, - span: pat.span, - id: ast::DUMMY_NODE_ID - }); - (pat, ty) + self.recover_arg_parse()? } } }; @@ -1916,11 +1568,6 @@ impl<'a> Parser<'a> { Ok(Arg { ty, pat, id: ast::DUMMY_NODE_ID, source: ast::ArgSource::Normal }) } - /// Parses a single function argument. - crate fn parse_arg(&mut self) -> PResult<'a, Arg> { - self.parse_arg_general(true, false, false) - } - /// Parses an argument in a lambda header (e.g., `|arg, arg|`). fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> { let pat = self.parse_pat(Some("argument name"))?; @@ -2885,116 +2532,6 @@ impl<'a> Parser<'a> { }) } - /// This function checks if there are trailing angle brackets and produces - /// a diagnostic to suggest removing them. 
- /// - /// ```ignore (diagnostic) - /// let _ = vec![1, 2, 3].into_iter().collect::>>>(); - /// ^^ help: remove extra angle brackets - /// ``` - fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::Token) { - // This function is intended to be invoked after parsing a path segment where there are two - // cases: - // - // 1. A specific token is expected after the path segment. - // eg. `x.foo(`, `x.foo::(` (parenthesis - method call), - // `Foo::`, or `Foo::::` (mod sep - continued path). - // 2. No specific token is expected after the path segment. - // eg. `x.foo` (field access) - // - // This function is called after parsing `.foo` and before parsing the token `end` (if - // present). This includes any angle bracket arguments, such as `.foo::` or - // `Foo::`. - - // We only care about trailing angle brackets if we previously parsed angle bracket - // arguments. This helps stop us incorrectly suggesting that extra angle brackets be - // removed in this case: - // - // `x.foo >> (3)` (where `x.foo` is a `u32` for example) - // - // This case is particularly tricky as we won't notice it just looking at the tokens - - // it will appear the same (in terms of upcoming tokens) as below (since the `::` will - // have already been parsed): - // - // `x.foo::>>(3)` - let parsed_angle_bracket_args = segment.args - .as_ref() - .map(|args| args.is_angle_bracketed()) - .unwrap_or(false); - - debug!( - "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}", - parsed_angle_bracket_args, - ); - if !parsed_angle_bracket_args { - return; - } - - // Keep the span at the start so we can highlight the sequence of `>` characters to be - // removed. - let lo = self.span; - - // We need to look-ahead to see if we have `>` characters without moving the cursor forward - // (since we might have the field access case and the characters we're eating are - // actual operators and not trailing characters - ie `x.foo >> 3`). - let mut position = 0; - - // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how - // many of each (so we can correctly pluralize our error messages) and continue to - // advance. - let mut number_of_shr = 0; - let mut number_of_gt = 0; - while self.look_ahead(position, |t| { - trace!("check_trailing_angle_brackets: t={:?}", t); - if *t == token::BinOp(token::BinOpToken::Shr) { - number_of_shr += 1; - true - } else if *t == token::Gt { - number_of_gt += 1; - true - } else { - false - } - }) { - position += 1; - } - - // If we didn't find any trailing `>` characters, then we have nothing to error about. - debug!( - "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}", - number_of_gt, number_of_shr, - ); - if number_of_gt < 1 && number_of_shr < 1 { - return; - } - - // Finally, double check that we have our end token as otherwise this is the - // second case. - if self.look_ahead(position, |t| { - trace!("check_trailing_angle_brackets: t={:?}", t); - *t == end - }) { - // Eat from where we started until the end token so that parsing can continue - // as if we didn't have those extra angle brackets. 
- self.eat_to_tokens(&[&end]); - let span = lo.until(self.span); - - let plural = number_of_gt > 1 || number_of_shr >= 1; - self.diagnostic() - .struct_span_err( - span, - &format!("unmatched angle bracket{}", if plural { "s" } else { "" }), - ) - .span_suggestion( - span, - &format!("remove extra angle bracket{}", if plural { "s" } else { "" }), - String::new(), - Applicability::MachineApplicable, - ) - .emit(); - } - } - fn parse_dot_or_call_expr_with_(&mut self, e0: P, lo: Span) -> PResult<'a, P> { let mut e = e0; let mut hi; @@ -3556,33 +3093,6 @@ impl<'a> Parser<'a> { } } - /// Produce an error if comparison operators are chained (RFC #558). - /// We only need to check lhs, not rhs, because all comparison ops - /// have same precedence and are left-associative - fn check_no_chained_comparison(&self, lhs: &Expr, outer_op: &AssocOp) { - debug_assert!(outer_op.is_comparison(), - "check_no_chained_comparison: {:?} is not comparison", - outer_op); - match lhs.node { - ExprKind::Binary(op, _, _) if op.node.is_comparison() => { - // respan to include both operators - let op_span = op.span.to(self.span); - let mut err = self.diagnostic().struct_span_err(op_span, - "chained comparison operators require parentheses"); - if op.node == BinOpKind::Lt && - *outer_op == AssocOp::Less || // Include `<` to provide this recommendation - *outer_op == AssocOp::Greater // even in a case like the following: - { // Foo>> - err.help( - "use `::<...>` instead of `<...>` if you meant to specify type arguments"); - err.help("or use `(...)` if you meant to specify fn arguments"); - } - err.emit(); - } - _ => {} - } - } - /// Parse prefix-forms of range notation: `..expr`, `..`, `..=expr` fn parse_prefix_range_expr(&mut self, already_parsed_attrs: Option>) @@ -3609,7 +3119,7 @@ impl<'a> Parser<'a> { hi = x.span; x })?) 
-         } else {
+        } else {
             None
         };
         let limits = if tok == token::DotDot {
@@ -3759,20 +3269,7 @@ impl<'a> Parser<'a> {
             err.emit();
         }
         let in_span = self.prev_span;
-        if self.eat_keyword(kw::In) {
-            // a common typo: `for _ in in bar {}`
-            let mut err = self.sess.span_diagnostic.struct_span_err(
-                self.prev_span,
-                "expected iterable, found keyword `in`",
-            );
-            err.span_suggestion_short(
-                in_span.until(self.prev_span),
-                "remove the duplicated `in`",
-                String::new(),
-                Applicability::MachineApplicable,
-            );
-            err.emit();
-        }
+        self.check_for_for_in_in_typo(in_span);
         let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
         let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?;
         attrs.extend(iattrs);
@@ -6354,7 +5851,9 @@ impl<'a> Parser<'a> {
             let (constness, unsafety, mut asyncness, abi) = self.parse_fn_front_matter()?;
             let ident = self.parse_ident()?;
             let mut generics = self.parse_generics()?;
-            let mut decl = self.parse_fn_decl_with_self(|p| p.parse_arg())?;
+            let mut decl = self.parse_fn_decl_with_self(|p| {
+                p.parse_arg_general(true, true, false)
+            })?;
             generics.where_clause = self.parse_where_clause()?;
             self.construct_async_arguments(&mut asyncness, &mut decl);
             *at_end = true;
diff --git a/src/test/ui/invalid-self-argument/bare-fn-start.rs b/src/test/ui/invalid-self-argument/bare-fn-start.rs
index 741ba5f41ce..a003a01941b 100644
--- a/src/test/ui/invalid-self-argument/bare-fn-start.rs
+++ b/src/test/ui/invalid-self-argument/bare-fn-start.rs
@@ -1,5 +1,6 @@
 fn a(&self) { }
-//~^ ERROR unexpected `self` argument in function
-//~| NOTE `self` is only valid as the first argument of an associated function
+//~^ ERROR unexpected `self` parameter in function
+//~| NOTE not valid as function parameter
+//~| NOTE `self` is only valid as the first parameter of an associated function
 
 fn main() { }
diff --git a/src/test/ui/invalid-self-argument/bare-fn-start.stderr b/src/test/ui/invalid-self-argument/bare-fn-start.stderr
index 6a878b619d8..23de6502094 100644
--- a/src/test/ui/invalid-self-argument/bare-fn-start.stderr
+++ b/src/test/ui/invalid-self-argument/bare-fn-start.stderr
@@ -1,8 +1,10 @@
-error: unexpected `self` argument in function
-  --> $DIR/bare-fn-start.rs:1:7
+error: unexpected `self` parameter in function
+  --> $DIR/bare-fn-start.rs:1:6
    |
 LL | fn a(&self) { }
-   |       ^^^^ `self` is only valid as the first argument of an associated function
+   |      ^^^^^ not valid as function parameter
+   |
+   = note: `self` is only valid as the first parameter of an associated function
 
 error: aborting due to previous error
 
diff --git a/src/test/ui/invalid-self-argument/bare-fn.rs b/src/test/ui/invalid-self-argument/bare-fn.rs
index 704fa996ca6..73d68e8b7a5 100644
--- a/src/test/ui/invalid-self-argument/bare-fn.rs
+++ b/src/test/ui/invalid-self-argument/bare-fn.rs
@@ -1,5 +1,6 @@
 fn b(foo: u32, &mut self) { }
-//~^ ERROR unexpected `self` argument in function
-//~| NOTE `self` is only valid as the first argument of an associated function
+//~^ ERROR unexpected `self` parameter in function
+//~| NOTE not valid as function parameter
+//~| NOTE `self` is only valid as the first parameter of an associated function
 
 fn main() { }
diff --git a/src/test/ui/invalid-self-argument/bare-fn.stderr b/src/test/ui/invalid-self-argument/bare-fn.stderr
index b13f746a4ec..601a51bb4a9 100644
--- a/src/test/ui/invalid-self-argument/bare-fn.stderr
+++ b/src/test/ui/invalid-self-argument/bare-fn.stderr
@@ -1,8 +1,10 @@
-error: unexpected `self` argument in function
-  --> $DIR/bare-fn.rs:1:21
+error: unexpected `self` parameter in function
+  --> $DIR/bare-fn.rs:1:16
    |
 LL | fn b(foo: u32, &mut self) { }
-   |                     ^^^^ `self` is only valid as the first argument of an associated function
+   |                ^^^^^^^^^ not valid as function parameter
+   |
+   = note: `self` is only valid as the first parameter of an associated function
 
 error: aborting due to previous error
 
diff --git a/src/test/ui/invalid-self-argument/trait-fn.rs b/src/test/ui/invalid-self-argument/trait-fn.rs
index 31e867bc764..1e8220d7b4a 100644
--- a/src/test/ui/invalid-self-argument/trait-fn.rs
+++ b/src/test/ui/invalid-self-argument/trait-fn.rs
@@ -2,8 +2,8 @@ struct Foo {}
 
 impl Foo {
     fn c(foo: u32, self) {}
-    //~^ ERROR unexpected `self` argument in function
-    //~| NOTE `self` is only valid as the first argument of an associated function
+    //~^ ERROR unexpected `self` parameter in function
+    //~| NOTE must be the first associated function parameter
 
     fn good(&mut self, foo: u32) {}
 }
diff --git a/src/test/ui/invalid-self-argument/trait-fn.stderr b/src/test/ui/invalid-self-argument/trait-fn.stderr
index b3c2cc5b5eb..96a2251c036 100644
--- a/src/test/ui/invalid-self-argument/trait-fn.stderr
+++ b/src/test/ui/invalid-self-argument/trait-fn.stderr
@@ -1,8 +1,8 @@
-error: unexpected `self` argument in function
+error: unexpected `self` parameter in function
   --> $DIR/trait-fn.rs:4:20
    |
 LL |     fn c(foo: u32, self) {}
-   |                    ^^^^ `self` is only valid as the first argument of an associated function
+   |                    ^^^^ must be the first associated function parameter
 
 error: aborting due to previous error
 
diff --git a/src/test/ui/issues/issue-60989.rs b/src/test/ui/issues/issue-60989.rs
new file mode 100644
index 00000000000..930e98bedce
--- /dev/null
+++ b/src/test/ui/issues/issue-60989.rs
@@ -0,0 +1,18 @@
+struct A {}
+struct B {}
+
+impl From<A> for B {
+    fn from(a: A) -> B {
+        B{}
+    }
+}
+
+fn main() {
+    let c1 = ();
+    c1::<()>;
+    //~^ ERROR type arguments are not allowed for this type
+
+    let c1 = A {};
+    c1::<Into<B>>;
+    //~^ ERROR type arguments are not allowed for this type
+}
diff --git a/src/test/ui/issues/issue-60989.stderr b/src/test/ui/issues/issue-60989.stderr
new file mode 100644
index 00000000000..55a0b9626df
--- /dev/null
+++ b/src/test/ui/issues/issue-60989.stderr
@@ -0,0 +1,15 @@
+error[E0109]: type arguments are not allowed for this type
+  --> $DIR/issue-60989.rs:12:10
+   |
+LL |     c1::<()>;
+   |          ^^ type argument not allowed
+
+error[E0109]: type arguments are not allowed for this type
+  --> $DIR/issue-60989.rs:16:10
+   |
+LL |     c1::<Into<B>>;
+   |          ^^^^^^^ type argument not allowed
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0109`.
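
Note (editorial, not part of the patch): the issue-60989 test above exercises E0109, which fires when a turbofish is attached to a plain value binding rather than a generic item. A minimal sketch of the failing pattern and a working alternative, assuming the same `A`/`B` types and `From` impl as the test:

```rust
struct A {}
struct B {}

impl From<A> for B {
    fn from(_: A) -> B {
        B {}
    }
}

fn main() {
    let c1 = A {};
    // c1::<Into<B>>;           // error[E0109]: type arguments are not allowed for this type
    let converted: B = c1.into(); // perform the conversion instead of annotating the binding
    let _ = converted;
}
```
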
diff --git a/src/test/ui/issues/issue-61108.rs b/src/test/ui/issues/issue-61108.rs
new file mode 100644
index 00000000000..0a883b95818
--- /dev/null
+++ b/src/test/ui/issues/issue-61108.rs
@@ -0,0 +1,7 @@
+fn main() {
+    let mut bad_letters = vec!['e', 't', 'o', 'i'];
+    for l in bad_letters {
+        // something here
+    }
+    bad_letters.push('s'); //~ ERROR borrow of moved value: `bad_letters`
+}
diff --git a/src/test/ui/issues/issue-61108.stderr b/src/test/ui/issues/issue-61108.stderr
new file mode 100644
index 00000000000..8523a6f6548
--- /dev/null
+++ b/src/test/ui/issues/issue-61108.stderr
@@ -0,0 +1,17 @@
+error[E0382]: borrow of moved value: `bad_letters`
+  --> $DIR/issue-61108.rs:6:5
+   |
+LL |     let mut bad_letters = vec!['e', 't', 'o', 'i'];
+   |         --------------- move occurs because `bad_letters` has type `std::vec::Vec<char>`, which does not implement the `Copy` trait
+LL |     for l in bad_letters {
+   |              -----------
+   |              |
+   |              value moved here
+   |              help: consider borrowing to avoid moving into the for loop: `&bad_letters`
+...
+LL |     bad_letters.push('s');
+   |     ^^^^^^^^^^^ value borrowed here after move
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0382`.
diff --git a/src/test/ui/parser/self-in-function-arg.rs b/src/test/ui/parser/self-in-function-arg.rs
new file mode 100644
index 00000000000..6172ffe1b03
--- /dev/null
+++ b/src/test/ui/parser/self-in-function-arg.rs
@@ -0,0 +1,3 @@
+fn foo(x:i32, self: i32) -> i32 { self } //~ ERROR unexpected `self` parameter in function
+
+fn main() {}
diff --git a/src/test/ui/parser/self-in-function-arg.stderr b/src/test/ui/parser/self-in-function-arg.stderr
new file mode 100644
index 00000000000..f58df9b9e79
--- /dev/null
+++ b/src/test/ui/parser/self-in-function-arg.stderr
@@ -0,0 +1,10 @@
+error: unexpected `self` parameter in function
+  --> $DIR/self-in-function-arg.rs:1:15
+   |
+LL | fn foo(x:i32, self: i32) -> i32 { self }
+   |               ^^^^ not valid as function parameter
+   |
+   = note: `self` is only valid as the first parameter of an associated function
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/suggestions/borrow-for-loop-head.stderr b/src/test/ui/suggestions/borrow-for-loop-head.stderr
index 10287f59cce..36bced9e433 100644
--- a/src/test/ui/suggestions/borrow-for-loop-head.stderr
+++ b/src/test/ui/suggestions/borrow-for-loop-head.stderr
@@ -13,11 +13,10 @@ LL |     let a = vec![1, 2, 3];
    |         - move occurs because `a` has type `std::vec::Vec<i32>`, which does not implement the `Copy` trait
 LL |     for i in &a {
 LL |         for j in a {
-   |                  ^ value moved here, in previous iteration of loop
-help: consider borrowing this to avoid moving it into the for loop
-   |
-LL |         for j in &a {
-   |                  ^^
+   |                  ^
+   |                  |
+   |                  value moved here, in previous iteration of loop
+   |                  help: consider borrowing to avoid moving into the for loop: `&a`
 
 error: aborting due to 2 previous errors
 
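
Note (editorial, not part of the patch): the reworked suggestion in borrow-for-loop-head.stderr and the new issue-61108 test both point at the same fix, iterating by reference so the vector is not moved into the loop head. A minimal sketch of the rewrite the new suggestion proposes:

```rust
fn main() {
    let mut bad_letters = vec!['e', 't', 'o', 'i'];
    // Borrow the vector in the loop head (`&bad_letters`) so it is not moved.
    for l in &bad_letters {
        println!("skipping {}", l);
    }
    // The vector is still owned here, so this mutation compiles.
    bad_letters.push('s');
}
```
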