move syntax::parse -> librustc_parse
also move MACRO_ARGUMENTS -> librustc_parse
This commit is contained in:
parent be023ebe85
commit 4ae2728fa8

26 Cargo.lock
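For crates elsewhere in the tree, the mechanical effect of this commit is to add a dependency on the new `rustc_parse` crate and import the parsing entry points from it instead of from `syntax::parse`. A minimal before/after sketch, using only names that appear in this diff (the wrapper function itself is hypothetical):

```rust
// Before this commit the parser lived in libsyntax:
//     use syntax::parse::new_parser_from_source_str;
// After it, the same entry point lives in the new crate:
use rustc_parse::new_parser_from_source_str;
use syntax::sess::ParseSess;
use syntax::source_map::FileName;

// Hypothetical caller: build a parser over an in-memory source string,
// the way librustc_interface and librustdoc do elsewhere in this diff.
fn parser_from_str<'a>(sess: &'a ParseSess, src: String) -> rustc_parse::parser::Parser<'a> {
    new_parser_from_source_str(sess, FileName::Custom("example".to_string()), src)
}
```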
@@ -3504,6 +3504,7 @@ dependencies = [
"rustc_lint",
"rustc_metadata",
"rustc_mir",
"rustc_parse",
"rustc_plugin",
"rustc_plugin_impl",
"rustc_save_analysis",
@@ -3571,6 +3572,7 @@ dependencies = [
"rustc_lint",
"rustc_metadata",
"rustc_mir",
"rustc_parse",
"rustc_passes",
"rustc_plugin_impl",
"rustc_privacy",
@@ -3648,6 +3650,7 @@ dependencies = [
"rustc_data_structures",
"rustc_errors",
"rustc_index",
"rustc_parse",
"rustc_target",
"serialize",
"smallvec 1.0.0",
@@ -3691,6 +3694,21 @@ dependencies = [
"core",
]

[[package]]
name = "rustc_parse"
version = "0.0.0"
dependencies = [
"bitflags",
"log",
"rustc_data_structures",
"rustc_errors",
"rustc_lexer",
"rustc_target",
"smallvec 1.0.0",
"syntax",
"syntax_pos",
]

[[package]]
name = "rustc_passes"
version = "0.0.0"
@@ -3700,6 +3718,7 @@ dependencies = [
"rustc_data_structures",
"rustc_errors",
"rustc_index",
"rustc_parse",
"rustc_target",
"syntax",
"syntax_pos",
@@ -3762,6 +3781,7 @@ dependencies = [
"rustc",
"rustc_codegen_utils",
"rustc_data_structures",
"rustc_parse",
"serde_json",
"syntax",
"syntax_pos",
@@ -4371,14 +4391,11 @@ dependencies = [
name = "syntax_expand"
version = "0.0.0"
dependencies = [
"bitflags",
"lazy_static 1.3.0",
"log",
"rustc_data_structures",
"rustc_errors",
"rustc_index",
"rustc_lexer",
"scoped-tls",
"rustc_parse",
"serialize",
"smallvec 1.0.0",
"syntax",
@@ -4393,6 +4410,7 @@ dependencies = [
"log",
"rustc_data_structures",
"rustc_errors",
"rustc_parse",
"rustc_target",
"smallvec 1.0.0",
"syntax",
@@ -21,6 +21,7 @@ rustc_data_structures = { path = "../librustc_data_structures" }
errors = { path = "../librustc_errors", package = "rustc_errors" }
rustc_metadata = { path = "../librustc_metadata" }
rustc_mir = { path = "../librustc_mir" }
rustc_parse = { path = "../librustc_parse" }
rustc_plugin = { path = "../librustc_plugin/deprecated" } # To get this in the sysroot
rustc_plugin_impl = { path = "../librustc_plugin" }
rustc_save_analysis = { path = "../librustc_save_analysis" }
@@ -65,7 +65,6 @@ use std::time::Instant;
use syntax::ast;
use syntax::source_map::FileLoader;
use syntax::feature_gate::{GatedCfg, UnstableFeatures};
use syntax::parse;
use syntax::symbol::sym;
use syntax_pos::{DUMMY_SP, FileName};
@@ -1096,14 +1095,16 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
}

fn parse_crate_attrs<'a>(sess: &'a Session, input: &Input) -> PResult<'a, Vec<ast::Attribute>> {
match *input {
Input::File(ref ifile) => {
parse::parse_crate_attrs_from_file(ifile, &sess.parse_sess)
match input {
Input::File(ifile) => {
rustc_parse::parse_crate_attrs_from_file(ifile, &sess.parse_sess)
}
Input::Str { ref name, ref input } => {
parse::parse_crate_attrs_from_source_str(name.clone(),
input.clone(),
&sess.parse_sess)
Input::Str { name, input } => {
rustc_parse::parse_crate_attrs_from_source_str(
name.clone(),
input.clone(),
&sess.parse_sess,
)
}
}
}
@@ -16,6 +16,7 @@ smallvec = { version = "1.0", features = ["union", "may_dangle"] }
syntax = { path = "../libsyntax" }
syntax_ext = { path = "../libsyntax_ext" }
syntax_expand = { path = "../libsyntax_expand" }
rustc_parse = { path = "../librustc_parse" }
syntax_pos = { path = "../libsyntax_pos" }
rustc_serialize = { path = "../libserialize", package = "serialize" }
rustc = { path = "../librustc" }
@@ -11,11 +11,11 @@ use rustc_codegen_utils::codegen_backend::CodegenBackend;
use rustc_data_structures::OnDrop;
use rustc_data_structures::sync::Lrc;
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use rustc_parse::new_parser_from_source_str;
use std::path::PathBuf;
use std::result;
use std::sync::{Arc, Mutex};
use syntax::ast::{self, MetaItemKind};
use syntax::parse::new_parser_from_source_str;
use syntax::token;
use syntax::source_map::{FileName, FileLoader, SourceMap};
use syntax::sess::ParseSess;
@@ -26,6 +26,7 @@ use rustc_errors::PResult;
use rustc_incremental;
use rustc_metadata::cstore;
use rustc_mir as mir;
use rustc_parse::{parse_crate_from_file, parse_crate_from_source_str};
use rustc_passes::{self, ast_validation, hir_stats, layout_test};
use rustc_plugin as plugin;
use rustc_plugin::registry::Registry;
@@ -37,7 +38,6 @@ use syntax::{self, ast, visit};
use syntax::early_buffered_lints::BufferedEarlyLint;
use syntax_expand::base::{NamedSyntaxExtension, ExtCtxt};
use syntax::mut_visit::MutVisitor;
use syntax::parse;
use syntax::util::node_count::NodeCounter;
use syntax::symbol::Symbol;
use syntax_pos::FileName;
@@ -60,12 +60,11 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
let krate = time(sess, "parsing", || {
let _prof_timer = sess.prof.generic_activity("parse_crate");

match *input {
Input::File(ref file) => parse::parse_crate_from_file(file, &sess.parse_sess),
Input::Str {
ref input,
ref name,
} => parse::parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess),
match input {
Input::File(file) => parse_crate_from_file(file, &sess.parse_sess),
Input::Str { input, name } => {
parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess)
}
}
})?;
@@ -484,7 +483,7 @@ pub fn lower_to_hir(
) -> Result<hir::map::Forest> {
// Lower AST to HIR.
let hir_forest = time(sess, "lowering AST -> HIR", || {
let nt_to_tokenstream = syntax::parse::nt_to_tokenstream;
let nt_to_tokenstream = rustc_parse::nt_to_tokenstream;
let hir_crate = lower_crate(sess, &dep_graph, &krate, resolver, nt_to_tokenstream);

if sess.opts.debugging_opts.hir_stats {
@@ -50,6 +50,7 @@ pub fn diagnostics_registry() -> Registry {
// FIXME: need to figure out a way to get these back in here
// all_errors.extend_from_slice(get_codegen_backend(sess).diagnostics());
all_errors.extend_from_slice(&rustc_metadata::error_codes::DIAGNOSTICS);
all_errors.extend_from_slice(&rustc_parse::error_codes::DIAGNOSTICS);
all_errors.extend_from_slice(&rustc_passes::error_codes::DIAGNOSTICS);
all_errors.extend_from_slice(&rustc_plugin::error_codes::DIAGNOSTICS);
all_errors.extend_from_slice(&rustc_mir::error_codes::DIAGNOSTICS);
@@ -1,7 +1,7 @@
//! Low-level Rust lexer.
//!
//! Tokens produced by this lexer are not yet ready for parsing the Rust syntax,
//! for that see `libsyntax::parse::lexer`, which converts this basic token stream
//! for that see `librustc_parse::lexer`, which converts this basic token stream
//! into wide tokens used by actual parser.
//!
//! The purpose of this crate is to convert raw sources into a labeled sequence
@@ -23,4 +23,5 @@ rustc_serialize = { path = "../libserialize", package = "serialize" }
stable_deref_trait = "1.0.0"
syntax = { path = "../libsyntax" }
syntax_expand = { path = "../libsyntax_expand" }
rustc_parse = { path = "../librustc_parse" }
syntax_pos = { path = "../libsyntax_pos" }
@@ -18,6 +18,8 @@ use rustc::hir::map::{DefKey, DefPath, DefPathHash};
use rustc::hir::map::definitions::DefPathTable;
use rustc::util::nodemap::DefIdMap;
use rustc_data_structures::svh::Svh;
use rustc_parse::source_file_to_stream;
use rustc_parse::parser::emit_unclosed_delims;

use smallvec::SmallVec;
use std::any::Any;
@@ -27,8 +29,6 @@ use std::sync::Arc;
use syntax::ast;
use syntax::attr;
use syntax::source_map;
use syntax::parse::source_file_to_stream;
use syntax::parse::parser::emit_unclosed_delims;
use syntax::source_map::Spanned;
use syntax::symbol::Symbol;
use syntax_pos::{Span, FileName};
21 src/librustc_parse/Cargo.toml (new file)
@@ -0,0 +1,21 @@
[package]
authors = ["The Rust Project Developers"]
name = "rustc_parse"
version = "0.0.0"
edition = "2018"

[lib]
name = "rustc_parse"
path = "lib.rs"
doctest = false

[dependencies]
bitflags = "1.0"
log = "0.4"
syntax_pos = { path = "../libsyntax_pos" }
syntax = { path = "../libsyntax" }
errors = { path = "../librustc_errors", package = "rustc_errors" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_lexer = { path = "../librustc_lexer" }
rustc_target = { path = "../librustc_target" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
174 src/librustc_parse/error_codes.rs (new file)
@@ -0,0 +1,174 @@
// Error messages for EXXXX errors.
// Each message should start and end with a new line, and be wrapped to 80
// characters. In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use
// `:set tw=0` to disable.
syntax::register_diagnostics! {

E0178: r##"
In types, the `+` type operator has low precedence, so it is often necessary
to use parentheses.

For example:

```compile_fail,E0178
trait Foo {}

struct Bar<'a> {
w: &'a Foo + Copy, // error, use &'a (Foo + Copy)
x: &'a Foo + 'a, // error, use &'a (Foo + 'a)
y: &'a mut Foo + 'a, // error, use &'a mut (Foo + 'a)
z: fn() -> Foo + 'a, // error, use fn() -> (Foo + 'a)
}
```

More details can be found in [RFC 438].

[RFC 438]: https://github.com/rust-lang/rfcs/pull/438
"##,

E0583: r##"
A file wasn't found for an out-of-line module.

Erroneous code example:

```ignore (compile_fail not working here; see Issue #43707)
mod file_that_doesnt_exist; // error: file not found for module

fn main() {}
```

Please be sure that a file corresponding to the module exists. If you
want to use a module named `file_that_doesnt_exist`, you need to have a file
named `file_that_doesnt_exist.rs` or `file_that_doesnt_exist/mod.rs` in the
same directory.
"##,

E0584: r##"
A doc comment that is not attached to anything has been encountered.

Erroneous code example:

```compile_fail,E0584
trait Island {
fn lost();

/// I'm lost!
}
```

A little reminder: a doc comment has to be placed before the item it's supposed
to document. So if you want to document the `Island` trait, you need to put a
doc comment before it, not inside it. Same goes for the `lost` method: the doc
comment needs to be before it:

```
/// I'm THE island!
trait Island {
/// I'm lost!
fn lost();
}
```
"##,

E0585: r##"
A documentation comment that doesn't document anything was found.

Erroneous code example:

```compile_fail,E0585
fn main() {
// The following doc comment will fail:
/// This is a useless doc comment!
}
```

Documentation comments need to be followed by items, including functions,
types, modules, etc. Examples:

```
/// I'm documenting the following struct:
struct Foo;

/// I'm documenting the following function:
fn foo() {}
```
"##,

E0586: r##"
An inclusive range was used with no end.

Erroneous code example:

```compile_fail,E0586
fn main() {
let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];
let x = &tmp[1..=]; // error: inclusive range was used with no end
}
```

An inclusive range needs an end in order to *include* it. If you just need a
start and no end, use a non-inclusive range (with `..`):

```
fn main() {
let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];
let x = &tmp[1..]; // ok!
}
```

Or put an end to your inclusive range:

```
fn main() {
let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];
let x = &tmp[1..=3]; // ok!
}
```
"##,

E0704: r##"
This error indicates that a incorrect visibility restriction was specified.

Example of erroneous code:

```compile_fail,E0704
mod foo {
pub(foo) struct Bar {
x: i32
}
}
```

To make struct `Bar` only visible in module `foo` the `in` keyword should be
used:
```
mod foo {
pub(in crate::foo) struct Bar {
x: i32
}
}
# fn main() {}
```

For more information see the Rust Reference on [Visibility].

[Visibility]: https://doc.rust-lang.org/reference/visibility-and-privacy.html
"##,

E0743: r##"
C-variadic has been used on a non-foreign function.

Erroneous code example:

```compile_fail,E0743
fn foo2(x: u8, ...) {} // error!
```

Only foreign functions can use C-variadic (`...`). It is used to give an
undefined number of parameters to a given function (like `printf` in C). The
equivalent in Rust would be to use macros directly.
"##,

;

}
@@ -1,7 +1,7 @@
use crate::token::{self, Token, TokenKind};
use crate::sess::ParseSess;
use crate::symbol::{sym, Symbol};
use crate::util::comments;
use syntax::token::{self, Token, TokenKind};
use syntax::sess::ParseSess;
use syntax::symbol::{sym, Symbol};
use syntax::util::comments;

use errors::{FatalError, DiagnosticBuilder};
use syntax_pos::{BytePos, Pos, Span};
@@ -3,9 +3,9 @@ use syntax_pos::Span;

use super::{StringReader, UnmatchedBrace};

use crate::print::pprust::token_to_string;
use crate::token::{self, Token};
use crate::tokenstream::{DelimSpan, IsJoint::{self, *}, TokenStream, TokenTree, TreeAndJoint};
use syntax::print::pprust::token_to_string;
use syntax::token::{self, Token};
use syntax::tokenstream::{DelimSpan, IsJoint::{self, *}, TokenStream, TokenTree, TreeAndJoint};

use errors::PResult;
@@ -6,7 +6,7 @@ use std::iter::once;
use rustc_lexer::unescape::{EscapeError, Mode};
use syntax_pos::{Span, BytePos};

use crate::errors::{Handler, Applicability};
use syntax::errors::{Handler, Applicability};

pub(crate) fn emit_unescape_error(
handler: &Handler,
@@ -1,11 +1,12 @@
//! The main parser interface.

use crate::ast;
use crate::parse::parser::{Parser, emit_unclosed_delims, make_unclosed_delims_error};
use crate::token::{self, Nonterminal};
use crate::tokenstream::{self, TokenStream, TokenTree};
use crate::print::pprust;
use crate::sess::ParseSess;
#![feature(crate_visibility_modifier)]

use syntax::ast;
use syntax::print::pprust;
use syntax::sess::ParseSess;
use syntax::token::{self, Nonterminal};
use syntax::tokenstream::{self, TokenStream, TokenTree};

use errors::{PResult, FatalError, Level, Diagnostic};
use rustc_data_structures::sync::Lrc;
@@ -21,8 +22,10 @@ pub const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments");

#[macro_use]
pub mod parser;
use parser::{Parser, emit_unclosed_delims, make_unclosed_delims_error};
pub mod lexer;
pub mod validate_attr;
pub mod error_codes;

#[derive(Clone)]
pub struct Directory<'a> {
@@ -1,12 +1,10 @@
use super::{SeqSep, Parser, TokenType, PathStyle};
use crate::attr;
use crate::ast;
use crate::util::comments;
use crate::token::{self, Nonterminal, DelimToken};
use crate::tokenstream::{TokenStream, TokenTree};
use crate::source_map::Span;

use syntax_pos::Symbol;
use syntax::attr;
use syntax::ast;
use syntax::util::comments;
use syntax::token::{self, Nonterminal, DelimToken};
use syntax::tokenstream::{TokenStream, TokenTree};
use syntax_pos::{Span, Symbol};
use errors::PResult;

use log::debug;
@@ -158,12 +156,7 @@ impl<'a> Parser<'a> {
}
};

Ok(ast::Attribute {
kind: ast::AttrKind::Normal(item),
id: attr::mk_attr_id(),
style,
span,
})
Ok(attr::mk_attr_from_item(style, item, span))
}

/// Parses an inner part of an attribute (the path and following tokens).
@@ -1,14 +1,16 @@
use super::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType, SeqSep, Parser};
use crate::ast::{

use syntax::ast::{
self, Param, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind,
Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind,
};
use crate::token::{self, TokenKind, token_can_begin_expr};
use crate::print::pprust;
use crate::ptr::P;
use crate::symbol::{kw, sym};
use crate::ThinVec;
use crate::util::parser::AssocOp;
use syntax::token::{self, TokenKind, token_can_begin_expr};
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::symbol::{kw, sym};
use syntax::ThinVec;
use syntax::util::parser::AssocOp;
use syntax::struct_span_err;

use errors::{PResult, Applicability, DiagnosticBuilder, DiagnosticId, pluralize};
use rustc_data_structures::fx::FxHashSet;
@@ -2,24 +2,23 @@ use super::{Parser, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode
use super::{SemiColonMode, SeqSep, TokenExpectType};
use super::pat::{GateOr, PARAM_EXPECTED};
use super::diagnostics::Error;
use crate::maybe_recover_from_interpolated_ty_qpath;

use crate::ast::{
use syntax::ast::{
self, DUMMY_NODE_ID, Attribute, AttrStyle, Ident, CaptureBy, BlockCheckMode,
Expr, ExprKind, RangeLimits, Label, Movability, IsAsync, Arm, Ty, TyKind,
FunctionRetTy, Param, FnDecl, BinOpKind, BinOp, UnOp, Mac, AnonConst, Field, Lit,
};
use crate::maybe_recover_from_interpolated_ty_qpath;
use crate::token::{self, Token, TokenKind};
use crate::print::pprust;
use crate::ptr::P;
use crate::source_map::{self, Span};
use crate::util::classify;
use crate::util::literal::LitError;
use crate::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par};

use errors::{PResult, Applicability};
use syntax::token::{self, Token, TokenKind};
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::source_map::{self, Span};
use syntax::util::classify;
use syntax::util::literal::LitError;
use syntax::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par};
use syntax_pos::symbol::{kw, sym};
use syntax_pos::Symbol;
use errors::{PResult, Applicability};
use std::mem;
use rustc_data_structures::thin_vec::ThinVec;
@@ -1,9 +1,8 @@
use super::Parser;

use crate::ast::{self, WhereClause, GenericParam, GenericParamKind, GenericBounds, Attribute};
use crate::token;
use crate::source_map::DUMMY_SP;

use syntax::ast::{self, WhereClause, GenericParam, GenericParamKind, GenericBounds, Attribute};
use syntax::token;
use syntax::source_map::DUMMY_SP;
use syntax_pos::symbol::{kw, sym};

use errors::PResult;
@@ -2,23 +2,24 @@ use super::{Parser, PathStyle};
use super::diagnostics::{Error, dummy_arg, ConsumeClosingDelim};

use crate::maybe_whole;
use crate::ptr::P;
use crate::ast::{self, Abi, DUMMY_NODE_ID, Ident, Attribute, AttrKind, AttrStyle, AnonConst, Item};
use crate::ast::{ItemKind, ImplItem, ImplItemKind, TraitItem, TraitItemKind, UseTree, UseTreeKind};
use crate::ast::{PathSegment, IsAuto, Constness, IsAsync, Unsafety, Defaultness};
use crate::ast::{Visibility, VisibilityKind, Mutability, FnHeader, ForeignItem, ForeignItemKind};
use crate::ast::{Ty, TyKind, Generics, GenericBounds, TraitRef, EnumDef, VariantData, StructField};
use crate::ast::{Mac, MacDelimiter, Block, BindingMode, FnDecl, FnSig, SelfKind, Param};
use crate::parse::token;
use crate::tokenstream::{TokenTree, TokenStream};
use crate::symbol::{kw, sym};
use crate::source_map::{self, respan, Span};
use crate::ThinVec;

use syntax::ast::{self, Abi, DUMMY_NODE_ID, Ident, Attribute, AttrKind, AttrStyle, AnonConst, Item};
use syntax::ast::{ItemKind, ImplItem, ImplItemKind, TraitItem, TraitItemKind, UseTree, UseTreeKind};
use syntax::ast::{PathSegment, IsAuto, Constness, IsAsync, Unsafety, Defaultness};
use syntax::ast::{Visibility, VisibilityKind, Mutability, FnHeader, ForeignItem, ForeignItemKind};
use syntax::ast::{Ty, TyKind, Generics, GenericBounds, TraitRef, EnumDef, VariantData, StructField};
use syntax::ast::{Mac, MacDelimiter, Block, BindingMode, FnDecl, FnSig, SelfKind, Param};
use syntax::ptr::P;
use syntax::ThinVec;
use syntax::token;
use syntax::tokenstream::{TokenTree, TokenStream};
use syntax::source_map::{self, respan, Span};
use syntax_pos::BytePos;
use syntax_pos::symbol::{kw, sym};

use log::debug;
use std::mem;
use errors::{PResult, Applicability, DiagnosticBuilder, DiagnosticId, StashKey};
use syntax_pos::BytePos;

/// Whether the type alias or associated type is a concrete type or an opaque type.
#[derive(Debug)]
@@ -11,24 +11,26 @@ mod generics;
mod diagnostics;
use diagnostics::Error;

use crate::ast::{
use crate::{Directory, DirectoryOwnership};
use crate::lexer::UnmatchedBrace;

use syntax::ast::{
self, Abi, DUMMY_NODE_ID, AttrStyle, Attribute, CrateSugar, Ident,
IsAsync, MacDelimiter, Mutability, StrStyle, Visibility, VisibilityKind, Unsafety,
};
use crate::parse::{Directory, DirectoryOwnership};
use crate::parse::lexer::UnmatchedBrace;
use crate::util::comments::{doc_comment_style, strip_doc_comment_decoration};
use crate::token::{self, Token, TokenKind, DelimToken};
use crate::print::pprust;
use crate::ptr::P;
use crate::sess::ParseSess;
use crate::source_map::respan;
use crate::symbol::{kw, sym, Symbol};
use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
use crate::ThinVec;

use errors::{PResult, Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::token::{self, Token, TokenKind, DelimToken};
use syntax::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
use syntax::sess::ParseSess;
use syntax::source_map::respan;
use syntax::struct_span_err;
use syntax::util::comments::{doc_comment_style, strip_doc_comment_decoration};
use syntax_pos::symbol::{kw, sym, Symbol};
use syntax_pos::{Span, BytePos, DUMMY_SP, FileName};
use rustc_data_structures::thin_vec::ThinVec;
use errors::{PResult, Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
use log::debug;

use std::borrow::Cow;
@@ -2,11 +2,12 @@ use super::Parser;
use super::item::ItemInfo;
use super::diagnostics::Error;

use crate::attr;
use crate::ast::{self, Ident, Attribute, ItemKind, Mod, Crate};
use crate::parse::{new_sub_parser_from_file, DirectoryOwnership};
use crate::token::{self, TokenKind};
use crate::source_map::{SourceMap, Span, DUMMY_SP, FileName};
use crate::{new_sub_parser_from_file, DirectoryOwnership};

use syntax::attr;
use syntax::ast::{self, Ident, Attribute, ItemKind, Mod, Crate};
use syntax::token::{self, TokenKind};
use syntax::source_map::{SourceMap, Span, DUMMY_SP, FileName};

use syntax_pos::symbol::sym;
use errors::PResult;
@@ -1,14 +1,13 @@
use super::{Parser, PathStyle};

use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use crate::ptr::P;
use crate::ast::{self, Attribute, Pat, PatKind, FieldPat, RangeEnd, RangeSyntax, Mac};
use crate::ast::{BindingMode, Ident, Mutability, Path, QSelf, Expr, ExprKind};
use crate::mut_visit::{noop_visit_pat, noop_visit_mac, MutVisitor};
use crate::token;
use crate::print::pprust;
use crate::source_map::{respan, Span, Spanned};
use crate::ThinVec;
use syntax::ast::{self, Attribute, Pat, PatKind, FieldPat, RangeEnd, RangeSyntax, Mac};
use syntax::ast::{BindingMode, Ident, Mutability, Path, QSelf, Expr, ExprKind};
use syntax::mut_visit::{noop_visit_pat, noop_visit_mac, MutVisitor};
use syntax::ptr::P;
use syntax::print::pprust;
use syntax::ThinVec;
use syntax::token;
use syntax::source_map::{respan, Span, Spanned};
use syntax_pos::symbol::{kw, sym};
use errors::{PResult, Applicability, DiagnosticBuilder};
@@ -1,10 +1,10 @@
use super::{Parser, TokenType};

use crate::{maybe_whole, ThinVec};
use crate::ast::{self, QSelf, Path, PathSegment, Ident, ParenthesizedArgs, AngleBracketedArgs};
use crate::ast::{AnonConst, GenericArg, AssocTyConstraint, AssocTyConstraintKind, BlockCheckMode};
use crate::token::{self, Token};
use crate::source_map::{Span, BytePos};
use crate::maybe_whole;
use syntax::ast::{self, QSelf, Path, PathSegment, Ident, ParenthesizedArgs, AngleBracketedArgs};
use syntax::ast::{AnonConst, GenericArg, AssocTyConstraint, AssocTyConstraintKind, BlockCheckMode};
use syntax::ThinVec;
use syntax::token::{self, Token};
use syntax::source_map::{Span, BytePos};
use syntax_pos::symbol::{kw, sym};

use std::mem;
@@ -3,16 +3,18 @@ use super::expr::LhsExpr;
use super::path::PathStyle;
use super::pat::GateOr;
use super::diagnostics::Error;
use crate::maybe_whole;
use crate::DirectoryOwnership;

use crate::ptr::P;
use crate::{maybe_whole, ThinVec};
use crate::ast::{self, DUMMY_NODE_ID, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind};
use crate::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac, MacDelimiter};
use crate::parse::DirectoryOwnership;
use crate::util::classify;
use crate::token;
use crate::source_map::{respan, Span};
use crate::symbol::{kw, sym};
use syntax::ThinVec;
use syntax::ptr::P;
use syntax::ast;
use syntax::ast::{DUMMY_NODE_ID, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind};
use syntax::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac, MacDelimiter};
use syntax::util::classify;
use syntax::token;
use syntax::source_map::{respan, Span};
use syntax::symbol::{kw, sym};

use std::mem;
use errors::{PResult, Applicability};
@@ -2,13 +2,15 @@ use super::{Parser, PathStyle, PrevTokenKind, TokenType};
use super::item::ParamCfg;

use crate::{maybe_whole, maybe_recover_from_interpolated_ty_qpath};
use crate::ptr::P;
use crate::ast::{self, Ty, TyKind, MutTy, BareFnTy, FunctionRetTy, GenericParam, Lifetime, Ident};
use crate::ast::{TraitBoundModifier, TraitObjectSyntax, GenericBound, GenericBounds, PolyTraitRef};
use crate::ast::{Mutability, AnonConst, Mac};
use crate::token::{self, Token};
use crate::source_map::Span;
use crate::symbol::{kw};

use syntax::ptr::P;
use syntax::ast::{self, Ty, TyKind, MutTy, BareFnTy, FunctionRetTy, GenericParam, Lifetime, Ident};
use syntax::ast::{TraitBoundModifier, TraitObjectSyntax, GenericBound, GenericBounds, PolyTraitRef};
use syntax::ast::{Mutability, AnonConst, Mac};
use syntax::token::{self, Token};
use syntax::source_map::Span;
use syntax::struct_span_fatal;
use syntax_pos::symbol::kw;

use errors::{PResult, Applicability, pluralize};
@@ -1,14 +1,13 @@
//! Meta-syntax validation logic of attributes for post-expansion.

use crate::ast::{self, Attribute, AttrKind, Ident, MetaItem};
use crate::attr::{AttributeTemplate, mk_name_value_item_str};
use crate::sess::ParseSess;
use crate::feature_gate::BUILTIN_ATTRIBUTE_MAP;
use crate::early_buffered_lints::BufferedEarlyLintId;
use crate::token;
use crate::tokenstream::TokenTree;

use errors::{PResult, Applicability};
use syntax::ast::{self, Attribute, AttrKind, Ident, MetaItem};
use syntax::attr::{AttributeTemplate, mk_name_value_item_str};
use syntax::early_buffered_lints::BufferedEarlyLintId;
use syntax::feature_gate::BUILTIN_ATTRIBUTE_MAP;
use syntax::token;
use syntax::tokenstream::TokenTree;
use syntax::sess::ParseSess;
use syntax_pos::{Symbol, sym};

pub fn check_meta(sess: &ParseSess, attr: &Attribute) {
@@ -12,8 +12,9 @@ path = "lib.rs"
log = "0.4"
rustc = { path = "../librustc" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_index = { path = "../librustc_index" }
rustc_parse = { path = "../librustc_parse" }
rustc_target = { path = "../librustc_target" }
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }
errors = { path = "../librustc_errors", package = "rustc_errors" }
rustc_target = { path = "../librustc_target" }
rustc_index = { path = "../librustc_index" }
@@ -7,15 +7,15 @@
// or type checking or some other kind of complex analysis.

use std::mem;
use syntax::print::pprust;
use rustc::lint;
use rustc::session::Session;
use rustc_data_structures::fx::FxHashMap;
use rustc_parse::validate_attr;
use syntax::ast::*;
use syntax::attr;
use syntax::expand::is_proc_macro_attr;
use syntax::feature_gate::is_builtin_attr;
use syntax::parse::validate_attr;
use syntax::print::pprust;
use syntax::source_map::Spanned;
use syntax::symbol::{kw, sym};
use syntax::visit::{self, Visitor};
@@ -13,6 +13,7 @@ log = "0.4"
rustc = { path = "../librustc" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_codegen_utils = { path = "../librustc_codegen_utils" }
rustc_parse = { path = "../librustc_parse" }
serde_json = "1"
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }
@@ -1,8 +1,6 @@
use rustc::session::Session;

use crate::generated_code;

use syntax::parse::lexer::{self, StringReader};
use rustc::session::Session;
use rustc_parse::lexer::{self, StringReader};
use syntax::token::{self, TokenKind};
use syntax_pos::*;
@@ -11,10 +11,10 @@ use std::fmt::Display;
use std::io;
use std::io::prelude::*;

use syntax::source_map::SourceMap;
use syntax::parse::lexer;
use rustc_parse::lexer;
use syntax::token::{self, Token};
use syntax::sess::ParseSess;
use syntax::source_map::SourceMap;
use syntax::symbol::{kw, sym};
use syntax_expand::config::process_configure_mod;
use syntax_pos::{Span, FileName};
@@ -29,6 +29,7 @@ extern crate rustc_resolve;
extern crate rustc_lint;
extern crate rustc_interface;
extern crate rustc_metadata;
extern crate rustc_parse;
extern crate rustc_target;
extern crate rustc_typeck;
extern crate rustc_lexer;
@@ -1,5 +1,5 @@
use errors::Applicability;
use syntax::parse::lexer::{StringReader as Lexer};
use rustc_parse::lexer::{StringReader as Lexer};
use syntax::token;
use syntax::sess::ParseSess;
use syntax::source_map::FilePathMapping;
@@ -399,7 +399,8 @@ pub fn make_test(s: &str,
// Uses libsyntax to parse the doctest and find if there's a main fn and the extern
// crate already is included.
let (already_has_main, already_has_extern_crate, found_macro) = with_globals(edition, || {
use crate::syntax::{parse, sess::ParseSess, source_map::FilePathMapping};
use crate::syntax::{sess::ParseSess, source_map::FilePathMapping};
use rustc_parse::maybe_new_parser_from_source_str;
use errors::emitter::EmitterWriter;
use errors::Handler;
@@ -418,7 +419,7 @@ pub fn make_test(s: &str,
let mut found_extern_crate = cratename.is_none();
let mut found_macro = false;

let mut parser = match parse::maybe_new_parser_from_source_str(&sess, filename, source) {
let mut parser = match maybe_new_parser_from_source_str(&sess, filename, source) {
Ok(p) => p,
Err(errs) => {
for mut err in errs {
@@ -537,7 +537,7 @@ pub struct Pat {
impl Pat {
/// Attempt reparsing the pattern as a type.
/// This is intended for use by diagnostics.
pub(super) fn to_ty(&self) -> Option<P<Ty>> {
pub fn to_ty(&self) -> Option<P<Ty>> {
let kind = match &self.kind {
// In a type expression `_` is an inference variable.
PatKind::Wild => TyKind::Infer,
@@ -1031,7 +1031,7 @@ impl Expr {
}
}

fn to_bound(&self) -> Option<GenericBound> {
pub fn to_bound(&self) -> Option<GenericBound> {
match &self.kind {
ExprKind::Path(None, path) => Some(GenericBound::Trait(
PolyTraitRef::new(Vec::new(), path.clone(), self.span),
@@ -1042,7 +1042,7 @@ impl Expr {
}

/// Attempts to reparse as `Ty` (for diagnostic purposes).
pub(super) fn to_ty(&self) -> Option<P<Ty>> {
pub fn to_ty(&self) -> Option<P<Ty>> {
let kind = match &self.kind {
// Trivial conversions.
ExprKind::Path(qself, path) => TyKind::Path(qself.clone(), path.clone()),
@@ -24,9 +24,9 @@ enum AttrError {
/// Only top-level shape (`#[attr]` vs `#[attr(...)]` vs `#[attr = ...]`) is considered now.
#[derive(Clone, Copy)]
pub struct AttributeTemplate {
crate word: bool,
crate list: Option<&'static str>,
crate name_value_str: Option<&'static str>,
pub word: bool,
pub list: Option<&'static str>,
pub name_value_str: Option<&'static str>,
}

impl AttributeTemplate {
@@ -277,7 +277,7 @@ impl MetaItem {
}

impl AttrItem {
crate fn meta(&self, span: Span) -> Option<MetaItem> {
pub fn meta(&self, span: Span) -> Option<MetaItem> {
let mut tokens = self.tokens.trees().peekable();
Some(MetaItem {
path: self.path.clone(),
@@ -4,28 +4,6 @@
// `:set tw=0` to disable.
register_diagnostics! {

E0178: r##"
In types, the `+` type operator has low precedence, so it is often necessary
to use parentheses.

For example:

```compile_fail,E0178
trait Foo {}

struct Bar<'a> {
w: &'a Foo + Copy, // error, use &'a (Foo + Copy)
x: &'a Foo + 'a, // error, use &'a (Foo + 'a)
y: &'a mut Foo + 'a, // error, use &'a mut (Foo + 'a)
z: fn() -> Foo + 'a, // error, use fn() -> (Foo + 'a)
}
```

More details can be found in [RFC 438].

[RFC 438]: https://github.com/rust-lang/rfcs/pull/438
"##,

E0536: r##"
The `not` cfg-predicate was malformed.

@@ -278,106 +256,6 @@ pub fn something() {}
```
"##,

E0583: r##"
A file wasn't found for an out-of-line module.

Erroneous code example:

```ignore (compile_fail not working here; see Issue #43707)
mod file_that_doesnt_exist; // error: file not found for module

fn main() {}
```

Please be sure that a file corresponding to the module exists. If you
want to use a module named `file_that_doesnt_exist`, you need to have a file
named `file_that_doesnt_exist.rs` or `file_that_doesnt_exist/mod.rs` in the
same directory.
"##,

E0584: r##"
A doc comment that is not attached to anything has been encountered.

Erroneous code example:

```compile_fail,E0584
trait Island {
fn lost();

/// I'm lost!
}
```

A little reminder: a doc comment has to be placed before the item it's supposed
to document. So if you want to document the `Island` trait, you need to put a
doc comment before it, not inside it. Same goes for the `lost` method: the doc
comment needs to be before it:

```
/// I'm THE island!
trait Island {
/// I'm lost!
fn lost();
}
```
"##,

E0585: r##"
A documentation comment that doesn't document anything was found.

Erroneous code example:

```compile_fail,E0585
fn main() {
// The following doc comment will fail:
/// This is a useless doc comment!
}
```

Documentation comments need to be followed by items, including functions,
types, modules, etc. Examples:

```
/// I'm documenting the following struct:
struct Foo;

/// I'm documenting the following function:
fn foo() {}
```
"##,

E0586: r##"
An inclusive range was used with no end.

Erroneous code example:

```compile_fail,E0586
fn main() {
let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];
let x = &tmp[1..=]; // error: inclusive range was used with no end
}
```

An inclusive range needs an end in order to *include* it. If you just need a
start and no end, use a non-inclusive range (with `..`):

```
fn main() {
let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];
let x = &tmp[1..]; // ok!
}
```

Or put an end to your inclusive range:

```
fn main() {
let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];
let x = &tmp[1..=3]; // ok!
}
```
"##,

E0589: r##"
The value of `N` that was specified for `repr(align(N))` was not a power
of two, or was greater than 2^29.
@@ -446,35 +324,6 @@ and likely to change in the future.

"##,

E0704: r##"
This error indicates that a incorrect visibility restriction was specified.

Example of erroneous code:

```compile_fail,E0704
mod foo {
pub(foo) struct Bar {
x: i32
}
}
```

To make struct `Bar` only visible in module `foo` the `in` keyword should be
used:
```
mod foo {
pub(in crate::foo) struct Bar {
x: i32
}
}
# fn main() {}
```

For more information see the Rust Reference on [Visibility].

[Visibility]: https://doc.rust-lang.org/reference/visibility-and-privacy.html
"##,

E0705: r##"
A `#![feature]` attribute was declared for a feature that is stable in
the current edition, but not in all editions.
@@ -504,20 +353,6 @@ Delete the offending feature attribute, or add it to the list of allowed
features in the `-Z allow_features` flag.
"##,

E0743: r##"
C-variadic has been used on a non-foreign function.

Erroneous code example:

```compile_fail,E0743
fn foo2(x: u8, ...) {} // error!
```

Only foreign functions can use C-variadic (`...`). It is used to give an
undefined number of parameters to a given function (like `printf` in C). The
equivalent in Rust would be to use macros directly.
"##,

;

E0539, // incorrect meta item
@@ -26,8 +26,6 @@ pub use rustc_data_structures::thin_vec::ThinVec;
use ast::AttrId;
use syntax_pos::edition::Edition;

pub const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments");

#[macro_export]
macro_rules! unwrap_or {
($opt:expr, $default:expr) => {
@@ -82,10 +80,10 @@ pub mod diagnostics {
pub mod error_codes;

pub mod util {
crate mod classify;
pub mod classify;
pub mod comments;
pub mod lev_distance;
crate mod literal;
pub mod literal;
pub mod node_count;
pub mod parser;
pub mod map_in_place;
@@ -100,7 +98,6 @@ pub mod source_map;
pub mod entry;
pub mod feature_gate;
pub mod mut_visit;
pub mod parse;
pub mod ptr;
pub mod show_span;
pub use syntax_pos::edition;
@@ -321,7 +321,7 @@ fn token_to_string_ext(token: &Token, convert_dollar_crate: bool) -> String {
token_kind_to_string_ext(&token.kind, convert_dollar_crate)
}

crate fn nonterminal_to_string(nt: &Nonterminal) -> String {
pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
match *nt {
token::NtExpr(ref e) => expr_to_string(e),
token::NtMeta(ref e) => attr_item_to_string(e),
@@ -958,7 +958,7 @@ impl<'a> State<'a> {
}
}

crate fn print_opt_lifetime(&mut self, lifetime: &Option<ast::Lifetime>) {
pub fn print_opt_lifetime(&mut self, lifetime: &Option<ast::Lifetime>) {
if let Some(lt) = *lifetime {
self.print_lifetime(lt);
self.nbsp();
@@ -973,7 +973,7 @@ impl<'a> State<'a> {
}
}

crate fn print_type(&mut self, ty: &ast::Ty) {
pub fn print_type(&mut self, ty: &ast::Ty) {
self.maybe_print_comment(ty.span.lo());
self.ibox(0);
match ty.kind {
@@ -1998,7 +1998,7 @@ impl<'a> State<'a> {
self.print_expr_maybe_paren(expr, parser::PREC_PREFIX)
}

crate fn print_expr(&mut self, expr: &ast::Expr) {
pub fn print_expr(&mut self, expr: &ast::Expr) {
self.print_expr_outer_attr_style(expr, true)
}
@@ -2335,7 +2335,7 @@ impl<'a> State<'a> {
}
}

crate fn print_usize(&mut self, i: usize) {
pub fn print_usize(&mut self, i: usize) {
self.s.word(i.to_string())
}
@@ -2604,7 +2604,7 @@ impl<'a> State<'a> {
}
}

crate fn print_type_bounds(&mut self, prefix: &'static str, bounds: &[ast::GenericBound]) {
pub fn print_type_bounds(&mut self, prefix: &'static str, bounds: &[ast::GenericBound]) {
if !bounds.is_empty() {
self.s.word(prefix);
let mut first = true;
@@ -2763,7 +2763,7 @@ impl<'a> State<'a> {
}
}

crate fn print_mutability(&mut self, mutbl: ast::Mutability) {
pub fn print_mutability(&mut self, mutbl: ast::Mutability) {
match mutbl {
ast::Mutability::Mutable => self.word_nbsp("mut"),
ast::Mutability::Immutable => {},
@@ -71,14 +71,14 @@ impl GatedSpans {
/// Info about a parsing session.
pub struct ParseSess {
pub span_diagnostic: Handler,
crate unstable_features: UnstableFeatures,
pub unstable_features: UnstableFeatures,
pub config: CrateConfig,
pub edition: Edition,
pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
/// Places where raw identifiers were used. This is used for feature-gating raw identifiers.
pub raw_identifier_spans: Lock<Vec<Span>>,
/// Used to determine and report recursive module inclusions.
pub(super) included_mod_stack: Lock<Vec<PathBuf>>,
pub included_mod_stack: Lock<Vec<PathBuf>>,
source_map: Lrc<SourceMap>,
pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
/// Contains the spans of block expressions that could have been incomplete based on the
@@ -107,14 +107,14 @@ impl fmt::Display for Lit {

impl LitKind {
/// An English article for the literal token kind.
crate fn article(self) -> &'static str {
pub fn article(self) -> &'static str {
match self {
Integer | Err => "an",
_ => "a",
}
}

crate fn descr(self) -> &'static str {
pub fn descr(self) -> &'static str {
match self {
Bool => panic!("literal token contains `Lit::Bool`"),
Byte => "byte",
@@ -141,12 +141,12 @@ impl Lit {
}
}

pub(crate) fn ident_can_begin_expr(name: ast::Name, span: Span, is_raw: bool) -> bool {
pub fn ident_can_begin_expr(name: ast::Name, span: Span, is_raw: bool) -> bool {
let ident_token = Token::new(Ident(name, is_raw), span);
token_can_begin_expr(&ident_token)
}

pub(crate) fn token_can_begin_expr(ident_token: &Token) -> bool {
pub fn token_can_begin_expr(ident_token: &Token) -> bool {
!ident_token.is_reserved_ident() ||
ident_token.is_path_segment_keyword() ||
match ident_token.kind {
@@ -276,7 +276,7 @@ impl TokenKind {

/// Returns tokens that are likely to be typed accidentally instead of the current token.
/// Enables better error recovery when the wrong token is found.
crate fn similar_tokens(&self) -> Option<Vec<TokenKind>> {
pub fn similar_tokens(&self) -> Option<Vec<TokenKind>> {
match *self {
Comma => Some(vec![Dot, Lt, Semi]),
Semi => Some(vec![Colon, Comma]),
@@ -291,7 +291,7 @@ impl Token {
}

/// Some token that will be thrown away later.
crate fn dummy() -> Self {
pub fn dummy() -> Self {
Token::new(TokenKind::Whitespace, DUMMY_SP)
}
@@ -305,7 +305,7 @@ impl Token {
mem::replace(self, Token::dummy())
}

crate fn is_op(&self) -> bool {
pub fn is_op(&self) -> bool {
match self.kind {
OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
Ident(..) | Lifetime(..) | Interpolated(..) |
@@ -314,7 +314,7 @@ impl Token {
}
}

crate fn is_like_plus(&self) -> bool {
pub fn is_like_plus(&self) -> bool {
match self.kind {
BinOp(Plus) | BinOpEq(Plus) => true,
_ => false,
@@ -377,7 +377,7 @@ impl Token {
}

/// Returns `true` if the token can appear at the start of a const param.
crate fn can_begin_const_arg(&self) -> bool {
pub fn can_begin_const_arg(&self) -> bool {
match self.kind {
OpenDelim(Brace) => true,
Interpolated(ref nt) => match **nt {
@@ -389,7 +389,7 @@ impl Token {
}

/// Returns `true` if the token can appear at the start of a generic bound.
crate fn can_begin_bound(&self) -> bool {
pub fn can_begin_bound(&self) -> bool {
self.is_path_start() || self.is_lifetime() || self.is_keyword(kw::For) ||
self == &Question || self == &OpenDelim(Paren)
}
@@ -446,13 +446,13 @@ impl Token {
}

/// Returns `true` if the token is a lifetime.
crate fn is_lifetime(&self) -> bool {
pub fn is_lifetime(&self) -> bool {
self.lifetime().is_some()
}

/// Returns `true` if the token is a identifier whose name is the given
/// string slice.
crate fn is_ident_named(&self, name: Symbol) -> bool {
pub fn is_ident_named(&self, name: Symbol) -> bool {
self.ident().map_or(false, |(ident, _)| ident.name == name)
}
@@ -469,7 +469,7 @@ impl Token {
/// Would `maybe_whole_expr` in `parser.rs` return `Ok(..)`?
/// That is, is this a pre-parsed expression dropped into the token stream
/// (which happens while parsing the result of macro expansion)?
crate fn is_whole_expr(&self) -> bool {
pub fn is_whole_expr(&self) -> bool {
if let Interpolated(ref nt) = self.kind {
if let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtIdent(..) | NtBlock(_) = **nt {
return true;
@@ -480,16 +480,16 @@ impl Token {
}

/// Returns `true` if the token is either the `mut` or `const` keyword.
crate fn is_mutability(&self) -> bool {
pub fn is_mutability(&self) -> bool {
self.is_keyword(kw::Mut) ||
self.is_keyword(kw::Const)
}

crate fn is_qpath_start(&self) -> bool {
pub fn is_qpath_start(&self) -> bool {
self == &Lt || self == &BinOp(Shl)
}

crate fn is_path_start(&self) -> bool {
pub fn is_path_start(&self) -> bool {
self == &ModSep || self.is_qpath_start() || self.is_path() ||
self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident()
}
@@ -499,23 +499,23 @@ impl Token {
self.is_non_raw_ident_where(|id| id.name == kw)
}

crate fn is_path_segment_keyword(&self) -> bool {
pub fn is_path_segment_keyword(&self) -> bool {
self.is_non_raw_ident_where(ast::Ident::is_path_segment_keyword)
}

// Returns true for reserved identifiers used internally for elided lifetimes,
// unnamed method parameters, crate root module, error recovery etc.
crate fn is_special_ident(&self) -> bool {
pub fn is_special_ident(&self) -> bool {
self.is_non_raw_ident_where(ast::Ident::is_special)
}

/// Returns `true` if the token is a keyword used in the language.
crate fn is_used_keyword(&self) -> bool {
pub fn is_used_keyword(&self) -> bool {
self.is_non_raw_ident_where(ast::Ident::is_used_keyword)
}

/// Returns `true` if the token is a keyword reserved for possible future use.
crate fn is_unused_keyword(&self) -> bool {
pub fn is_unused_keyword(&self) -> bool {
self.is_non_raw_ident_where(ast::Ident::is_unused_keyword)
}
@@ -525,7 +525,7 @@ impl Token {
}

/// Returns `true` if the token is the identifier `true` or `false`.
crate fn is_bool_lit(&self) -> bool {
pub fn is_bool_lit(&self) -> bool {
self.is_non_raw_ident_where(|id| id.name.is_bool_lit())
}
@@ -537,7 +537,7 @@ impl Token {
}
}

crate fn glue(&self, joint: &Token) -> Option<Token> {
pub fn glue(&self, joint: &Token) -> Option<Token> {
let kind = match self.kind {
Eq => match joint.kind {
Eq => EqEq,
@@ -32,14 +32,14 @@ pub struct Comment {
pub pos: BytePos,
}

crate fn is_line_doc_comment(s: &str) -> bool {
pub fn is_line_doc_comment(s: &str) -> bool {
let res = (s.starts_with("///") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'/') ||
s.starts_with("//!");
debug!("is {:?} a doc comment? {}", s, res);
res
}

crate fn is_block_doc_comment(s: &str) -> bool {
pub fn is_block_doc_comment(s: &str) -> bool {
// Prevent `/**/` from being parsed as a doc comment
let res = ((s.starts_with("/**") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'*') ||
s.starts_with("/*!")) && s.len() >= 5;
@@ -14,7 +14,7 @@ use rustc_lexer::unescape::{unescape_raw_str, unescape_raw_byte_str};

use std::ascii;

crate enum LitError {
pub enum LitError {
NotLiteral,
LexerError,
InvalidSuffix,
@@ -185,12 +185,12 @@ impl LitKind {

impl Lit {
/// Converts literal token into an AST literal.
crate fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
pub fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
Ok(Lit { token, kind: LitKind::from_lit_token(token)?, span })
}

/// Converts arbitrary token into an AST literal.
crate fn from_token(token: &Token) -> Result<Lit, LitError> {
pub fn from_token(token: &Token) -> Result<Lit, LitError> {
let lit = match token.kind {
token::Ident(name, false) if name.is_bool_lit() =>
token::Lit::new(token::Bool, name, None),
@@ -217,8 +217,8 @@ impl Lit {
Lit { token: kind.to_lit_token(), kind, span }
}

/// Losslessly convert an AST literal into a token tree.
crate fn token_tree(&self) -> TokenTree {
/// Losslessly convert an AST literal into a token stream.
pub fn token_tree(&self) -> TokenTree {
let token = match self.token.kind {
token::Bool => token::Ident(self.token.symbol, false),
_ => token::Literal(self.token),
@@ -69,7 +69,7 @@ pub enum Fixity {

impl AssocOp {
/// Creates a new AssocOP from a token
crate fn from_token(t: &Token) -> Option<AssocOp> {
pub fn from_token(t: &Token) -> Option<AssocOp> {
use AssocOp::*;
match t.kind {
token::BinOpEq(k) => Some(AssignOp(k)),
@@ -358,7 +358,7 @@ impl ExprPrecedence {
}

/// In `let p = e`, operators with precedence `<=` this one requires parenthesis in `e`.
crate fn prec_let_scrutinee_needs_par() -> usize {
pub fn prec_let_scrutinee_needs_par() -> usize {
AssocOp::LAnd.precedence()
}
@@ -11,15 +11,12 @@ path = "lib.rs"
doctest = false

[dependencies]
bitflags = "1.0"
rustc_serialize = { path = "../libserialize", package = "serialize" }
log = "0.4"
scoped-tls = "1.0"
lazy_static = "1.0.0"
syntax_pos = { path = "../libsyntax_pos" }
errors = { path = "../librustc_errors", package = "rustc_errors" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_index = { path = "../librustc_index" }
rustc_lexer = { path = "../librustc_lexer" }
rustc_parse = { path = "../librustc_parse" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
syntax = { path = "../libsyntax" }
@@ -1,15 +1,15 @@
use crate::expand::{self, AstFragment, Invocation};

use rustc_parse::{self, parser, DirectoryOwnership, MACRO_ARGUMENTS};
use syntax::ast::{self, NodeId, Attribute, Name, PatKind};
use syntax::attr::{self, HasAttrs, Stability, Deprecation};
use syntax::source_map::SourceMap;
use syntax::edition::Edition;
use syntax::mut_visit::{self, MutVisitor};
use syntax::parse::{self, parser, DirectoryOwnership};
use syntax::ptr::P;
use syntax::sess::ParseSess;
use syntax::symbol::{kw, sym, Ident, Symbol};
use syntax::{ThinVec, MACRO_ARGUMENTS};
use syntax::ThinVec;
use syntax::token;
use syntax::tokenstream::{self, TokenStream};
use syntax::visit::Visitor;
@@ -18,9 +18,9 @@ use errors::{DiagnosticBuilder, DiagnosticId};
use smallvec::{smallvec, SmallVec};
use syntax_pos::{FileName, Span, MultiSpan, DUMMY_SP};
use syntax_pos::hygiene::{AstPass, ExpnId, ExpnData, ExpnKind};

use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::{self, Lrc};

use std::iter;
use std::path::PathBuf;
use std::rc::Rc;
@@ -922,7 +922,7 @@ impl<'a> ExtCtxt<'a> {
expand::MacroExpander::new(self, true)
}
pub fn new_parser_from_tts(&self, stream: TokenStream) -> parser::Parser<'a> {
parse::stream_to_parser(self.parse_sess, stream, MACRO_ARGUMENTS)
rustc_parse::stream_to_parser(self.parse_sess, stream, MACRO_ARGUMENTS)
}
pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() }
pub fn parse_sess(&self) -> &'a ParseSess { self.parse_sess }
@ -1,3 +1,4 @@
use rustc_parse::validate_attr;
use syntax::attr::HasAttrs;
use syntax::feature_gate::{
feature_err,
@ -10,11 +11,10 @@ use syntax::attr;
use syntax::ast;
use syntax::edition::Edition;
use syntax::mut_visit::*;
use syntax::parse::{self, validate_attr};
use syntax::ptr::P;
use syntax::sess::ParseSess;
use syntax::symbol::sym;
use syntax::util::map_in_place::MapInPlace;
use syntax_pos::symbol::sym;

use errors::Applicability;
use smallvec::SmallVec;
@ -113,7 +113,7 @@ impl<'a> StripUnconfigured<'a> {
return vec![];
}

let res = parse::parse_in_attr(self.sess, &attr, |p| p.parse_cfg_attr());
let res = rustc_parse::parse_in_attr(self.sess, &attr, |p| p.parse_cfg_attr());
let (cfg_predicate, expanded_attrs) = match res {
Ok(result) => result,
Err(mut e) => {
@ -6,15 +6,15 @@ use crate::placeholders::{placeholder, PlaceholderExpander};
use crate::config::StripUnconfigured;
use crate::configure;

use rustc_parse::DirectoryOwnership;
use rustc_parse::parser::Parser;
use rustc_parse::validate_attr;
use syntax::ast::{self, AttrItem, Block, Ident, LitKind, NodeId, PatKind, Path};
use syntax::ast::{MacStmtStyle, StmtKind, ItemKind};
use syntax::attr::{self, HasAttrs};
use syntax::source_map::respan;
use syntax::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
use syntax::mut_visit::*;
use syntax::parse::DirectoryOwnership;
use syntax::parse::parser::Parser;
use syntax::parse::validate_attr;
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::sess::ParseSess;
@ -76,9 +76,9 @@ use TokenTreeOrTokenTreeSlice::*;

use crate::mbe::{self, TokenTree};

use rustc_parse::Directory;
use rustc_parse::parser::{Parser, PathStyle};
use syntax::ast::{Ident, Name};
use syntax::parse::Directory;
use syntax::parse::parser::{Parser, PathStyle};
use syntax::print::pprust;
use syntax::sess::ParseSess;
use syntax::symbol::{kw, sym, Symbol};
@ -652,7 +652,7 @@ pub(super) fn parse(
directory,
recurse_into_modules,
true,
syntax::MACRO_ARGUMENTS,
rustc_parse::MACRO_ARGUMENTS,
);

// A queue of possible matcher positions. We initialize it with the matcher position in which
@ -8,30 +8,30 @@ use crate::mbe::macro_parser::{Error, Failure, Success};
use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedParseResult};
use crate::mbe::transcribe::transcribe;

use rustc_parse::parser::Parser;
use rustc_parse::Directory;
use syntax::ast;
use syntax::attr::{self, TransparencyError};
use syntax::edition::Edition;
use syntax::feature_gate::Features;
use syntax::parse::parser::Parser;
use syntax::parse::Directory;
use syntax::print::pprust;
use syntax::sess::ParseSess;
use syntax::symbol::{kw, sym, Symbol};
use syntax::token::{self, NtTT, Token, TokenKind::*};
use syntax::tokenstream::{DelimSpan, TokenStream};

use errors::{DiagnosticBuilder, FatalError};
use log::debug;
use syntax_pos::hygiene::Transparency;
use syntax_pos::Span;

use errors::{DiagnosticBuilder, FatalError};
use log::debug;

use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use std::borrow::Cow;
use std::collections::hash_map::Entry;
use std::{mem, slice};

use errors::Applicability;
use rustc_data_structures::sync::Lrc;

const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
`ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, \
@ -1,18 +1,18 @@
use crate::config::process_configure_mod;

use rustc_data_structures::sync::Lrc;
use rustc_parse::lexer::StringReader;
use syntax::token::{self, Token, TokenKind};
use syntax::sess::ParseSess;
use syntax::source_map::{SourceMap, FilePathMapping};
use syntax::util::comments::is_doc_comment;
use syntax::with_default_globals;
use syntax::parse::lexer::StringReader;
use syntax_pos::symbol::Symbol;
use syntax_pos::{BytePos, Span};

use errors::{Handler, emitter::EmitterWriter};
use std::io;
use std::path::PathBuf;
use syntax_pos::{BytePos, Span};

fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
let emitter = EmitterWriter::new(
@ -1,6 +1,7 @@
use crate::config::process_configure_mod;
use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse};

use rustc_parse::new_parser_from_source_str;
use syntax::ast::{self, Name, PatKind};
use syntax::attr::first_attr_value_str_by_name;
use syntax::sess::ParseSess;
@ -12,7 +13,6 @@ use syntax::symbol::{kw, sym};
use syntax::tokenstream::{DelimSpan, TokenTree, TokenStream};
use syntax::visit;
use syntax::with_default_globals;
use syntax::parse::new_parser_from_source_str;
use syntax_pos::{Span, BytePos, Pos, FileName};
use errors::PResult;
@ -4,7 +4,6 @@ use crate::proc_macro_server;
use syntax::ast::{self, ItemKind, Attribute, Mac};
use syntax::attr::{mark_used, mark_known};
use syntax::errors::{Applicability, FatalError};
use syntax::parse;
use syntax::symbol::sym;
use syntax::token;
use syntax::tokenstream::{self, TokenStream};
@ -135,7 +134,11 @@ impl MultiItemModifier for ProcMacroDerive {
let error_count_before = ecx.parse_sess.span_diagnostic.err_count();
let msg = "proc-macro derive produced unparseable tokens";

let mut parser = parse::stream_to_parser(ecx.parse_sess, stream, Some("proc-macro derive"));
let mut parser = rustc_parse::stream_to_parser(
ecx.parse_sess,
stream,
Some("proc-macro derive"),
);
let mut items = vec![];

loop {
@ -200,7 +203,7 @@ crate fn collect_derives(cx: &mut ExtCtxt<'_>, attrs: &mut Vec<ast::Attribute>)
if attr.get_normal_item().tokens.is_empty() {
return Ok(Vec::new());
}
parse::parse_in_attr(cx.parse_sess, attr, |p| p.parse_derive_paths())
rustc_parse::parse_in_attr(cx.parse_sess, attr, |p| p.parse_derive_paths())
};

match parse_derive_paths(attr) {
@ -1,17 +1,17 @@
use crate::base::ExtCtxt;

use rustc_parse::{parse_stream_from_source_str, nt_to_tokenstream};
use syntax::ast;
use syntax::parse;
use syntax::util::comments;
use syntax::print::pprust;
use syntax::sess::ParseSess;
use syntax::token;
use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
use syntax_pos::symbol::{kw, sym, Symbol};

use errors::Diagnostic;
use rustc_data_structures::sync::Lrc;
use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
use syntax_pos::symbol::{kw, sym, Symbol};

use pm::{Delimiter, Level, LineColumn, Spacing};
use pm::bridge::{server, TokenTree};
@ -178,7 +178,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
}

Interpolated(nt) => {
let stream = parse::nt_to_tokenstream(&nt, sess, span);
let stream = nt_to_tokenstream(&nt, sess, span);
TokenTree::Group(Group {
delimiter: Delimiter::None,
stream,
@ -402,7 +402,7 @@ impl server::TokenStream for Rustc<'_> {
stream.is_empty()
}
fn from_str(&mut self, src: &str) -> Self::TokenStream {
parse::parse_stream_from_source_str(
parse_stream_from_source_str(
FileName::proc_macro_source_code(src),
src.to_string(),
self.sess,
@ -1,10 +1,10 @@
use crate::config::process_configure_mod;
use rustc_parse::{source_file_to_stream, new_parser_from_source_str, parser::Parser};
use syntax::ast;
use syntax::tokenstream::TokenStream;
use syntax::sess::ParseSess;
use syntax::source_map::{SourceMap, FilePathMapping};
use syntax::with_default_globals;
use syntax::parse::{source_file_to_stream, new_parser_from_source_str, parser::Parser};
use syntax_pos::{BytePos, Span, MultiSpan};

use errors::emitter::EmitterWriter;
@ -14,6 +14,7 @@ errors = { path = "../librustc_errors", package = "rustc_errors" }
fmt_macros = { path = "../libfmt_macros" }
log = "0.4"
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_parse = { path = "../librustc_parse" }
rustc_target = { path = "../librustc_target" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
syntax = { path = "../libsyntax" }
@ -1,13 +1,13 @@
use errors::{Applicability, DiagnosticBuilder};

use rustc_parse::parser::Parser;
use syntax::ast::{self, *};
use syntax_expand::base::*;
use syntax::token::{self, TokenKind};
use syntax::parse::parser::Parser;
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::symbol::{sym, Symbol};
use syntax::tokenstream::{TokenStream, TokenTree};
use syntax_expand::base::*;
use syntax_pos::{Span, DUMMY_SP};

pub fn expand_assert<'cx>(
@ -2,7 +2,6 @@

use syntax::ast::{self, AttrItem, AttrStyle};
use syntax::attr::mk_attr;
use syntax::parse;
use syntax::token;
use syntax::sess::ParseSess;
use syntax_expand::panictry;
@ -10,7 +9,7 @@ use syntax_pos::FileName;

pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -> ast::Crate {
for raw_attr in attrs {
let mut parser = parse::new_parser_from_source_str(
let mut parser = rustc_parse::new_parser_from_source_str(
parse_sess,
FileName::cli_crate_attr_source_code(&raw_attr),
raw_attr.clone(),
@ -1,13 +1,13 @@
use syntax_expand::panictry;
use syntax_expand::base::{self, *};
use rustc_parse::{self, DirectoryOwnership, new_sub_parser_from_file, parser::Parser};
use syntax::ast;
use syntax::parse::{self, DirectoryOwnership};
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::symbol::Symbol;
use syntax::token;
use syntax::tokenstream::TokenStream;
use syntax::early_buffered_lints::BufferedEarlyLintId;
use syntax_expand::panictry;
use syntax_expand::base::{self, *};

use smallvec::SmallVec;
use syntax_pos::{self, Pos, Span};
@ -85,10 +85,10 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
},
};
let directory_ownership = DirectoryOwnership::Owned { relative: None };
let p = parse::new_sub_parser_from_file(cx.parse_sess(), &file, directory_ownership, None, sp);
let p = new_sub_parser_from_file(cx.parse_sess(), &file, directory_ownership, None, sp);

struct ExpandResult<'a> {
p: parse::parser::Parser<'a>,
p: Parser<'a>,
}
impl<'a> base::MacResult for ExpandResult<'a> {
fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> {
@ -1,7 +1,7 @@
use rustc_parse::validate_attr;
use syntax_pos::Symbol;
use syntax::ast::MetaItem;
use syntax::attr::AttributeTemplate;
use syntax::parse::validate_attr;
use syntax_expand::base::ExtCtxt;

pub fn check_builtin_macro_attribute(ecx: &ExtCtxt<'_>, meta_item: &MetaItem, name: Symbol) {
@ -7,9 +7,13 @@

extern crate syntax;
extern crate syntax_expand;
extern crate rustc_parse;
extern crate rustc_errors;

use rustc_errors::PResult;
use rustc_parse::parser::attr::*;
use rustc_parse::new_parser_from_source_str;
use rustc_parse::parser::Parser;
use syntax::ast::*;
use syntax::attr::*;
use syntax::ast;
@ -17,9 +21,6 @@ use syntax::sess::ParseSess;
use syntax::source_map::{FilePathMapping, FileName};
use syntax::ptr::P;
use syntax::print::pprust;
use syntax::parse::parser::attr::*;
use syntax::parse::new_parser_from_source_str;
use syntax::parse::parser::Parser;
use syntax::token;
use syntax_expand::config::process_configure_mod;
use std::fmt;
@ -6,11 +6,12 @@

extern crate syntax;
extern crate syntax_expand;
extern crate rustc_parse;

use rustc_parse::new_parser_from_file;
use std::path::Path;
use syntax::sess::ParseSess;
use syntax::source_map::FilePathMapping;
use syntax::parse::new_parser_from_file;
use syntax_expand::config::process_configure_mod;

#[path = "mod_dir_simple/test.rs"]
@ -22,14 +22,15 @@
extern crate rustc_data_structures;
extern crate syntax;
extern crate syntax_expand;
extern crate rustc_parse;

use rustc_data_structures::thin_vec::ThinVec;
use rustc_parse::new_parser_from_source_str;
use syntax::ast::*;
use syntax::sess::ParseSess;
use syntax::source_map::{Spanned, DUMMY_SP, FileName};
use syntax::source_map::FilePathMapping;
use syntax::mut_visit::{self, MutVisitor, visit_clobber};
use syntax::parse::new_parser_from_source_str;
use syntax::print::pprust;
use syntax::ptr::P;
use syntax_expand::config::process_configure_mod;
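
Taken together, the hunks above apply one mechanical migration: everything that used to be reached through syntax::parse is now imported from the new rustc_parse crate, and MACRO_ARGUMENTS moves from syntax to rustc_parse as well. A minimal sketch of what a downstream caller looks like after the move; the helper function below is hypothetical, while the import paths and the stream_to_parser call mirror the diff (compare ExtCtxt::new_parser_from_tts above):

// Before: use syntax::parse::{self, parser::Parser};
//         use syntax::MACRO_ARGUMENTS;
// After: the same items come from the rustc_parse crate.
use rustc_parse::{parser::Parser, MACRO_ARGUMENTS};
use syntax::sess::ParseSess;
use syntax::tokenstream::TokenStream;

// Hypothetical helper: turn a macro's argument token stream into a parser.
// Only the crate path changes; the signature of stream_to_parser stays the same.
fn parser_for_macro_args<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
    rustc_parse::stream_to_parser(sess, stream, MACRO_ARGUMENTS)
}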