Auto merge of #63469 - Centril:refactor-parser, r=petrochenkov
libsyntax: Refactor `parser.rs` into reasonably sized logical units

Here we split `parser.rs` (~7.9 KLOC) into more reasonably sized files (all < 1.8 KLOC):

- `./src/libsyntax/parse/`
  - `parser.rs`
  - `parser/`
    - `pat.rs`
    - `expr.rs`
    - `stmt.rs`
    - `ty.rs`
    - `path.rs`
    - `generics.rs`
    - `item.rs`
    - `module.rs`

Closes https://github.com/rust-lang/rust/issues/60015.

r? @petrochenkov
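For orientation, here is a minimal, self-contained sketch of the pattern this refactor relies on: a single parser type defined in one module, with its methods split across sibling modules via separate `impl` blocks, which is how the new `parser/` files extend `Parser` below. The module and method names in the sketch are illustrative, not the ones used by libsyntax.

```rust
// Sketch only: one type, methods split across modules via inherent `impl` blocks.
mod parser_core {
    pub struct Parser {
        pub pos: usize,
    }
}

mod pat {
    use super::parser_core::Parser;

    impl Parser {
        // Pattern-parsing methods would live here.
        pub fn parse_pat_stub(&mut self) -> &'static str {
            self.pos += 1;
            "pattern"
        }
    }
}

mod expr {
    use super::parser_core::Parser;

    impl Parser {
        // Expression-parsing methods would live here.
        pub fn parse_expr_stub(&mut self) -> &'static str {
            self.pos += 1;
            "expression"
        }
    }
}

fn main() {
    let mut p = parser_core::Parser { pos: 0 };
    println!("{} {} (pos = {})", p.parse_pat_stub(), p.parse_expr_stub(), p.pos);
}
```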
commit 72f8043d44
9 changed files with 6324 additions and 6191 deletions
File diff suppressed because it is too large
src/libsyntax/parse/parser/expr.rs (new file, 1748 lines)
File diff suppressed because it is too large
src/libsyntax/parse/parser/generics.rs (new file, 276 lines)
@@ -0,0 +1,276 @@
use super::{Parser, PResult};

use crate::ast::{self, WhereClause, GenericParam, GenericParamKind, GenericBounds, Attribute};
use crate::parse::token;
use crate::source_map::DUMMY_SP;
use crate::symbol::kw;

impl<'a> Parser<'a> {
    /// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
    ///
    /// ```
    /// BOUND = LT_BOUND (e.g., `'a`)
    /// ```
    fn parse_lt_param_bounds(&mut self) -> GenericBounds {
        let mut lifetimes = Vec::new();
        while self.check_lifetime() {
            lifetimes.push(ast::GenericBound::Outlives(self.expect_lifetime()));

            if !self.eat_plus() {
                break
            }
        }
        lifetimes
    }

    /// Matches `typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?`.
    fn parse_ty_param(&mut self,
                      preceding_attrs: Vec<Attribute>)
                      -> PResult<'a, GenericParam> {
        let ident = self.parse_ident()?;

        // Parse optional colon and param bounds.
        let bounds = if self.eat(&token::Colon) {
            self.parse_generic_bounds(Some(self.prev_span))?
        } else {
            Vec::new()
        };

        let default = if self.eat(&token::Eq) {
            Some(self.parse_ty()?)
        } else {
            None
        };

        Ok(GenericParam {
            ident,
            id: ast::DUMMY_NODE_ID,
            attrs: preceding_attrs.into(),
            bounds,
            kind: GenericParamKind::Type {
                default,
            }
        })
    }

    fn parse_const_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> {
        self.expect_keyword(kw::Const)?;
        let ident = self.parse_ident()?;
        self.expect(&token::Colon)?;
        let ty = self.parse_ty()?;

        Ok(GenericParam {
            ident,
            id: ast::DUMMY_NODE_ID,
            attrs: preceding_attrs.into(),
            bounds: Vec::new(),
            kind: GenericParamKind::Const {
                ty,
            }
        })
    }

    /// Parses a (possibly empty) list of lifetime and type parameters, possibly including
    /// a trailing comma and erroneous trailing attributes.
    crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
        let mut params = Vec::new();
        loop {
            let attrs = self.parse_outer_attributes()?;
            if self.check_lifetime() {
                let lifetime = self.expect_lifetime();
                // Parse lifetime parameter.
                let bounds = if self.eat(&token::Colon) {
                    self.parse_lt_param_bounds()
                } else {
                    Vec::new()
                };
                params.push(ast::GenericParam {
                    ident: lifetime.ident,
                    id: lifetime.id,
                    attrs: attrs.into(),
                    bounds,
                    kind: ast::GenericParamKind::Lifetime,
                });
            } else if self.check_keyword(kw::Const) {
                // Parse const parameter.
                params.push(self.parse_const_param(attrs)?);
            } else if self.check_ident() {
                // Parse type parameter.
                params.push(self.parse_ty_param(attrs)?);
            } else {
                // Check for trailing attributes and stop parsing.
                if !attrs.is_empty() {
                    if !params.is_empty() {
                        self.struct_span_err(
                            attrs[0].span,
                            &format!("trailing attribute after generic parameter"),
                        )
                        .span_label(attrs[0].span, "attributes must go before parameters")
                        .emit();
                    } else {
                        self.struct_span_err(
                            attrs[0].span,
                            &format!("attribute without generic parameters"),
                        )
                        .span_label(
                            attrs[0].span,
                            "attributes are only permitted when preceding parameters",
                        )
                        .emit();
                    }
                }
                break
            }

            if !self.eat(&token::Comma) {
                break
            }
        }
        Ok(params)
    }

    /// Parses a set of optional generic type parameter declarations. Where
    /// clauses are not parsed here, and must be added later via
    /// `parse_where_clause()`.
    ///
    /// matches generics = ( ) | ( < > ) | ( < typaramseq ( , )? > ) | ( < lifetimes ( , )? > )
    ///                  | ( < lifetimes , typaramseq ( , )? > )
    /// where   typaramseq = ( typaram ) | ( typaram , typaramseq )
    pub(super) fn parse_generics(&mut self) -> PResult<'a, ast::Generics> {
        let span_lo = self.token.span;
        let (params, span) = if self.eat_lt() {
            let params = self.parse_generic_params()?;
            self.expect_gt()?;
            (params, span_lo.to(self.prev_span))
        } else {
            (vec![], self.prev_span.between(self.token.span))
        };
        Ok(ast::Generics {
            params,
            where_clause: WhereClause {
                predicates: Vec::new(),
                span: DUMMY_SP,
            },
            span,
        })
    }

    /// Parses an optional where-clause and places it in `generics`.
    ///
    /// ```ignore (only-for-syntax-highlight)
    /// where T : Trait<U, V> + 'b, 'a : 'b
    /// ```
    pub(super) fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> {
        let mut where_clause = WhereClause {
            predicates: Vec::new(),
            span: self.prev_span.to(self.prev_span),
        };

        if !self.eat_keyword(kw::Where) {
            return Ok(where_clause);
        }
        let lo = self.prev_span;

        // We are considering adding generics to the `where` keyword as an alternative higher-rank
        // parameter syntax (as in `where<'a>` or `where<T>`). To avoid that being a breaking
        // change, we parse those generics now, but report an error.
        if self.choose_generics_over_qpath() {
            let generics = self.parse_generics()?;
            self.struct_span_err(
                generics.span,
                "generic parameters on `where` clauses are reserved for future use",
            )
            .span_label(generics.span, "currently unsupported")
            .emit();
        }

        loop {
            let lo = self.token.span;
            if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
                let lifetime = self.expect_lifetime();
                // Bounds starting with a colon are mandatory, but possibly empty.
                self.expect(&token::Colon)?;
                let bounds = self.parse_lt_param_bounds();
                where_clause.predicates.push(ast::WherePredicate::RegionPredicate(
                    ast::WhereRegionPredicate {
                        span: lo.to(self.prev_span),
                        lifetime,
                        bounds,
                    }
                ));
            } else if self.check_type() {
                // Parse optional `for<'a, 'b>`.
                // This `for` is parsed greedily and applies to the whole predicate,
                // the bounded type can have its own `for` applying only to it.
                // Examples:
                // * `for<'a> Trait1<'a>: Trait2<'a /* ok */>`
                // * `(for<'a> Trait1<'a>): Trait2<'a /* not ok */>`
                // * `for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /* ok */, 'b /* not ok */>`
                let lifetime_defs = self.parse_late_bound_lifetime_defs()?;

                // Parse type with mandatory colon and (possibly empty) bounds,
                // or with mandatory equality sign and the second type.
                let ty = self.parse_ty()?;
                if self.eat(&token::Colon) {
                    let bounds = self.parse_generic_bounds(Some(self.prev_span))?;
                    where_clause.predicates.push(ast::WherePredicate::BoundPredicate(
                        ast::WhereBoundPredicate {
                            span: lo.to(self.prev_span),
                            bound_generic_params: lifetime_defs,
                            bounded_ty: ty,
                            bounds,
                        }
                    ));
                // FIXME: Decide what should be used here, `=` or `==`.
                // FIXME: We are just dropping the binders in lifetime_defs on the floor here.
                } else if self.eat(&token::Eq) || self.eat(&token::EqEq) {
                    let rhs_ty = self.parse_ty()?;
                    where_clause.predicates.push(ast::WherePredicate::EqPredicate(
                        ast::WhereEqPredicate {
                            span: lo.to(self.prev_span),
                            lhs_ty: ty,
                            rhs_ty,
                            id: ast::DUMMY_NODE_ID,
                        }
                    ));
                } else {
                    return self.unexpected();
                }
            } else {
                break
            }

            if !self.eat(&token::Comma) {
                break
            }
        }

        where_clause.span = lo.to(self.prev_span);
        Ok(where_clause)
    }

    pub(super) fn choose_generics_over_qpath(&self) -> bool {
        // There's an ambiguity between generic parameters and qualified paths in impls.
        // If we see `<` it may start both, so we have to inspect some following tokens.
        // The following combinations can only start generics,
        // but not qualified paths (with one exception):
        //     `<` `>` - empty generic parameters
        //     `<` `#` - generic parameters with attributes
        //     `<` (LIFETIME|IDENT) `>` - single generic parameter
        //     `<` (LIFETIME|IDENT) `,` - first generic parameter in a list
        //     `<` (LIFETIME|IDENT) `:` - generic parameter with bounds
        //     `<` (LIFETIME|IDENT) `=` - generic parameter with a default
        //     `<` const                - generic const parameter
        // The only truly ambiguous case is
        //     `<` IDENT `>` `::` IDENT ...
        // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`)
        // because this is what is almost always expected in practice; qualified paths in impls
        // (`impl <Type>::AssocTy { ... }`) aren't even allowed by the type checker at the moment.
        self.token == token::Lt &&
            (self.look_ahead(1, |t| t == &token::Pound || t == &token::Gt) ||
             self.look_ahead(1, |t| t.is_lifetime() || t.is_ident()) &&
                self.look_ahead(2, |t| t == &token::Gt || t == &token::Comma ||
                                       t == &token::Colon || t == &token::Eq) ||
             self.is_keyword_ahead(1, &[kw::Const]))
    }
}
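For reference, the following ordinary Rust snippet exercises the grammar the functions above accept: lifetime and type parameters with bounds and a default (`parse_generic_params`), and a trailing `where` clause including a lifetime predicate (`parse_where_clause`). It is an illustrative example, not code from this PR.

```rust
use std::fmt::Debug;

// Generic parameter list: lifetimes, a bounded type parameter, and a defaulted one.
struct Wrapper<'a, 'b, T: Debug, U = u32>
where
    'b: 'a,            // lifetime predicate (WhereRegionPredicate)
    T: Clone + 'a,     // bound predicate (WhereBoundPredicate)
    U: Default,
{
    first: &'a T,
    second: &'b U,
}

fn describe<T>(w: &Wrapper<'_, '_, T>) -> String
where
    T: Debug + Clone,
{
    format!("{:?} / {:?}", w.first, w.second)
}

fn main() {
    let t = String::from("hi");
    let u = 7u32;
    let w = Wrapper { first: &t, second: &u };
    println!("{}", describe(&w));
}
```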
src/libsyntax/parse/parser/item.rs (new file, 1915 lines)
File diff suppressed because it is too large
src/libsyntax/parse/parser/module.rs (new file, 332 lines)
@@ -0,0 +1,332 @@
use super::{Parser, PResult};
use super::item::ItemInfo;

use crate::attr;
use crate::ast::{self, Ident, Attribute, ItemKind, Mod, Crate};
use crate::parse::{new_sub_parser_from_file, DirectoryOwnership};
use crate::parse::token::{self, TokenKind};
use crate::parse::diagnostics::{Error};
use crate::source_map::{SourceMap, Span, DUMMY_SP, FileName};
use crate::symbol::sym;

use std::path::{self, Path, PathBuf};

/// Information about the path to a module.
pub struct ModulePath {
    name: String,
    path_exists: bool,
    pub result: Result<ModulePathSuccess, Error>,
}

pub struct ModulePathSuccess {
    pub path: PathBuf,
    pub directory_ownership: DirectoryOwnership,
    warn: bool,
}

impl<'a> Parser<'a> {
    /// Parses a source module as a crate. This is the main entry point for the parser.
    pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
        let lo = self.token.span;
        let krate = Ok(ast::Crate {
            attrs: self.parse_inner_attributes()?,
            module: self.parse_mod_items(&token::Eof, lo)?,
            span: lo.to(self.token.span),
        });
        krate
    }

    /// Parse a `mod <foo> { ... }` or `mod <foo>;` item
    pub(super) fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
        let (in_cfg, outer_attrs) = {
            let mut strip_unconfigured = crate::config::StripUnconfigured {
                sess: self.sess,
                features: None, // don't perform gated feature checking
            };
            let mut outer_attrs = outer_attrs.to_owned();
            strip_unconfigured.process_cfg_attrs(&mut outer_attrs);
            (!self.cfg_mods || strip_unconfigured.in_cfg(&outer_attrs), outer_attrs)
        };

        let id_span = self.token.span;
        let id = self.parse_ident()?;
        if self.eat(&token::Semi) {
            if in_cfg && self.recurse_into_file_modules {
                // This mod is in an external file. Let's go get it!
                let ModulePathSuccess { path, directory_ownership, warn } =
                    self.submod_path(id, &outer_attrs, id_span)?;
                let (module, mut attrs) =
                    self.eval_src_mod(path, directory_ownership, id.to_string(), id_span)?;
                // Record that we fetched the mod from an external file
                if warn {
                    let attr = attr::mk_attr_outer(
                        attr::mk_word_item(Ident::with_empty_ctxt(sym::warn_directory_ownership)));
                    attr::mark_known(&attr);
                    attrs.push(attr);
                }
                Ok((id, ItemKind::Mod(module), Some(attrs)))
            } else {
                let placeholder = ast::Mod {
                    inner: DUMMY_SP,
                    items: Vec::new(),
                    inline: false
                };
                Ok((id, ItemKind::Mod(placeholder), None))
            }
        } else {
            let old_directory = self.directory.clone();
            self.push_directory(id, &outer_attrs);

            self.expect(&token::OpenDelim(token::Brace))?;
            let mod_inner_lo = self.token.span;
            let attrs = self.parse_inner_attributes()?;
            let module = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)?;

            self.directory = old_directory;
            Ok((id, ItemKind::Mod(module), Some(attrs)))
        }
    }

    /// Given a termination token, parses all of the items in a module.
    fn parse_mod_items(&mut self, term: &TokenKind, inner_lo: Span) -> PResult<'a, Mod> {
        let mut items = vec![];
        while let Some(item) = self.parse_item()? {
            items.push(item);
            self.maybe_consume_incorrect_semicolon(&items);
        }

        if !self.eat(term) {
            let token_str = self.this_token_descr();
            if !self.maybe_consume_incorrect_semicolon(&items) {
                let mut err = self.fatal(&format!("expected item, found {}", token_str));
                err.span_label(self.token.span, "expected item");
                return Err(err);
            }
        }

        let hi = if self.token.span.is_dummy() {
            inner_lo
        } else {
            self.prev_span
        };

        Ok(Mod {
            inner: inner_lo.to(hi),
            items,
            inline: true
        })
    }

    fn submod_path(
        &mut self,
        id: ast::Ident,
        outer_attrs: &[Attribute],
        id_sp: Span
    ) -> PResult<'a, ModulePathSuccess> {
        if let Some(path) = Parser::submod_path_from_attr(outer_attrs, &self.directory.path) {
            return Ok(ModulePathSuccess {
                directory_ownership: match path.file_name().and_then(|s| s.to_str()) {
                    // All `#[path]` files are treated as though they are a `mod.rs` file.
                    // This means that `mod foo;` declarations inside `#[path]`-included
                    // files are siblings,
                    //
                    // Note that this will produce weirdness when a file named `foo.rs` is
                    // `#[path]` included and contains a `mod foo;` declaration.
                    // If you encounter this, it's your own darn fault :P
                    Some(_) => DirectoryOwnership::Owned { relative: None },
                    _ => DirectoryOwnership::UnownedViaMod(true),
                },
                path,
                warn: false,
            });
        }

        let relative = match self.directory.ownership {
            DirectoryOwnership::Owned { relative } => relative,
            DirectoryOwnership::UnownedViaBlock |
            DirectoryOwnership::UnownedViaMod(_) => None,
        };
        let paths = Parser::default_submod_path(
            id, relative, &self.directory.path, self.sess.source_map());

        match self.directory.ownership {
            DirectoryOwnership::Owned { .. } => {
                paths.result.map_err(|err| self.span_fatal_err(id_sp, err))
            },
            DirectoryOwnership::UnownedViaBlock => {
                let msg =
                    "Cannot declare a non-inline module inside a block \
                     unless it has a path attribute";
                let mut err = self.diagnostic().struct_span_err(id_sp, msg);
                if paths.path_exists {
                    let msg = format!("Maybe `use` the module `{}` instead of redeclaring it",
                                      paths.name);
                    err.span_note(id_sp, &msg);
                }
                Err(err)
            }
            DirectoryOwnership::UnownedViaMod(warn) => {
                if warn {
                    if let Ok(result) = paths.result {
                        return Ok(ModulePathSuccess { warn: true, ..result });
                    }
                }
                let mut err = self.diagnostic().struct_span_err(id_sp,
                    "cannot declare a new module at this location");
                if !id_sp.is_dummy() {
                    let src_path = self.sess.source_map().span_to_filename(id_sp);
                    if let FileName::Real(src_path) = src_path {
                        if let Some(stem) = src_path.file_stem() {
                            let mut dest_path = src_path.clone();
                            dest_path.set_file_name(stem);
                            dest_path.push("mod.rs");
                            err.span_note(id_sp,
                                          &format!("maybe move this module `{}` to its own \
                                                    directory via `{}`", src_path.display(),
                                                   dest_path.display()));
                        }
                    }
                }
                if paths.path_exists {
                    err.span_note(id_sp,
                                  &format!("... or maybe `use` the module `{}` instead \
                                            of possibly redeclaring it",
                                           paths.name));
                }
                Err(err)
            }
        }
    }

    pub fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> {
        if let Some(s) = attr::first_attr_value_str_by_name(attrs, sym::path) {
            let s = s.as_str();

            // On windows, the base path might have the form
            // `\\?\foo\bar` in which case it does not tolerate
            // mixed `/` and `\` separators, so canonicalize
            // `/` to `\`.
            #[cfg(windows)]
            let s = s.replace("/", "\\");
            Some(dir_path.join(s))
        } else {
            None
        }
    }

    /// Returns a path to a module.
    pub fn default_submod_path(
        id: ast::Ident,
        relative: Option<ast::Ident>,
        dir_path: &Path,
        source_map: &SourceMap) -> ModulePath
    {
        // If we're in a foo.rs file instead of a mod.rs file,
        // we need to look for submodules in
        // `./foo/<id>.rs` and `./foo/<id>/mod.rs` rather than
        // `./<id>.rs` and `./<id>/mod.rs`.
        let relative_prefix_string;
        let relative_prefix = if let Some(ident) = relative {
            relative_prefix_string = format!("{}{}", ident.as_str(), path::MAIN_SEPARATOR);
            &relative_prefix_string
        } else {
            ""
        };

        let mod_name = id.to_string();
        let default_path_str = format!("{}{}.rs", relative_prefix, mod_name);
        let secondary_path_str = format!("{}{}{}mod.rs",
                                         relative_prefix, mod_name, path::MAIN_SEPARATOR);
        let default_path = dir_path.join(&default_path_str);
        let secondary_path = dir_path.join(&secondary_path_str);
        let default_exists = source_map.file_exists(&default_path);
        let secondary_exists = source_map.file_exists(&secondary_path);

        let result = match (default_exists, secondary_exists) {
            (true, false) => Ok(ModulePathSuccess {
                path: default_path,
                directory_ownership: DirectoryOwnership::Owned {
                    relative: Some(id),
                },
                warn: false,
            }),
            (false, true) => Ok(ModulePathSuccess {
                path: secondary_path,
                directory_ownership: DirectoryOwnership::Owned {
                    relative: None,
                },
                warn: false,
            }),
            (false, false) => Err(Error::FileNotFoundForModule {
                mod_name: mod_name.clone(),
                default_path: default_path_str,
                secondary_path: secondary_path_str,
                dir_path: dir_path.display().to_string(),
            }),
            (true, true) => Err(Error::DuplicatePaths {
                mod_name: mod_name.clone(),
                default_path: default_path_str,
                secondary_path: secondary_path_str,
            }),
        };

        ModulePath {
            name: mod_name,
            path_exists: default_exists || secondary_exists,
            result,
        }
    }

    /// Reads a module from a source file.
    fn eval_src_mod(
        &mut self,
        path: PathBuf,
        directory_ownership: DirectoryOwnership,
        name: String,
        id_sp: Span,
    ) -> PResult<'a, (Mod, Vec<Attribute>)> {
        let mut included_mod_stack = self.sess.included_mod_stack.borrow_mut();
        if let Some(i) = included_mod_stack.iter().position(|p| *p == path) {
            let mut err = String::from("circular modules: ");
            let len = included_mod_stack.len();
            for p in &included_mod_stack[i.. len] {
                err.push_str(&p.to_string_lossy());
                err.push_str(" -> ");
            }
            err.push_str(&path.to_string_lossy());
            return Err(self.span_fatal(id_sp, &err[..]));
        }
        included_mod_stack.push(path.clone());
        drop(included_mod_stack);

        let mut p0 =
            new_sub_parser_from_file(self.sess, &path, directory_ownership, Some(name), id_sp);
        p0.cfg_mods = self.cfg_mods;
        let mod_inner_lo = p0.token.span;
        let mod_attrs = p0.parse_inner_attributes()?;
        let mut m0 = p0.parse_mod_items(&token::Eof, mod_inner_lo)?;
        m0.inline = false;
        self.sess.included_mod_stack.borrow_mut().pop();
        Ok((m0, mod_attrs))
    }

    fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) {
        if let Some(path) = attr::first_attr_value_str_by_name(attrs, sym::path) {
            self.directory.path.to_mut().push(&path.as_str());
            self.directory.ownership = DirectoryOwnership::Owned { relative: None };
        } else {
            // We have to push on the current module name in the case of relative
            // paths in order to ensure that any additional module paths from inline
            // `mod x { ... }` come after the relative extension.
            //
            // For example, a `mod z { ... }` inside `x/y.rs` should set the current
            // directory path to `/x/y/z`, not `/x/z` with a relative offset of `y`.
            if let DirectoryOwnership::Owned { relative } = &mut self.directory.ownership {
                if let Some(ident) = relative.take() { // remove the relative offset
                    self.directory.path.to_mut().push(ident.as_str());
                }
            }
            self.directory.path.to_mut().push(&id.as_str());
        }
    }
}
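The lookup order implemented by `default_submod_path` above can be sketched as a small standalone program: for `mod bar;` declared inside `foo.rs`, the default candidate is `foo/bar.rs` and the secondary candidate is `foo/bar/mod.rs`, with an error if neither or both exist. The helper below mirrors that match on `(default_exists, secondary_exists)` using plain `std::path`; it is a sketch for illustration, not the rustc implementation, and the function names are invented.

```rust
use std::path::{Path, PathBuf};

// `relative` is the stem of the enclosing `foo.rs` file, if any (mirrors the
// `relative` prefix handling in `default_submod_path`).
fn candidate_paths(dir: &Path, relative: Option<&str>, id: &str) -> (PathBuf, PathBuf) {
    let prefix: PathBuf = relative.map(PathBuf::from).unwrap_or_default();
    let default = dir.join(prefix.join(format!("{}.rs", id)));
    let secondary = dir.join(prefix.join(id).join("mod.rs"));
    (default, secondary)
}

fn resolve(dir: &Path, relative: Option<&str>, id: &str) -> Result<PathBuf, String> {
    let (default, secondary) = candidate_paths(dir, relative, id);
    // Same four-way decision as the `(default_exists, secondary_exists)` match above.
    match (default.exists(), secondary.exists()) {
        (true, false) => Ok(default),
        (false, true) => Ok(secondary),
        (false, false) => Err(format!("file not found for module `{}`", id)),
        (true, true) => Err(format!("both {:?} and {:?} exist for `{}`", default, secondary, id)),
    }
}

fn main() {
    let (default, secondary) = candidate_paths(Path::new("src"), Some("foo"), "bar");
    println!("candidates: {:?} or {:?}", default, secondary);
    match resolve(Path::new("src"), Some("foo"), "bar") {
        Ok(p) => println!("resolved: {:?}", p),
        Err(e) => println!("error: {}", e),
    }
}
```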
src/libsyntax/parse/parser/pat.rs (new file, 634 lines)
@@ -0,0 +1,634 @@
use super::{Parser, PResult, PathStyle};

use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use crate::ptr::P;
use crate::ast::{self, Attribute, Pat, PatKind, FieldPat, RangeEnd, RangeSyntax, Mac_};
use crate::ast::{BindingMode, Ident, Mutability, Expr, ExprKind};
use crate::parse::token::{self};
use crate::print::pprust;
use crate::source_map::{respan, Span, Spanned};
use crate::symbol::kw;
use crate::ThinVec;

use errors::{Applicability, DiagnosticBuilder};

impl<'a> Parser<'a> {
    /// Parses a pattern.
    pub fn parse_pat(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> {
        self.parse_pat_with_range_pat(true, expected)
    }

    /// Parses patterns, separated by '|' s.
    pub(super) fn parse_pats(&mut self) -> PResult<'a, Vec<P<Pat>>> {
        // Allow a '|' before the pats (RFC 1925 + RFC 2530)
        self.eat(&token::BinOp(token::Or));

        let mut pats = Vec::new();
        loop {
            pats.push(self.parse_top_level_pat()?);

            if self.token == token::OrOr {
                self.struct_span_err(self.token.span, "unexpected token `||` after pattern")
                    .span_suggestion(
                        self.token.span,
                        "use a single `|` to specify multiple patterns",
                        "|".to_owned(),
                        Applicability::MachineApplicable
                    )
                    .emit();
                self.bump();
            } else if self.eat(&token::BinOp(token::Or)) {
                // This is a no-op. Continue the loop to parse the next
                // pattern.
            } else {
                return Ok(pats);
            }
        };
    }

    /// A wrapper around `parse_pat` with some special error handling for the
    /// "top-level" patterns in a match arm, `for` loop, `let`, &c. (in contrast
    /// to subpatterns within such).
    pub(super) fn parse_top_level_pat(&mut self) -> PResult<'a, P<Pat>> {
        let pat = self.parse_pat(None)?;
        if self.token == token::Comma {
            // An unexpected comma after a top-level pattern is a clue that the
            // user (perhaps more accustomed to some other language) forgot the
            // parentheses in what should have been a tuple pattern; return a
            // suggestion-enhanced error here rather than choking on the comma
            // later.
            let comma_span = self.token.span;
            self.bump();
            if let Err(mut err) = self.skip_pat_list() {
                // We didn't expect this to work anyway; we just wanted
                // to advance to the end of the comma-sequence so we know
                // the span to suggest parenthesizing.
                err.cancel();
            }
            let seq_span = pat.span.to(self.prev_span);
            let mut err = self.struct_span_err(comma_span, "unexpected `,` in pattern");
            if let Ok(seq_snippet) = self.span_to_snippet(seq_span) {
                err.span_suggestion(
                    seq_span,
                    "try adding parentheses to match on a tuple..",
                    format!("({})", seq_snippet),
                    Applicability::MachineApplicable
                ).span_suggestion(
                    seq_span,
                    "..or a vertical bar to match on multiple alternatives",
                    format!("{}", seq_snippet.replace(",", " |")),
                    Applicability::MachineApplicable
                );
            }
            return Err(err);
        }
        Ok(pat)
    }

    /// Parse and throw away a parenthesized comma separated
    /// sequence of patterns until `)` is reached.
    fn skip_pat_list(&mut self) -> PResult<'a, ()> {
        while !self.check(&token::CloseDelim(token::Paren)) {
            self.parse_pat(None)?;
            if !self.eat(&token::Comma) {
                return Ok(())
            }
        }
        Ok(())
    }

    /// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b`)
    /// are allowed.
    fn parse_pat_with_range_pat(
        &mut self,
        allow_range_pat: bool,
        expected: Option<&'static str>,
    ) -> PResult<'a, P<Pat>> {
        maybe_recover_from_interpolated_ty_qpath!(self, true);
        maybe_whole!(self, NtPat, |x| x);

        let lo = self.token.span;
        let pat;
        match self.token.kind {
            token::BinOp(token::And) | token::AndAnd => {
                // Parse &pat / &mut pat
                self.expect_and()?;
                let mutbl = self.parse_mutability();
                if let token::Lifetime(name) = self.token.kind {
                    let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", name));
                    err.span_label(self.token.span, "unexpected lifetime");
                    return Err(err);
                }
                let subpat = self.parse_pat_with_range_pat(false, expected)?;
                pat = PatKind::Ref(subpat, mutbl);
            }
            token::OpenDelim(token::Paren) => {
                // Parse a tuple or parenthesis pattern.
                let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| p.parse_pat(None))?;

                // Here, `(pat,)` is a tuple pattern.
                // For backward compatibility, `(..)` is a tuple pattern as well.
                pat = if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) {
                    PatKind::Paren(fields.into_iter().nth(0).unwrap())
                } else {
                    PatKind::Tuple(fields)
                };
            }
            token::OpenDelim(token::Bracket) => {
                // Parse `[pat, pat,...]` as a slice pattern.
                let (slice, _) = self.parse_delim_comma_seq(token::Bracket, |p| p.parse_pat(None))?;
                pat = PatKind::Slice(slice);
            }
            token::DotDot => {
                self.bump();
                pat = if self.is_pat_range_end_start() {
                    // Parse `..42` for recovery.
                    self.parse_pat_range_to(RangeEnd::Excluded, "..")?
                } else {
                    // A rest pattern `..`.
                    PatKind::Rest
                };
            }
            token::DotDotEq => {
                // Parse `..=42` for recovery.
                self.bump();
                pat = self.parse_pat_range_to(RangeEnd::Included(RangeSyntax::DotDotEq), "..=")?;
            }
            token::DotDotDot => {
                // Parse `...42` for recovery.
                self.bump();
                pat = self.parse_pat_range_to(RangeEnd::Included(RangeSyntax::DotDotDot), "...")?;
            }
            // At this point, token != &, &&, (, [
            _ => if self.eat_keyword(kw::Underscore) {
                // Parse _
                pat = PatKind::Wild;
            } else if self.eat_keyword(kw::Mut) {
                // Parse mut ident @ pat / mut ref ident @ pat
                let mutref_span = self.prev_span.to(self.token.span);
                let binding_mode = if self.eat_keyword(kw::Ref) {
                    self.diagnostic()
                        .struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
                        .span_suggestion(
                            mutref_span,
                            "try switching the order",
                            "ref mut".into(),
                            Applicability::MachineApplicable
                        ).emit();
                    BindingMode::ByRef(Mutability::Mutable)
                } else {
                    BindingMode::ByValue(Mutability::Mutable)
                };
                pat = self.parse_pat_ident(binding_mode)?;
            } else if self.eat_keyword(kw::Ref) {
                // Parse ref ident @ pat / ref mut ident @ pat
                let mutbl = self.parse_mutability();
                pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?;
            } else if self.eat_keyword(kw::Box) {
                // Parse box pat
                let subpat = self.parse_pat_with_range_pat(false, None)?;
                pat = PatKind::Box(subpat);
            } else if self.token.is_ident() && !self.token.is_reserved_ident() &&
                      self.parse_as_ident() {
                // Parse ident @ pat
                // This can give false positives and parse nullary enums;
                // they are dealt with later in resolve.
                let binding_mode = BindingMode::ByValue(Mutability::Immutable);
                pat = self.parse_pat_ident(binding_mode)?;
            } else if self.token.is_path_start() {
                // Parse pattern starting with a path
                let (qself, path) = if self.eat_lt() {
                    // Parse a qualified path
                    let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
                    (Some(qself), path)
                } else {
                    // Parse an unqualified path
                    (None, self.parse_path(PathStyle::Expr)?)
                };
                match self.token.kind {
                    token::Not if qself.is_none() => {
                        // Parse macro invocation
                        self.bump();
                        let (delim, tts) = self.expect_delimited_token_tree()?;
                        let mac = respan(lo.to(self.prev_span), Mac_ {
                            path,
                            tts,
                            delim,
                            prior_type_ascription: self.last_type_ascription,
                        });
                        pat = PatKind::Mac(mac);
                    }
                    token::DotDotDot | token::DotDotEq | token::DotDot => {
                        let (end_kind, form) = match self.token.kind {
                            token::DotDot => (RangeEnd::Excluded, ".."),
                            token::DotDotDot => (RangeEnd::Included(RangeSyntax::DotDotDot), "..."),
                            token::DotDotEq => (RangeEnd::Included(RangeSyntax::DotDotEq), "..="),
                            _ => panic!("can only parse `..`/`...`/`..=` for ranges \
                                         (checked above)"),
                        };
                        let op_span = self.token.span;
                        // Parse range
                        let span = lo.to(self.prev_span);
                        let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new());
                        self.bump();
                        let end = self.parse_pat_range_end_opt(&begin, form)?;
                        pat = PatKind::Range(begin, end, respan(op_span, end_kind));
                    }
                    token::OpenDelim(token::Brace) => {
                        if qself.is_some() {
                            let msg = "unexpected `{` after qualified path";
                            let mut err = self.fatal(msg);
                            err.span_label(self.token.span, msg);
                            return Err(err);
                        }
                        // Parse struct pattern
                        self.bump();
                        let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| {
                            e.emit();
                            self.recover_stmt();
                            (vec![], true)
                        });
                        self.bump();
                        pat = PatKind::Struct(path, fields, etc);
                    }
                    token::OpenDelim(token::Paren) => {
                        if qself.is_some() {
                            let msg = "unexpected `(` after qualified path";
                            let mut err = self.fatal(msg);
                            err.span_label(self.token.span, msg);
                            return Err(err);
                        }
                        // Parse tuple struct or enum pattern
                        let (fields, _) = self.parse_paren_comma_seq(|p| p.parse_pat(None))?;
                        pat = PatKind::TupleStruct(path, fields)
                    }
                    _ => pat = PatKind::Path(qself, path),
                }
            } else {
                // Try to parse everything else as a literal with an optional minus.
                match self.parse_literal_maybe_minus() {
                    Ok(begin) => {
                        let op_span = self.token.span;
                        if self.check(&token::DotDot) || self.check(&token::DotDotEq) ||
                                self.check(&token::DotDotDot) {
                            let (end_kind, form) = if self.eat(&token::DotDotDot) {
                                (RangeEnd::Included(RangeSyntax::DotDotDot), "...")
                            } else if self.eat(&token::DotDotEq) {
                                (RangeEnd::Included(RangeSyntax::DotDotEq), "..=")
                            } else if self.eat(&token::DotDot) {
                                (RangeEnd::Excluded, "..")
                            } else {
                                panic!("impossible case: we already matched \
                                        on a range-operator token")
                            };
                            let end = self.parse_pat_range_end_opt(&begin, form)?;
                            pat = PatKind::Range(begin, end, respan(op_span, end_kind))
                        } else {
                            pat = PatKind::Lit(begin);
                        }
                    }
                    Err(mut err) => {
                        self.cancel(&mut err);
                        let expected = expected.unwrap_or("pattern");
                        let msg = format!(
                            "expected {}, found {}",
                            expected,
                            self.this_token_descr(),
                        );
                        let mut err = self.fatal(&msg);
                        err.span_label(self.token.span, format!("expected {}", expected));
                        let sp = self.sess.source_map().start_point(self.token.span);
                        if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
                            self.sess.expr_parentheses_needed(&mut err, *sp, None);
                        }
                        return Err(err);
                    }
                }
            }
        }

        let pat = self.mk_pat(lo.to(self.prev_span), pat);
        let pat = self.maybe_recover_from_bad_qpath(pat, true)?;

        if !allow_range_pat {
            match pat.node {
                PatKind::Range(
                    _, _, Spanned { node: RangeEnd::Included(RangeSyntax::DotDotDot), .. }
                ) => {},
                PatKind::Range(..) => {
                    let mut err = self.struct_span_err(
                        pat.span,
                        "the range pattern here has ambiguous interpretation",
                    );
                    err.span_suggestion(
                        pat.span,
                        "add parentheses to clarify the precedence",
                        format!("({})", pprust::pat_to_string(&pat)),
                        // "ambiguous interpretation" implies that we have to be guessing
                        Applicability::MaybeIncorrect
                    );
                    return Err(err);
                }
                _ => {}
            }
        }

        Ok(pat)
    }

    // Helper function to decide whether to parse as an ident binding
    // or to try to do something more complex like range patterns.
    fn parse_as_ident(&mut self) -> bool {
        self.look_ahead(1, |t| match t.kind {
            token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) |
            token::DotDotDot | token::DotDotEq | token::DotDot |
            token::ModSep | token::Not => false,
            _ => true,
        })
    }

    /// Is the current token suitable as the start of a range pattern's end?
    fn is_pat_range_end_start(&self) -> bool {
        self.token.is_path_start() // e.g. `MY_CONST`;
            || self.token == token::Dot // e.g. `.5` for recovery;
            || self.token.can_begin_literal_or_bool() // e.g. `42`.
            || self.token.is_whole_expr()
    }

    /// Parse a range-to pattern, e.g. `..X` and `..=X` for recovery.
    fn parse_pat_range_to(&mut self, re: RangeEnd, form: &str) -> PResult<'a, PatKind> {
        let lo = self.prev_span;
        let end = self.parse_pat_range_end()?;
        let range_span = lo.to(end.span);
        let begin = self.mk_expr(range_span, ExprKind::Err, ThinVec::new());

        self.diagnostic()
            .struct_span_err(range_span, &format!("`{}X` range patterns are not supported", form))
            .span_suggestion(
                range_span,
                "try using the minimum value for the type",
                format!("MIN{}{}", form, pprust::expr_to_string(&end)),
                Applicability::HasPlaceholders,
            )
            .emit();

        Ok(PatKind::Range(begin, end, respan(lo, re)))
    }

    /// Parse the end of a `X..Y`, `X..=Y`, or `X...Y` range pattern, or recover
    /// if that end is missing, treating it as `X..`, `X..=`, or `X...` respectively.
    fn parse_pat_range_end_opt(&mut self, begin: &Expr, form: &str) -> PResult<'a, P<Expr>> {
        if self.is_pat_range_end_start() {
            // Parsing e.g. `X..=Y`.
            self.parse_pat_range_end()
        } else {
            // Parsing e.g. `X..`.
            let range_span = begin.span.to(self.prev_span);

            self.diagnostic()
                .struct_span_err(
                    range_span,
                    &format!("`X{}` range patterns are not supported", form),
                )
                .span_suggestion(
                    range_span,
                    "try using the maximum value for the type",
                    format!("{}{}MAX", pprust::expr_to_string(&begin), form),
                    Applicability::HasPlaceholders,
                )
                .emit();

            Ok(self.mk_expr(range_span, ExprKind::Err, ThinVec::new()))
        }
    }

    fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
        if self.token.is_path_start() {
            let lo = self.token.span;
            let (qself, path) = if self.eat_lt() {
                // Parse a qualified path
                let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
                (Some(qself), path)
            } else {
                // Parse an unqualified path
                (None, self.parse_path(PathStyle::Expr)?)
            };
            let hi = self.prev_span;
            Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path), ThinVec::new()))
        } else {
            self.parse_literal_maybe_minus()
        }
    }

    /// Parses `ident` or `ident @ pat`.
    /// Used by the copy foo and ref foo patterns to give a good
    /// error message when parsing mistakes like `ref foo(a, b)`.
    fn parse_pat_ident(&mut self,
                       binding_mode: ast::BindingMode)
                       -> PResult<'a, PatKind> {
        let ident = self.parse_ident()?;
        let sub = if self.eat(&token::At) {
            Some(self.parse_pat(Some("binding pattern"))?)
        } else {
            None
        };

        // Just to be friendly, if they write something like
        //   ref Some(i)
        // we end up here with ( as the current token. This shortly
        // leads to a parse error. Note that if there is no explicit
        // binding mode then we do not end up here, because the lookahead
        // will direct us over to parse_enum_variant().
        if self.token == token::OpenDelim(token::Paren) {
            return Err(self.span_fatal(
                self.prev_span,
                "expected identifier, found enum pattern"))
        }

        Ok(PatKind::Ident(binding_mode, ident, sub))
    }

    /// Parses the fields of a struct-like pattern.
    fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<Spanned<FieldPat>>, bool)> {
        let mut fields = Vec::new();
        let mut etc = false;
        let mut ate_comma = true;
        let mut delayed_err: Option<DiagnosticBuilder<'a>> = None;
        let mut etc_span = None;

        while self.token != token::CloseDelim(token::Brace) {
            let attrs = match self.parse_outer_attributes() {
                Ok(attrs) => attrs,
                Err(err) => {
                    if let Some(mut delayed) = delayed_err {
                        delayed.emit();
                    }
                    return Err(err);
                },
            };
            let lo = self.token.span;

            // Check that a comma comes after every field.
            if !ate_comma {
                let err = self.struct_span_err(self.prev_span, "expected `,`");
                if let Some(mut delayed) = delayed_err {
                    delayed.emit();
                }
                return Err(err);
            }
            ate_comma = false;

            if self.check(&token::DotDot) || self.token == token::DotDotDot {
                etc = true;
                let mut etc_sp = self.token.span;

                if self.token == token::DotDotDot { // Issue #46718
                    // Accept `...` as if it were `..` to avoid further errors.
                    self.struct_span_err(self.token.span, "expected field pattern, found `...`")
                        .span_suggestion(
                            self.token.span,
                            "to omit remaining fields, use one fewer `.`",
                            "..".to_owned(),
                            Applicability::MachineApplicable
                        )
                        .emit();
                }
                self.bump(); // `..` || `...`

                if self.token == token::CloseDelim(token::Brace) {
                    etc_span = Some(etc_sp);
                    break;
                }
                let token_str = self.this_token_descr();
                let mut err = self.fatal(&format!("expected `}}`, found {}", token_str));

                err.span_label(self.token.span, "expected `}`");
                let mut comma_sp = None;
                if self.token == token::Comma { // Issue #49257
                    let nw_span = self.sess.source_map().span_until_non_whitespace(self.token.span);
                    etc_sp = etc_sp.to(nw_span);
                    err.span_label(etc_sp,
                                   "`..` must be at the end and cannot have a trailing comma");
                    comma_sp = Some(self.token.span);
                    self.bump();
                    ate_comma = true;
                }

                etc_span = Some(etc_sp.until(self.token.span));
                if self.token == token::CloseDelim(token::Brace) {
                    // If the struct looks otherwise well formed, recover and continue.
                    if let Some(sp) = comma_sp {
                        err.span_suggestion_short(
                            sp,
                            "remove this comma",
                            String::new(),
                            Applicability::MachineApplicable,
                        );
                    }
                    err.emit();
                    break;
                } else if self.token.is_ident() && ate_comma {
                    // Accept fields coming after `..,`.
                    // This way we avoid "pattern missing fields" errors afterwards.
                    // We delay this error until the end in order to have a span for a
                    // suggested fix.
                    if let Some(mut delayed_err) = delayed_err {
                        delayed_err.emit();
                        return Err(err);
                    } else {
                        delayed_err = Some(err);
                    }
                } else {
                    if let Some(mut err) = delayed_err {
                        err.emit();
                    }
                    return Err(err);
                }
            }

            fields.push(match self.parse_pat_field(lo, attrs) {
                Ok(field) => field,
                Err(err) => {
                    if let Some(mut delayed_err) = delayed_err {
                        delayed_err.emit();
                    }
                    return Err(err);
                }
            });
            ate_comma = self.eat(&token::Comma);
        }

        if let Some(mut err) = delayed_err {
            if let Some(etc_span) = etc_span {
                err.multipart_suggestion(
                    "move the `..` to the end of the field list",
                    vec![
                        (etc_span, String::new()),
                        (self.token.span, format!("{}.. }}", if ate_comma { "" } else { ", " })),
                    ],
                    Applicability::MachineApplicable,
                );
            }
            err.emit();
        }
        return Ok((fields, etc));
    }

    fn parse_pat_field(
        &mut self,
        lo: Span,
        attrs: Vec<Attribute>
    ) -> PResult<'a, Spanned<FieldPat>> {
        // Check if a colon exists one ahead. This means we're parsing a fieldname.
        let hi;
        let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
            // Parsing a pattern of the form "fieldname: pat"
            let fieldname = self.parse_field_name()?;
            self.bump();
            let pat = self.parse_pat(None)?;
            hi = pat.span;
            (pat, fieldname, false)
        } else {
            // Parsing a pattern of the form "(box) (ref) (mut) fieldname"
            let is_box = self.eat_keyword(kw::Box);
            let boxed_span = self.token.span;
            let is_ref = self.eat_keyword(kw::Ref);
            let is_mut = self.eat_keyword(kw::Mut);
            let fieldname = self.parse_ident()?;
            hi = self.prev_span;

            let bind_type = match (is_ref, is_mut) {
                (true, true) => BindingMode::ByRef(Mutability::Mutable),
                (true, false) => BindingMode::ByRef(Mutability::Immutable),
                (false, true) => BindingMode::ByValue(Mutability::Mutable),
                (false, false) => BindingMode::ByValue(Mutability::Immutable),
            };

            let fieldpat = self.mk_pat_ident(boxed_span.to(hi), bind_type, fieldname);
            let subpat = if is_box {
                self.mk_pat(lo.to(hi), PatKind::Box(fieldpat))
            } else {
                fieldpat
            };
            (subpat, fieldname, true)
        };

        Ok(Spanned {
            span: lo.to(hi),
            node: FieldPat {
                ident: fieldname,
                pat: subpat,
                is_shorthand,
                attrs: attrs.into(),
            }
        })
    }

    pub(super) fn mk_pat_ident(&self, span: Span, bm: BindingMode, ident: Ident) -> P<Pat> {
        self.mk_pat(span, PatKind::Ident(bm, ident, None))
    }

    fn mk_pat(&self, span: Span, node: PatKind) -> P<Pat> {
        P(Pat { node, span, id: ast::DUMMY_NODE_ID })
    }
}
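As a user-level reference, this snippet touches most of the branches `parse_pat_with_range_pat` distinguishes above: literal, range, wildcard, struct (with `..`, see `parse_pat_fields`), tuple, `ref` and reference patterns, `ident @ pat` bindings (see `parse_pat_ident`), or-patterns (see `parse_pats`), and slice patterns with a rest pattern. It is ordinary Rust written for illustration, not parser-internal code.

```rust
#[derive(Debug)]
struct Point { x: i32, y: i32 }

#[derive(Debug)]
enum Shape { Circle(i32), Unit }

fn classify(n: i32) -> &'static str {
    match n {
        0 => "zero",      // literal pattern
        1..=9 => "small", // range pattern
        _ => "large",     // wildcard pattern
    }
}

fn main() {
    println!("{}", classify(4));

    // Struct pattern with `..`.
    let p = Point { x: 1, y: 2 };
    let Point { x, .. } = p;
    println!("x = {}, y = {}", x, p.y);

    // Tuple pattern, `ref` binding, and reference pattern.
    let (a, ref b) = (10, 20);
    let &c = b;
    println!("{} {} {}", a, b, c);

    // Binding with a subpattern (`ident @ pat`) and an or-pattern written with `|`.
    for s in [Shape::Circle(5), Shape::Unit] {
        match s {
            whole @ Shape::Circle(1..=9) => println!("small circle: {:?}", whole),
            Shape::Circle(_) | Shape::Unit => println!("something else"),
        }
    }

    // Slice pattern with a rest pattern `..`.
    let xs = [1, 2, 3, 4];
    let [first, .., last] = xs;
    println!("{} .. {}", first, last);
}
```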
src/libsyntax/parse/parser/path.rs (new file, 474 lines)
@@ -0,0 +1,474 @@
use super::{Parser, PResult, TokenType};
|
||||
|
||||
use crate::{maybe_whole, ThinVec};
|
||||
use crate::ast::{self, QSelf, Path, PathSegment, Ident, ParenthesizedArgs, AngleBracketedArgs};
|
||||
use crate::ast::{AnonConst, GenericArg, AssocTyConstraint, AssocTyConstraintKind, BlockCheckMode};
|
||||
use crate::parse::token::{self, Token};
|
||||
use crate::source_map::{Span, BytePos};
|
||||
use crate::symbol::kw;
|
||||
|
||||
use std::mem;
|
||||
use log::debug;
|
||||
use errors::{Applicability};
|
||||
|
||||
/// Specifies how to parse a path.
|
||||
#[derive(Copy, Clone, PartialEq)]
|
||||
pub enum PathStyle {
|
||||
/// In some contexts, notably in expressions, paths with generic arguments are ambiguous
|
||||
/// with something else. For example, in expressions `segment < ....` can be interpreted
|
||||
/// as a comparison and `segment ( ....` can be interpreted as a function call.
|
||||
/// In all such contexts the non-path interpretation is preferred by default for practical
|
||||
/// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g.
|
||||
/// `x<y>` - comparisons, `x::<y>` - unambiguously a path.
|
||||
Expr,
|
||||
/// In other contexts, notably in types, no ambiguity exists and paths can be written
|
||||
/// without the disambiguator, e.g., `x<y>` - unambiguously a path.
|
||||
/// Paths with disambiguators are still accepted, `x::<Y>` - unambiguously a path too.
|
||||
Type,
|
||||
/// A path with generic arguments disallowed, e.g., `foo::bar::Baz`, used in imports,
|
||||
/// visibilities or attributes.
|
||||
/// Technically, this variant is unnecessary and e.g., `Expr` can be used instead
|
||||
/// (paths in "mod" contexts have to be checked later for absence of generic arguments
|
||||
/// anyway, due to macros), but it is used to avoid weird suggestions about expected
|
||||
/// tokens when something goes wrong.
|
||||
Mod,
|
||||
}
|
||||
|
||||
impl<'a> Parser<'a> {
|
||||
/// Parses a qualified path.
|
||||
/// Assumes that the leading `<` has been parsed already.
|
||||
///
|
||||
/// `qualified_path = <type [as trait_ref]>::path`
|
||||
///
|
||||
/// # Examples
|
||||
/// `<T>::default`
|
||||
/// `<T as U>::a`
|
||||
/// `<T as U>::F::a<S>` (without disambiguator)
|
||||
/// `<T as U>::F::a::<S>` (with disambiguator)
|
||||
pub(super) fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, Path)> {
|
||||
let lo = self.prev_span;
|
||||
let ty = self.parse_ty()?;
|
||||
|
||||
// `path` will contain the prefix of the path up to the `>`,
|
||||
// if any (e.g., `U` in the `<T as U>::*` examples
|
||||
// above). `path_span` has the span of that path, or an empty
|
||||
// span in the case of something like `<T>::Bar`.
|
||||
let (mut path, path_span);
|
||||
if self.eat_keyword(kw::As) {
|
||||
let path_lo = self.token.span;
|
||||
path = self.parse_path(PathStyle::Type)?;
|
||||
path_span = path_lo.to(self.prev_span);
|
||||
} else {
|
||||
path_span = self.token.span.to(self.token.span);
|
||||
path = ast::Path { segments: Vec::new(), span: path_span };
|
||||
}
|
||||
|
||||
// See doc comment for `unmatched_angle_bracket_count`.
|
||||
self.expect(&token::Gt)?;
|
||||
if self.unmatched_angle_bracket_count > 0 {
|
||||
self.unmatched_angle_bracket_count -= 1;
|
||||
debug!("parse_qpath: (decrement) count={:?}", self.unmatched_angle_bracket_count);
|
||||
}
|
||||
|
||||
self.expect(&token::ModSep)?;
|
||||
|
||||
let qself = QSelf { ty, path_span, position: path.segments.len() };
|
||||
self.parse_path_segments(&mut path.segments, style)?;
|
||||
|
||||
Ok((qself, Path { segments: path.segments, span: lo.to(self.prev_span) }))
|
||||
}
|
||||
|
||||
/// Parses simple paths.
|
||||
///
|
||||
/// `path = [::] segment+`
|
||||
/// `segment = ident | ident[::]<args> | ident[::](args) [-> type]`
|
||||
///
|
||||
/// # Examples
|
||||
/// `a::b::C<D>` (without disambiguator)
|
||||
/// `a::b::C::<D>` (with disambiguator)
|
||||
/// `Fn(Args)` (without disambiguator)
|
||||
/// `Fn::(Args)` (with disambiguator)
|
||||
pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, Path> {
|
||||
maybe_whole!(self, NtPath, |path| {
|
||||
if style == PathStyle::Mod &&
|
||||
path.segments.iter().any(|segment| segment.args.is_some()) {
|
||||
self.diagnostic().span_err(path.span, "unexpected generic arguments in path");
|
||||
}
|
||||
path
|
||||
});
|
||||
|
||||
let lo = self.meta_var_span.unwrap_or(self.token.span);
|
||||
let mut segments = Vec::new();
|
||||
let mod_sep_ctxt = self.token.span.ctxt();
|
||||
if self.eat(&token::ModSep) {
|
||||
segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
|
||||
}
|
||||
self.parse_path_segments(&mut segments, style)?;
|
||||
|
||||
Ok(Path { segments, span: lo.to(self.prev_span) })
|
||||
}
|
||||
|
||||
/// Like `parse_path`, but also supports parsing `Word` meta items into paths for
|
||||
/// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
|
||||
/// attributes.
|
||||
pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, Path> {
|
||||
let meta_ident = match self.token.kind {
|
||||
token::Interpolated(ref nt) => match **nt {
|
||||
token::NtMeta(ref meta) => match meta.node {
|
||||
ast::MetaItemKind::Word => Some(meta.path.clone()),
|
||||
_ => None,
|
||||
},
|
||||
_ => None,
|
||||
},
|
||||
_ => None,
|
||||
};
|
||||
if let Some(path) = meta_ident {
|
||||
self.bump();
|
||||
return Ok(path);
|
||||
}
|
||||
self.parse_path(style)
|
||||
}
|
||||
|
||||
crate fn parse_path_segments(&mut self,
|
||||
segments: &mut Vec<PathSegment>,
|
||||
style: PathStyle)
|
||||
-> PResult<'a, ()> {
|
||||
loop {
|
||||
let segment = self.parse_path_segment(style)?;
|
||||
if style == PathStyle::Expr {
|
||||
// In order to check for trailing angle brackets, we must have finished
|
||||
// recursing (`parse_path_segment` can indirectly call this function),
|
||||
// that is, the next token must be the highlighted part of the below example:
|
||||
//
|
||||
// `Foo::<Bar as Baz<T>>::Qux`
|
||||
// ^ here
|
||||
//
|
||||
// As opposed to the below highlight (if we had only finished the first
|
||||
// recursion):
|
||||
//
|
||||
// `Foo::<Bar as Baz<T>>::Qux`
|
||||
// ^ here
|
||||
//
|
||||
// `PathStyle::Expr` is only provided at the root invocation and never in
|
||||
// `parse_path_segment` to recurse and therefore can be checked to maintain
|
||||
// this invariant.
|
||||
self.check_trailing_angle_brackets(&segment, token::ModSep);
|
||||
}
|
||||
segments.push(segment);
|
||||
|
||||
if self.is_import_coupler() || !self.eat(&token::ModSep) {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
|
||||
let ident = self.parse_path_segment_ident()?;
|
||||
|
||||
let is_args_start = |token: &Token| match token.kind {
|
||||
token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren)
|
||||
| token::LArrow => true,
|
||||
_ => false,
|
||||
};
|
||||
let check_args_start = |this: &mut Self| {
|
||||
this.expected_tokens.extend_from_slice(
|
||||
&[TokenType::Token(token::Lt), TokenType::Token(token::OpenDelim(token::Paren))]
|
||||
);
|
||||
is_args_start(&this.token)
|
||||
};
|
||||
|
||||
Ok(if style == PathStyle::Type && check_args_start(self) ||
|
||||
style != PathStyle::Mod && self.check(&token::ModSep)
|
||||
&& self.look_ahead(1, |t| is_args_start(t)) {
|
||||
// We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
|
||||
// it isn't, then we reset the unmatched angle bracket count as we're about to start
|
||||
// parsing a new path.
|
||||
if style == PathStyle::Expr {
|
||||
self.unmatched_angle_bracket_count = 0;
|
||||
self.max_angle_bracket_count = 0;
|
||||
}
|
||||
|
||||
// Generic arguments are found - `<`, `(`, `::<` or `::(`.
|
||||
self.eat(&token::ModSep);
|
||||
let lo = self.token.span;
|
||||
let args = if self.eat_lt() {
|
||||
// `<'a, T, A = U>`
|
||||
let (args, constraints) =
|
||||
self.parse_generic_args_with_leaning_angle_bracket_recovery(style, lo)?;
|
||||
self.expect_gt()?;
|
||||
let span = lo.to(self.prev_span);
|
||||
AngleBracketedArgs { args, constraints, span }.into()
|
||||
} else {
|
||||
// `(T, U) -> R`
|
||||
let (inputs, _) = self.parse_paren_comma_seq(|p| p.parse_ty())?;
|
||||
let span = lo.to(self.prev_span);
|
||||
let output = if self.eat(&token::RArrow) {
|
||||
Some(self.parse_ty_common(false, false, false)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
ParenthesizedArgs { inputs, output, span }.into()
|
||||
};
|
||||
|
||||
PathSegment { ident, args, id: ast::DUMMY_NODE_ID }
|
||||
} else {
|
||||
// Generic arguments are not found.
|
||||
PathSegment::from_ident(ident)
|
||||
})
|
||||
}
|
||||
|
||||
pub(super) fn parse_path_segment_ident(&mut self) -> PResult<'a, Ident> {
|
||||
match self.token.kind {
|
||||
token::Ident(name, _) if name.is_path_segment_keyword() => {
|
||||
let span = self.token.span;
|
||||
self.bump();
|
||||
Ok(Ident::new(name, span))
|
||||
}
|
||||
_ => self.parse_ident(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses generic args (within a path segment) with recovery for extra leading angle brackets.
|
||||
/// For the purposes of understanding the parsing logic of generic arguments, this function
|
||||
/// can be thought of being the same as just calling `self.parse_generic_args()` if the source
|
||||
/// had the correct amount of leading angle brackets.
|
||||
///
|
||||
/// ```ignore (diagnostics)
|
||||
/// bar::<<<<T as Foo>::Output>();
|
||||
/// ^^ help: remove extra angle brackets
|
||||
/// ```
|
||||
fn parse_generic_args_with_leaning_angle_bracket_recovery(
|
||||
&mut self,
|
||||
style: PathStyle,
|
||||
lo: Span,
|
||||
) -> PResult<'a, (Vec<GenericArg>, Vec<AssocTyConstraint>)> {
|
||||
// We need to detect whether there are extra leading left angle brackets and produce an
|
||||
// appropriate error and suggestion. This cannot be implemented by looking ahead at
|
||||
// upcoming tokens for a matching `>` character - if there are unmatched `<` tokens
|
||||
// then there won't be matching `>` tokens to find.
|
||||
//
|
||||
// To explain how this detection works, consider the following example:
|
||||
//
|
||||
// ```ignore (diagnostics)
|
||||
// bar::<<<<T as Foo>::Output>();
|
||||
// ^^ help: remove extra angle brackets
|
||||
// ```
|
||||
//
|
||||
// Parsing of the left angle brackets starts in this function. We start by parsing the
|
||||
// `<` token (incrementing the counter of unmatched angle brackets on `Parser` via
|
||||
// `eat_lt`):
|
||||
//
|
||||
// *Upcoming tokens:* `<<<<T as Foo>::Output>;`
|
||||
// *Unmatched count:* 1
|
||||
// *`parse_path_segment` calls deep:* 0
|
||||
//
|
||||
// This has the effect of recursing as this function is called if a `<` character
|
||||
// is found within the expected generic arguments:
|
||||
//
|
||||
// *Upcoming tokens:* `<<<T as Foo>::Output>;`
|
||||
// *Unmatched count:* 2
|
||||
// *`parse_path_segment` calls deep:* 1
|
||||
//
|
||||
// Eventually we will have recursed until having consumed all of the `<` tokens and
|
||||
// this will be reflected in the count:
|
||||
//
|
||||
// *Upcoming tokens:* `T as Foo>::Output>;`
|
||||
// *Unmatched count:* 4
|
||||
// *`parse_path_segment` calls deep:* 3
|
||||
//
|
||||
// The parser will continue until reaching the first `>` - this will decrement the
|
||||
// unmatched angle bracket count and return to the parent invocation of this function
|
||||
// having succeeded in parsing:
|
||||
//
|
||||
// *Upcoming tokens:* `::Output>;`
|
||||
// *Unmatched count:* 3
|
||||
// *`parse_path_segment` calls deep:* 2
|
||||
//
|
||||
// This will continue until the next `>` character which will also return successfully
|
||||
// to the parent invocation of this function and decrement the count:
|
||||
//
|
||||
// *Upcoming tokens:* `;`
|
||||
// *Unmatched count:* 2
|
||||
// *`parse_path_segment` calls deep:* 1
|
||||
//
|
||||
// At this point, this function will expect to find another matching `>` character but
|
||||
// won't be able to and will return an error. This will continue all the way up the
|
||||
// call stack until the first invocation:
|
||||
//
|
||||
// *Upcoming tokens:* `;`
|
||||
// *Unmatched count:* 2
|
||||
// *`parse_path_segment` calls deep:* 0
|
||||
//
|
||||
// In doing this, we have managed to work out how many unmatched leading left angle
|
||||
// brackets there are, but we cannot recover as the unmatched angle brackets have
|
||||
// already been consumed. To remedy this, we keep a snapshot of the parser state
|
||||
// before we do the above. We can then inspect whether we ended up with a parsing error
|
||||
// and unmatched left angle brackets and, if so, restore the parser state to before we
// consumed any `<` characters, emit an error, consume the erroneous `<` tokens, and
// recover by attempting to parse again.
|
||||
//
|
||||
// In practice, the recursion of this function is indirect and there will be other
|
||||
// locations that consume some `<` characters - as long as we update the count when
|
||||
// this happens, it isn't an issue.
|
||||
|
||||
let is_first_invocation = style == PathStyle::Expr;
|
||||
// Take a snapshot before attempting to parse - we can restore this later.
|
||||
let snapshot = if is_first_invocation {
|
||||
Some(self.clone())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)");
|
||||
match self.parse_generic_args() {
|
||||
Ok(value) => Ok(value),
|
||||
Err(ref mut e) if is_first_invocation && self.unmatched_angle_bracket_count > 0 => {
|
||||
// Cancel error from being unable to find `>`. We know the error
|
||||
// must have been this due to a non-zero unmatched angle bracket
|
||||
// count.
|
||||
e.cancel();
|
||||
|
||||
// Swap `self` with our backup of the parser state before attempting to parse
|
||||
// generic arguments.
|
||||
let snapshot = mem::replace(self, snapshot.unwrap());
|
||||
|
||||
debug!(
|
||||
"parse_generic_args_with_leading_angle_bracket_recovery: (snapshot failure) \
|
||||
snapshot.count={:?}",
|
||||
snapshot.unmatched_angle_bracket_count,
|
||||
);
|
||||
|
||||
// Eat the unmatched angle brackets.
|
||||
for _ in 0..snapshot.unmatched_angle_bracket_count {
|
||||
self.eat_lt();
|
||||
}
|
||||
|
||||
// Make a span over ${unmatched angle bracket count} characters.
|
||||
let span = lo.with_hi(
|
||||
lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count)
|
||||
);
|
||||
let plural = snapshot.unmatched_angle_bracket_count > 1;
|
||||
self.diagnostic()
|
||||
.struct_span_err(
|
||||
span,
|
||||
&format!(
|
||||
"unmatched angle bracket{}",
|
||||
if plural { "s" } else { "" }
|
||||
),
|
||||
)
|
||||
.span_suggestion(
|
||||
span,
|
||||
&format!(
|
||||
"remove extra angle bracket{}",
|
||||
if plural { "s" } else { "" }
|
||||
),
|
||||
String::new(),
|
||||
Applicability::MachineApplicable,
|
||||
)
|
||||
.emit();
|
||||
|
||||
// Try again without unmatched angle bracket characters.
|
||||
self.parse_generic_args()
|
||||
},
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
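As an aside (illustrative, not from the diff): a minimal sketch of the snapshot-and-restore idea this function uses, shown on a toy cursor type. `Cursor` and the `parse` closure are hypothetical stand-ins for the real `Parser` and `parse_generic_args`.

use std::mem;

#[derive(Clone)]
struct Cursor { pos: usize, unmatched_lt: usize }

// Try a parse; if it fails after stray `<` tokens were consumed, roll back to the
// snapshot, skip exactly that many `<` tokens, and try once more.
fn parse_with_recovery<E>(
    cur: &mut Cursor,
    mut parse: impl FnMut(&mut Cursor) -> Result<(), E>,
) -> Result<(), E> {
    let snapshot = cur.clone();                       // state before any `<` was eaten
    match parse(cur) {
        Ok(()) => Ok(()),
        Err(_) if cur.unmatched_lt > 0 => {
            let failed = mem::replace(cur, snapshot); // restore; `failed` keeps the count
            cur.pos += failed.unmatched_lt;           // consume the erroneous `<` tokens
            parse(cur)                                // retry without them
        }
        Err(e) => Err(e),
    }
}

The real function additionally cancels the original error and emits the "unmatched angle bracket" diagnostic with a machine-applicable suggestion before retrying.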
|
||||
|
||||
/// Parses (possibly empty) list of lifetime and type arguments and associated type bindings,
|
||||
/// possibly including trailing comma.
|
||||
fn parse_generic_args(&mut self) -> PResult<'a, (Vec<GenericArg>, Vec<AssocTyConstraint>)> {
|
||||
let mut args = Vec::new();
|
||||
let mut constraints = Vec::new();
|
||||
let mut misplaced_assoc_ty_constraints: Vec<Span> = Vec::new();
|
||||
let mut assoc_ty_constraints: Vec<Span> = Vec::new();
|
||||
|
||||
let args_lo = self.token.span;
|
||||
|
||||
loop {
|
||||
if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
|
||||
// Parse lifetime argument.
|
||||
args.push(GenericArg::Lifetime(self.expect_lifetime()));
|
||||
misplaced_assoc_ty_constraints.append(&mut assoc_ty_constraints);
|
||||
} else if self.check_ident() && self.look_ahead(1,
|
||||
|t| t == &token::Eq || t == &token::Colon) {
|
||||
// Parse associated type constraint.
|
||||
let lo = self.token.span;
|
||||
let ident = self.parse_ident()?;
|
||||
let kind = if self.eat(&token::Eq) {
|
||||
AssocTyConstraintKind::Equality {
|
||||
ty: self.parse_ty()?,
|
||||
}
|
||||
} else if self.eat(&token::Colon) {
|
||||
AssocTyConstraintKind::Bound {
|
||||
bounds: self.parse_generic_bounds(Some(self.prev_span))?,
|
||||
}
|
||||
} else {
|
||||
unreachable!();
|
||||
};
|
||||
let span = lo.to(self.prev_span);
|
||||
constraints.push(AssocTyConstraint {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
ident,
|
||||
kind,
|
||||
span,
|
||||
});
|
||||
assoc_ty_constraints.push(span);
|
||||
} else if self.check_const_arg() {
|
||||
// Parse const argument.
|
||||
let expr = if let token::OpenDelim(token::Brace) = self.token.kind {
|
||||
self.parse_block_expr(
|
||||
None, self.token.span, BlockCheckMode::Default, ThinVec::new()
|
||||
)?
|
||||
} else if self.token.is_ident() {
|
||||
// FIXME(const_generics): to distinguish between idents for types and consts,
|
||||
// we should introduce a GenericArg::Ident in the AST and distinguish when
|
||||
// lowering to the HIR. For now, idents for const args are not permitted.
|
||||
if self.token.is_keyword(kw::True) || self.token.is_keyword(kw::False) {
|
||||
self.parse_literal_maybe_minus()?
|
||||
} else {
|
||||
return Err(
|
||||
self.fatal("identifiers may currently not be used for const generics")
|
||||
);
|
||||
}
|
||||
} else {
|
||||
self.parse_literal_maybe_minus()?
|
||||
};
|
||||
let value = AnonConst {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
value: expr,
|
||||
};
|
||||
args.push(GenericArg::Const(value));
|
||||
misplaced_assoc_ty_constraints.append(&mut assoc_ty_constraints);
|
||||
} else if self.check_type() {
|
||||
// Parse type argument.
|
||||
args.push(GenericArg::Type(self.parse_ty()?));
|
||||
misplaced_assoc_ty_constraints.append(&mut assoc_ty_constraints);
|
||||
} else {
|
||||
break
|
||||
}
|
||||
|
||||
if !self.eat(&token::Comma) {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: we would like to report this in ast_validation instead, but we currently do not
|
||||
// preserve ordering of generic parameters with respect to associated type binding, so we
|
||||
// lose that information after parsing.
|
||||
if misplaced_assoc_ty_constraints.len() > 0 {
|
||||
let mut err = self.struct_span_err(
|
||||
args_lo.to(self.prev_span),
|
||||
"associated type bindings must be declared after generic parameters",
|
||||
);
|
||||
for span in misplaced_assoc_ty_constraints {
|
||||
err.span_label(
|
||||
span,
|
||||
"this associated type binding should be moved after the generic parameters",
|
||||
);
|
||||
}
|
||||
err.emit();
|
||||
}
|
||||
|
||||
Ok((args, constraints))
|
||||
}
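For orientation (not part of the diff): a hedged sketch of the argument orderings this loop distinguishes; `Producer` is a hypothetical trait.

// Hypothetical trait, purely for illustration.
trait Producer<T> { type Out; }

// Lifetime and type arguments followed by an associated type binding: accepted.
fn generic_arg_forms<'a, P: Producer<u32, Out = u8> + 'a>(_p: &'a P) {}

// fn misplaced<P: Producer<Out = u8, u32>>() {}
// A binding written before a generic argument still parses, but is reported with
// "associated type bindings must be declared after generic parameters".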
|
||||
}
|
458
src/libsyntax/parse/parser/stmt.rs
Normal file
@@ -0,0 +1,458 @@
use super::{Parser, PResult, Restrictions, PrevTokenKind, SemiColonMode, BlockMode};
|
||||
use super::expr::LhsExpr;
|
||||
use super::path::PathStyle;
|
||||
|
||||
use crate::ptr::P;
|
||||
use crate::{maybe_whole, ThinVec};
|
||||
use crate::ast::{self, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind};
|
||||
use crate::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac_, MacDelimiter};
|
||||
use crate::ext::base::DummyResult;
|
||||
use crate::parse::{classify, DirectoryOwnership};
|
||||
use crate::parse::diagnostics::Error;
|
||||
use crate::parse::token::{self};
|
||||
use crate::source_map::{respan, Span};
|
||||
use crate::symbol::{kw, sym};
|
||||
|
||||
use std::mem;
|
||||
use errors::Applicability;
|
||||
|
||||
impl<'a> Parser<'a> {
|
||||
/// Parse a statement. This stops just before trailing semicolons on everything but items.
|
||||
/// e.g., a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed.
|
||||
pub fn parse_stmt(&mut self) -> PResult<'a, Option<Stmt>> {
|
||||
Ok(self.parse_stmt_(true))
|
||||
}
|
||||
|
||||
fn parse_stmt_(&mut self, macro_legacy_warnings: bool) -> Option<Stmt> {
|
||||
self.parse_stmt_without_recovery(macro_legacy_warnings).unwrap_or_else(|mut e| {
|
||||
e.emit();
|
||||
self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_stmt_without_recovery(
|
||||
&mut self,
|
||||
macro_legacy_warnings: bool,
|
||||
) -> PResult<'a, Option<Stmt>> {
|
||||
maybe_whole!(self, NtStmt, |x| Some(x));
|
||||
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let lo = self.token.span;
|
||||
|
||||
Ok(Some(if self.eat_keyword(kw::Let) {
|
||||
Stmt {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
node: StmtKind::Local(self.parse_local(attrs.into())?),
|
||||
span: lo.to(self.prev_span),
|
||||
}
|
||||
} else if let Some(macro_def) = self.eat_macro_def(
|
||||
&attrs,
|
||||
&respan(lo, VisibilityKind::Inherited),
|
||||
lo,
|
||||
)? {
|
||||
Stmt {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
node: StmtKind::Item(macro_def),
|
||||
span: lo.to(self.prev_span),
|
||||
}
|
||||
// Starts like a simple path, being careful to avoid contextual keywords
// such as union items, items with `crate` visibility, or auto trait items.
// Our goal here is to parse an arbitrary path `a::b::c` but not something that starts
// like a path (1 token) but is in fact not a path.
|
||||
// `union::b::c` - path, `union U { ... }` - not a path.
|
||||
// `crate::b::c` - path, `crate struct S;` - not a path.
|
||||
} else if self.token.is_path_start() &&
|
||||
!self.token.is_qpath_start() &&
|
||||
!self.is_union_item() &&
|
||||
!self.is_crate_vis() &&
|
||||
!self.is_auto_trait_item() &&
|
||||
!self.is_async_fn() {
|
||||
let path = self.parse_path(PathStyle::Expr)?;
|
||||
|
||||
if !self.eat(&token::Not) {
|
||||
let expr = if self.check(&token::OpenDelim(token::Brace)) {
|
||||
self.parse_struct_expr(lo, path, ThinVec::new())?
|
||||
} else {
|
||||
let hi = self.prev_span;
|
||||
self.mk_expr(lo.to(hi), ExprKind::Path(None, path), ThinVec::new())
|
||||
};
|
||||
|
||||
let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
|
||||
let expr = this.parse_dot_or_call_expr_with(expr, lo, attrs.into())?;
|
||||
this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr))
|
||||
})?;
|
||||
|
||||
return Ok(Some(Stmt {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
node: StmtKind::Expr(expr),
|
||||
span: lo.to(self.prev_span),
|
||||
}));
|
||||
}
|
||||
|
||||
let (delim, tts) = self.expect_delimited_token_tree()?;
|
||||
let hi = self.prev_span;
|
||||
|
||||
let style = if delim == MacDelimiter::Brace {
|
||||
MacStmtStyle::Braces
|
||||
} else {
|
||||
MacStmtStyle::NoBraces
|
||||
};
|
||||
|
||||
let mac = respan(lo.to(hi), Mac_ {
|
||||
path,
|
||||
tts,
|
||||
delim,
|
||||
prior_type_ascription: self.last_type_ascription,
|
||||
});
|
||||
let node = if delim == MacDelimiter::Brace ||
|
||||
self.token == token::Semi || self.token == token::Eof {
|
||||
StmtKind::Mac(P((mac, style, attrs.into())))
|
||||
}
|
||||
// We used to incorrectly stop parsing macro-expanded statements here.
|
||||
// If the next token will be an error anyway but could have parsed with the
|
||||
// earlier behavior, stop parsing here and emit a warning to avoid breakage.
|
||||
else if macro_legacy_warnings &&
|
||||
self.token.can_begin_expr() &&
|
||||
match self.token.kind {
|
||||
// These can continue an expression, so we can't stop parsing and warn.
|
||||
token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
|
||||
token::BinOp(token::Minus) | token::BinOp(token::Star) |
|
||||
token::BinOp(token::And) | token::BinOp(token::Or) |
|
||||
token::AndAnd | token::OrOr |
|
||||
token::DotDot | token::DotDotDot | token::DotDotEq => false,
|
||||
_ => true,
|
||||
} {
|
||||
self.warn_missing_semicolon();
|
||||
StmtKind::Mac(P((mac, style, attrs.into())))
|
||||
} else {
|
||||
let e = self.mk_expr(mac.span, ExprKind::Mac(mac), ThinVec::new());
|
||||
let e = self.maybe_recover_from_bad_qpath(e, true)?;
|
||||
let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?;
|
||||
let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
|
||||
StmtKind::Expr(e)
|
||||
};
|
||||
Stmt {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
span: lo.to(hi),
|
||||
node,
|
||||
}
|
||||
} else {
|
||||
// FIXME: Bad copy of attrs
|
||||
let old_directory_ownership =
|
||||
mem::replace(&mut self.directory.ownership, DirectoryOwnership::UnownedViaBlock);
|
||||
let item = self.parse_item_(attrs.clone(), false, true)?;
|
||||
self.directory.ownership = old_directory_ownership;
|
||||
|
||||
match item {
|
||||
Some(i) => Stmt {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
span: lo.to(i.span),
|
||||
node: StmtKind::Item(i),
|
||||
},
|
||||
None => {
|
||||
let unused_attrs = |attrs: &[Attribute], s: &mut Self| {
|
||||
if !attrs.is_empty() {
|
||||
if s.prev_token_kind == PrevTokenKind::DocComment {
|
||||
s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit();
|
||||
} else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
|
||||
s.span_err(
|
||||
s.token.span, "expected statement after outer attribute"
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Do not attempt to parse an expression if we're done here.
|
||||
if self.token == token::Semi {
|
||||
unused_attrs(&attrs, self);
|
||||
self.bump();
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
if self.token == token::CloseDelim(token::Brace) {
|
||||
unused_attrs(&attrs, self);
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// Remainder are line-expr stmts.
|
||||
let e = self.parse_expr_res(
|
||||
Restrictions::STMT_EXPR, Some(attrs.into()))?;
|
||||
Stmt {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
span: lo.to(e.span),
|
||||
node: StmtKind::Expr(e),
|
||||
}
|
||||
}
|
||||
}
|
||||
}))
|
||||
}
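Side note (illustrative, not from the diff): the macro-call statement shapes the branch above classifies into `MacStmtStyle::Braces` and `MacStmtStyle::NoBraces`; `m` is a hypothetical macro.

macro_rules! m { () => {} }

fn macro_stmt_styles() {
    m!();     // paren delimiter, terminated by `;`   -> MacStmtStyle::NoBraces
    m![];     // bracket delimiter, terminated by `;` -> MacStmtStyle::NoBraces
    m! {}     // brace delimiter, no `;` required     -> MacStmtStyle::Braces
}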
|
||||
|
||||
/// Parses a local variable declaration.
|
||||
fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> {
|
||||
let lo = self.prev_span;
|
||||
let pat = self.parse_top_level_pat()?;
|
||||
|
||||
let (err, ty) = if self.eat(&token::Colon) {
|
||||
// Save the state of the parser before parsing type normally, in case there is a `:`
|
||||
// instead of an `=` typo.
|
||||
let parser_snapshot_before_type = self.clone();
|
||||
let colon_sp = self.prev_span;
|
||||
match self.parse_ty() {
|
||||
Ok(ty) => (None, Some(ty)),
|
||||
Err(mut err) => {
|
||||
// Rewind to before attempting to parse the type and continue parsing
|
||||
let parser_snapshot_after_type = self.clone();
|
||||
mem::replace(self, parser_snapshot_before_type);
|
||||
|
||||
let snippet = self.span_to_snippet(pat.span).unwrap();
|
||||
err.span_label(pat.span, format!("while parsing the type for `{}`", snippet));
|
||||
(Some((parser_snapshot_after_type, colon_sp, err)), None)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
(None, None)
|
||||
};
|
||||
let init = match (self.parse_initializer(err.is_some()), err) {
|
||||
(Ok(init), None) => { // init parsed, ty parsed
|
||||
init
|
||||
}
|
||||
(Ok(init), Some((_, colon_sp, mut err))) => { // init parsed, ty error
|
||||
// We could parse the type as if it were the initializer; it is likely there was a
// typo in the code: `:` instead of `=`. Add the suggestion and emit the error.
|
||||
err.span_suggestion_short(
|
||||
colon_sp,
|
||||
"use `=` if you meant to assign",
|
||||
"=".to_string(),
|
||||
Applicability::MachineApplicable
|
||||
);
|
||||
err.emit();
|
||||
// As this was parsed successfully, continue as if the code has been fixed for the
|
||||
// rest of the file. It will still fail due to the emitted error, but we avoid
|
||||
// extra noise.
|
||||
init
|
||||
}
|
||||
(Err(mut init_err), Some((snapshot, _, ty_err))) => { // init error, ty error
|
||||
init_err.cancel();
|
||||
// Couldn't parse the type nor the initializer, only raise the type error and
|
||||
// return to the parser state before parsing the type as the initializer.
|
||||
// let x: <parse_error>;
|
||||
mem::replace(self, snapshot);
|
||||
return Err(ty_err);
|
||||
}
|
||||
(Err(err), None) => { // init error, ty parsed
|
||||
// Couldn't parse the initializer and we're not attempting to recover a failed
|
||||
// parse of the type, return the error.
|
||||
return Err(err);
|
||||
}
|
||||
};
|
||||
let hi = if self.token == token::Semi {
|
||||
self.token.span
|
||||
} else {
|
||||
self.prev_span
|
||||
};
|
||||
Ok(P(ast::Local {
|
||||
ty,
|
||||
pat,
|
||||
init,
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
span: lo.to(hi),
|
||||
attrs,
|
||||
}))
|
||||
}
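A hedged illustration (not from the diff) of the `:`-for-`=` typo this recovery targets:

fn colon_for_equals_example() {
    // let x: 42;   // `42` fails to parse as a type but succeeds as the initializer,
    //              // so the error carries the suggestion "use `=` if you meant to assign"
    let x = 42;     // the suggested, well-formed form
    let _ = x;
}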
|
||||
|
||||
/// Parses the RHS of a local variable declaration (e.g., '= 14;').
|
||||
fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> {
|
||||
if self.eat(&token::Eq) {
|
||||
Ok(Some(self.parse_expr()?))
|
||||
} else if skip_eq {
|
||||
Ok(Some(self.parse_expr()?))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
fn is_auto_trait_item(&self) -> bool {
|
||||
// auto trait
|
||||
(self.token.is_keyword(kw::Auto) &&
|
||||
self.is_keyword_ahead(1, &[kw::Trait]))
|
||||
|| // unsafe auto trait
|
||||
(self.token.is_keyword(kw::Unsafe) &&
|
||||
self.is_keyword_ahead(1, &[kw::Auto]) &&
|
||||
self.is_keyword_ahead(2, &[kw::Trait]))
|
||||
}
|
||||
|
||||
/// Parses a block. No inner attributes are allowed.
|
||||
pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
|
||||
maybe_whole!(self, NtBlock, |x| x);
|
||||
|
||||
let lo = self.token.span;
|
||||
|
||||
if !self.eat(&token::OpenDelim(token::Brace)) {
|
||||
let sp = self.token.span;
|
||||
let tok = self.this_token_descr();
|
||||
let mut e = self.span_fatal(sp, &format!("expected `{{`, found {}", tok));
|
||||
let do_not_suggest_help =
|
||||
self.token.is_keyword(kw::In) || self.token == token::Colon;
|
||||
|
||||
if self.token.is_ident_named(sym::and) {
|
||||
e.span_suggestion_short(
|
||||
self.token.span,
|
||||
"use `&&` instead of `and` for the boolean operator",
|
||||
"&&".to_string(),
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
if self.token.is_ident_named(sym::or) {
|
||||
e.span_suggestion_short(
|
||||
self.token.span,
|
||||
"use `||` instead of `or` for the boolean operator",
|
||||
"||".to_string(),
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
|
||||
// Check to see if the user has written something like
|
||||
//
|
||||
// if (cond)
|
||||
// bar;
|
||||
//
|
||||
// Which is valid in other languages, but not Rust.
|
||||
match self.parse_stmt_without_recovery(false) {
|
||||
Ok(Some(stmt)) => {
|
||||
if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace))
|
||||
|| do_not_suggest_help {
|
||||
// if the next token is an open brace (e.g., `if a b {`), the place-
|
||||
// inside-a-block suggestion would be more likely wrong than right
|
||||
e.span_label(sp, "expected `{`");
|
||||
return Err(e);
|
||||
}
|
||||
let mut stmt_span = stmt.span;
|
||||
// expand the span to include the semicolon, if it exists
|
||||
if self.eat(&token::Semi) {
|
||||
stmt_span = stmt_span.with_hi(self.prev_span.hi());
|
||||
}
|
||||
if let Ok(snippet) = self.span_to_snippet(stmt_span) {
|
||||
e.span_suggestion(
|
||||
stmt_span,
|
||||
"try placing this code inside a block",
|
||||
format!("{{ {} }}", snippet),
|
||||
// speculative, has been misleading in the past (#46836)
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(mut e) => {
|
||||
self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
|
||||
self.cancel(&mut e);
|
||||
}
|
||||
_ => ()
|
||||
}
|
||||
e.span_label(sp, "expected `{`");
|
||||
return Err(e);
|
||||
}
|
||||
|
||||
self.parse_block_tail(lo, BlockCheckMode::Default)
|
||||
}
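A hedged illustration (not part of the diff) of the two recoveries sketched in the comments above:

fn block_recovery_examples(cond: bool, a: bool, b: bool) {
    // if (cond)
    //     foo();       // C-style body without braces: the parser suggests
    //                  // "try placing this code inside a block"
    if cond { }          // the accepted form

    // if a and b { }   // `and` instead of `&&`: the parser suggests
    //                  // "use `&&` instead of `and` for the boolean operator"
    if a && b { }
}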
|
||||
|
||||
/// Parses a block. Inner attributes are allowed.
|
||||
crate fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
|
||||
maybe_whole!(self, NtBlock, |x| (Vec::new(), x));
|
||||
|
||||
let lo = self.token.span;
|
||||
self.expect(&token::OpenDelim(token::Brace))?;
|
||||
Ok((self.parse_inner_attributes()?,
|
||||
self.parse_block_tail(lo, BlockCheckMode::Default)?))
|
||||
}
|
||||
|
||||
/// Parses the rest of a block expression or function body.
|
||||
/// Precondition: already parsed the '{'.
|
||||
pub(super) fn parse_block_tail(
|
||||
&mut self,
|
||||
lo: Span,
|
||||
s: BlockCheckMode
|
||||
) -> PResult<'a, P<Block>> {
|
||||
let mut stmts = vec![];
|
||||
while !self.eat(&token::CloseDelim(token::Brace)) {
|
||||
if self.token == token::Eof {
|
||||
break;
|
||||
}
|
||||
let stmt = match self.parse_full_stmt(false) {
|
||||
Err(mut err) => {
|
||||
err.emit();
|
||||
self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
|
||||
Some(Stmt {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
node: StmtKind::Expr(DummyResult::raw_expr(self.token.span, true)),
|
||||
span: self.token.span,
|
||||
})
|
||||
}
|
||||
Ok(stmt) => stmt,
|
||||
};
|
||||
if let Some(stmt) = stmt {
|
||||
stmts.push(stmt);
|
||||
} else {
|
||||
// Found only `;` or `}`.
|
||||
continue;
|
||||
};
|
||||
}
|
||||
Ok(P(ast::Block {
|
||||
stmts,
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
rules: s,
|
||||
span: lo.to(self.prev_span),
|
||||
}))
|
||||
}
|
||||
|
||||
/// Parses a statement, including the trailing semicolon.
|
||||
crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
|
||||
// skip looking for a trailing semicolon when we have an interpolated statement
|
||||
maybe_whole!(self, NtStmt, |x| Some(x));
|
||||
|
||||
let mut stmt = match self.parse_stmt_without_recovery(macro_legacy_warnings)? {
|
||||
Some(stmt) => stmt,
|
||||
None => return Ok(None),
|
||||
};
|
||||
|
||||
match stmt.node {
|
||||
StmtKind::Expr(ref expr) if self.token != token::Eof => {
|
||||
// expression without semicolon
|
||||
if classify::expr_requires_semi_to_be_stmt(expr) {
|
||||
// Just check for errors and recover; do not eat semicolon yet.
|
||||
if let Err(mut e) =
|
||||
self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)])
|
||||
{
|
||||
e.emit();
|
||||
self.recover_stmt();
|
||||
// Don't complain about type errors in body tail after parse error (#57383).
|
||||
let sp = expr.span.to(self.prev_span);
|
||||
stmt.node = StmtKind::Expr(DummyResult::raw_expr(sp, true));
|
||||
}
|
||||
}
|
||||
}
|
||||
StmtKind::Local(..) => {
|
||||
// We used to incorrectly allow a macro-expanded let statement to lack a semicolon.
|
||||
if macro_legacy_warnings && self.token != token::Semi {
|
||||
self.warn_missing_semicolon();
|
||||
} else {
|
||||
self.expect_one_of(&[], &[token::Semi])?;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
if self.eat(&token::Semi) {
|
||||
stmt = stmt.add_trailing_semicolon();
|
||||
}
|
||||
stmt.span = stmt.span.to(self.prev_span);
|
||||
Ok(Some(stmt))
|
||||
}
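For reference (not part of the diff), a hedged sketch of the trailing-semicolon expectations enforced here:

fn semicolon_rules() {
    if true { }     // block-like expression statement: no trailing `;` required
    let _x = 1;     // `let` statements must end with `;`
    1 + 1;          // a non-block expression used as a statement needs the `;`
}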
|
||||
|
||||
fn warn_missing_semicolon(&self) {
|
||||
self.diagnostic().struct_span_warn(self.token.span, {
|
||||
&format!("expected `;`, found {}", self.this_token_descr())
|
||||
}).note({
|
||||
"This was erroneously allowed and will become a hard error in a future release"
|
||||
}).emit();
|
||||
}
|
||||
}
|
461
src/libsyntax/parse/parser/ty.rs
Normal file
@@ -0,0 +1,461 @@
use super::{Parser, PResult, PathStyle, PrevTokenKind, TokenType};
|
||||
|
||||
use crate::{maybe_whole, maybe_recover_from_interpolated_ty_qpath};
|
||||
use crate::ptr::P;
|
||||
use crate::ast::{self, Ty, TyKind, MutTy, BareFnTy, FunctionRetTy, GenericParam, Lifetime, Ident};
|
||||
use crate::ast::{TraitBoundModifier, TraitObjectSyntax, GenericBound, GenericBounds, PolyTraitRef};
|
||||
use crate::ast::{Mutability, AnonConst, FnDecl, Mac_};
|
||||
use crate::parse::token::{self, Token};
|
||||
use crate::source_map::{respan, Span};
|
||||
use crate::symbol::{kw};
|
||||
|
||||
use rustc_target::spec::abi::Abi;
|
||||
|
||||
use errors::{Applicability};
|
||||
|
||||
/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
|
||||
/// `IDENT<<u8 as Trait>::AssocTy>`.
|
||||
///
|
||||
/// Types can also be of the form `IDENT(u8, u8) -> u8`; however, this assumes
|
||||
/// that `IDENT` is not the ident of a fn trait.
|
||||
fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
|
||||
t == &token::ModSep || t == &token::Lt ||
|
||||
t == &token::BinOp(token::Shl)
|
||||
}
|
||||
|
||||
impl<'a> Parser<'a> {
|
||||
/// Parses a type.
|
||||
pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
|
||||
self.parse_ty_common(true, true, false)
|
||||
}
|
||||
|
||||
/// Parses a type in restricted contexts where `+` is not permitted.
|
||||
///
|
||||
/// Example 1: `&'a TYPE`
|
||||
/// `+` is prohibited to maintain operator priority (P(+) < P(&)).
|
||||
/// Example 2: `value1 as TYPE + value2`
|
||||
/// `+` is prohibited to avoid interactions with expression grammar.
|
||||
pub(super) fn parse_ty_no_plus(&mut self) -> PResult<'a, P<Ty>> {
|
||||
self.parse_ty_common(false, true, false)
|
||||
}
|
||||
|
||||
/// Parses an optional return type `[ -> TY ]` in a function declaration.
|
||||
pub(super) fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> {
|
||||
if self.eat(&token::RArrow) {
|
||||
Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true, false)?))
|
||||
} else {
|
||||
Ok(FunctionRetTy::Default(self.token.span.shrink_to_lo()))
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn parse_ty_common(&mut self, allow_plus: bool, allow_qpath_recovery: bool,
|
||||
allow_c_variadic: bool) -> PResult<'a, P<Ty>> {
|
||||
maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery);
|
||||
maybe_whole!(self, NtTy, |x| x);
|
||||
|
||||
let lo = self.token.span;
|
||||
let mut impl_dyn_multi = false;
|
||||
let node = if self.eat(&token::OpenDelim(token::Paren)) {
|
||||
// `(TYPE)` is a parenthesized type.
|
||||
// `(TYPE,)` is a tuple with a single field of type TYPE.
|
||||
let mut ts = vec![];
|
||||
let mut last_comma = false;
|
||||
while self.token != token::CloseDelim(token::Paren) {
|
||||
ts.push(self.parse_ty()?);
|
||||
if self.eat(&token::Comma) {
|
||||
last_comma = true;
|
||||
} else {
|
||||
last_comma = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
let trailing_plus = self.prev_token_kind == PrevTokenKind::Plus;
|
||||
self.expect(&token::CloseDelim(token::Paren))?;
|
||||
|
||||
if ts.len() == 1 && !last_comma {
|
||||
let ty = ts.into_iter().nth(0).unwrap().into_inner();
|
||||
let maybe_bounds = allow_plus && self.token.is_like_plus();
|
||||
match ty.node {
|
||||
// `(TY_BOUND_NOPAREN) + BOUND + ...`.
|
||||
TyKind::Path(None, ref path) if maybe_bounds => {
|
||||
self.parse_remaining_bounds(Vec::new(), path.clone(), lo, true)?
|
||||
}
|
||||
TyKind::TraitObject(ref bounds, TraitObjectSyntax::None)
|
||||
if maybe_bounds && bounds.len() == 1 && !trailing_plus => {
|
||||
let path = match bounds[0] {
|
||||
GenericBound::Trait(ref pt, ..) => pt.trait_ref.path.clone(),
|
||||
GenericBound::Outlives(..) => self.bug("unexpected lifetime bound"),
|
||||
};
|
||||
self.parse_remaining_bounds(Vec::new(), path, lo, true)?
|
||||
}
|
||||
// `(TYPE)`
|
||||
_ => TyKind::Paren(P(ty))
|
||||
}
|
||||
} else {
|
||||
TyKind::Tup(ts)
|
||||
}
|
||||
} else if self.eat(&token::Not) {
|
||||
// Never type `!`
|
||||
TyKind::Never
|
||||
} else if self.eat(&token::BinOp(token::Star)) {
|
||||
// Raw pointer
|
||||
TyKind::Ptr(self.parse_ptr()?)
|
||||
} else if self.eat(&token::OpenDelim(token::Bracket)) {
|
||||
// Array or slice
|
||||
let t = self.parse_ty()?;
|
||||
// Parse optional `; EXPR` in `[TYPE; EXPR]`
|
||||
let t = match self.maybe_parse_fixed_length_of_vec()? {
|
||||
None => TyKind::Slice(t),
|
||||
Some(length) => TyKind::Array(t, AnonConst {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
value: length,
|
||||
}),
|
||||
};
|
||||
self.expect(&token::CloseDelim(token::Bracket))?;
|
||||
t
|
||||
} else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) {
|
||||
// Reference
|
||||
self.expect_and()?;
|
||||
self.parse_borrowed_pointee()?
|
||||
} else if self.eat_keyword_noexpect(kw::Typeof) {
|
||||
// `typeof(EXPR)`
|
||||
// In order to not be ambiguous, the type must be surrounded by parens.
|
||||
self.expect(&token::OpenDelim(token::Paren))?;
|
||||
let e = AnonConst {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
value: self.parse_expr()?,
|
||||
};
|
||||
self.expect(&token::CloseDelim(token::Paren))?;
|
||||
TyKind::Typeof(e)
|
||||
} else if self.eat_keyword(kw::Underscore) {
|
||||
// A type to be inferred `_`
|
||||
TyKind::Infer
|
||||
} else if self.token_is_bare_fn_keyword() {
|
||||
// Function pointer type
|
||||
self.parse_ty_bare_fn(Vec::new())?
|
||||
} else if self.check_keyword(kw::For) {
|
||||
// Function pointer type or bound list (trait object type) starting with a poly-trait.
|
||||
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
|
||||
// `for<'lt> Trait1<'lt> + Trait2 + 'a`
|
||||
let lo = self.token.span;
|
||||
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
|
||||
if self.token_is_bare_fn_keyword() {
|
||||
self.parse_ty_bare_fn(lifetime_defs)?
|
||||
} else {
|
||||
let path = self.parse_path(PathStyle::Type)?;
|
||||
let parse_plus = allow_plus && self.check_plus();
|
||||
self.parse_remaining_bounds(lifetime_defs, path, lo, parse_plus)?
|
||||
}
|
||||
} else if self.eat_keyword(kw::Impl) {
|
||||
// Always parse bounds greedily for better error recovery.
|
||||
let bounds = self.parse_generic_bounds(None)?;
|
||||
impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
|
||||
TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)
|
||||
} else if self.check_keyword(kw::Dyn) &&
|
||||
(self.token.span.rust_2018() ||
|
||||
self.look_ahead(1, |t| t.can_begin_bound() &&
|
||||
!can_continue_type_after_non_fn_ident(t))) {
|
||||
self.bump(); // `dyn`
|
||||
// Always parse bounds greedily for better error recovery.
|
||||
let bounds = self.parse_generic_bounds(None)?;
|
||||
impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
|
||||
TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn)
|
||||
} else if self.check(&token::Question) ||
|
||||
self.check_lifetime() && self.look_ahead(1, |t| t.is_like_plus()) {
|
||||
// Bound list (trait object type)
|
||||
TyKind::TraitObject(self.parse_generic_bounds_common(allow_plus, None)?,
|
||||
TraitObjectSyntax::None)
|
||||
} else if self.eat_lt() {
|
||||
// Qualified path
|
||||
let (qself, path) = self.parse_qpath(PathStyle::Type)?;
|
||||
TyKind::Path(Some(qself), path)
|
||||
} else if self.token.is_path_start() {
|
||||
// Simple path
|
||||
let path = self.parse_path(PathStyle::Type)?;
|
||||
if self.eat(&token::Not) {
|
||||
// Macro invocation in type position
|
||||
let (delim, tts) = self.expect_delimited_token_tree()?;
|
||||
let node = Mac_ {
|
||||
path,
|
||||
tts,
|
||||
delim,
|
||||
prior_type_ascription: self.last_type_ascription,
|
||||
};
|
||||
TyKind::Mac(respan(lo.to(self.prev_span), node))
|
||||
} else {
|
||||
// Just a type path or bound list (trait object type) starting with a trait.
|
||||
// `Type`
|
||||
// `Trait1 + Trait2 + 'a`
|
||||
if allow_plus && self.check_plus() {
|
||||
self.parse_remaining_bounds(Vec::new(), path, lo, true)?
|
||||
} else {
|
||||
TyKind::Path(None, path)
|
||||
}
|
||||
}
|
||||
} else if self.check(&token::DotDotDot) {
|
||||
if allow_c_variadic {
|
||||
self.eat(&token::DotDotDot);
|
||||
TyKind::CVarArgs
|
||||
} else {
|
||||
return Err(self.fatal(
|
||||
"only foreign functions are allowed to be C-variadic"
|
||||
));
|
||||
}
|
||||
} else {
|
||||
let msg = format!("expected type, found {}", self.this_token_descr());
|
||||
let mut err = self.fatal(&msg);
|
||||
err.span_label(self.token.span, "expected type");
|
||||
self.maybe_annotate_with_ascription(&mut err, true);
|
||||
return Err(err);
|
||||
};
|
||||
|
||||
let span = lo.to(self.prev_span);
|
||||
let ty = P(Ty { node, span, id: ast::DUMMY_NODE_ID });
|
||||
|
||||
// Try to recover from use of `+` with incorrect priority.
|
||||
self.maybe_report_ambiguous_plus(allow_plus, impl_dyn_multi, &ty);
|
||||
self.maybe_recover_from_bad_type_plus(allow_plus, &ty)?;
|
||||
self.maybe_recover_from_bad_qpath(ty, allow_qpath_recovery)
|
||||
}
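To summarize the dispatch above (illustrative, not from the diff): surface forms and the `TyKind` each branch produces. The alias names are hypothetical; `impl Trait` and macro types are also handled but are not expressible as plain aliases here.

type Paren = (u8);                              // `(TYPE)`             -> TyKind::Paren
type Tuple = (u8, u16);                         // `(T, U)`             -> TyKind::Tup
type Diverging = fn() -> !;                     // `!` return type      -> TyKind::Never
type RawPtr = *const u8;                        // `*const T`/`*mut T`  -> TyKind::Ptr
type ArrayTy = [u8; 4];                         // `[T; EXPR]`          -> TyKind::Array
type SliceTy = [u8];                            // `[T]`                -> TyKind::Slice
type RefTy<'a> = &'a mut u8;                    // `&'lt mut T`         -> TyKind::Rptr
type BareFn = unsafe extern "C" fn(u8) -> u8;   // bare fn pointer      -> TyKind::BareFn
type Object = dyn Send + Sync;                  // `dyn BOUNDS`         -> TyKind::TraitObject
type Qualified = <Vec<u8> as IntoIterator>::Item; // `<T as Trait>::X`  -> TyKind::Path(Some(qself), ..)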
|
||||
|
||||
fn parse_remaining_bounds(&mut self, generic_params: Vec<GenericParam>, path: ast::Path,
|
||||
lo: Span, parse_plus: bool) -> PResult<'a, TyKind> {
|
||||
let poly_trait_ref = PolyTraitRef::new(generic_params, path, lo.to(self.prev_span));
|
||||
let mut bounds = vec![GenericBound::Trait(poly_trait_ref, TraitBoundModifier::None)];
|
||||
if parse_plus {
|
||||
self.eat_plus(); // `+`, or `+=` gets split and `+` is discarded
|
||||
bounds.append(&mut self.parse_generic_bounds(Some(self.prev_span))?);
|
||||
}
|
||||
Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
|
||||
}
|
||||
|
||||
fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
|
||||
let mutbl = if self.eat_keyword(kw::Mut) {
|
||||
Mutability::Mutable
|
||||
} else if self.eat_keyword(kw::Const) {
|
||||
Mutability::Immutable
|
||||
} else {
|
||||
let span = self.prev_span;
|
||||
let msg = "expected mut or const in raw pointer type";
|
||||
self.struct_span_err(span, msg)
|
||||
.span_label(span, msg)
|
||||
.help("use `*mut T` or `*const T` as appropriate")
|
||||
.emit();
|
||||
Mutability::Immutable
|
||||
};
|
||||
let t = self.parse_ty_no_plus()?;
|
||||
Ok(MutTy { ty: t, mutbl })
|
||||
}
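The mutability requirement enforced above, illustrated (not part of the diff):

type ConstPtr = *const u8;   // accepted
type MutPtr = *mut u8;       // accepted
// type Bare = *u8;          // rejected: "expected mut or const in raw pointer type",
//                           // with the help "use `*mut T` or `*const T` as appropriate"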
|
||||
|
||||
fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option<P<ast::Expr>>> {
|
||||
if self.eat(&token::Semi) {
|
||||
Ok(Some(self.parse_expr()?))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> {
|
||||
let opt_lifetime = if self.check_lifetime() { Some(self.expect_lifetime()) } else { None };
|
||||
let mutbl = self.parse_mutability();
|
||||
let ty = self.parse_ty_no_plus()?;
|
||||
return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty, mutbl }));
|
||||
}
|
||||
|
||||
/// Is the current token one of the keywords that signals a bare function type?
|
||||
fn token_is_bare_fn_keyword(&mut self) -> bool {
|
||||
self.check_keyword(kw::Fn) ||
|
||||
self.check_keyword(kw::Unsafe) ||
|
||||
self.check_keyword(kw::Extern)
|
||||
}
|
||||
|
||||
/// Parses a `TyKind::BareFn` type.
|
||||
fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> {
|
||||
/*

[unsafe] [extern "ABI"] fn (S) -> T
 ^~~~^           ^~~~^     ^~^    ^
   |               |        |     |
   |               |        |   Return type
   |               |      Argument types
   |               |
   |              ABI
Function Style
*/
|
||||
|
||||
let unsafety = self.parse_unsafety();
|
||||
let abi = if self.eat_keyword(kw::Extern) {
|
||||
self.parse_opt_abi()?.unwrap_or(Abi::C)
|
||||
} else {
|
||||
Abi::Rust
|
||||
};
|
||||
|
||||
self.expect_keyword(kw::Fn)?;
|
||||
let (inputs, c_variadic) = self.parse_fn_args(false, true)?;
|
||||
let ret_ty = self.parse_ret_ty(false)?;
|
||||
let decl = P(FnDecl {
|
||||
inputs,
|
||||
output: ret_ty,
|
||||
c_variadic,
|
||||
});
|
||||
Ok(TyKind::BareFn(P(BareFnTy {
|
||||
abi,
|
||||
unsafety,
|
||||
generic_params,
|
||||
decl,
|
||||
})))
|
||||
}
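Hedged examples (not from the diff) of the bare function pointer shape described by the diagram above:

type Plain = fn(u8) -> u8;
type UnsafeC = unsafe extern "C" fn(u8, u8) -> u8;   // [unsafe] [extern "ABI"] fn (S) -> T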
|
||||
|
||||
crate fn parse_generic_bounds(&mut self,
|
||||
colon_span: Option<Span>) -> PResult<'a, GenericBounds> {
|
||||
self.parse_generic_bounds_common(true, colon_span)
|
||||
}
|
||||
|
||||
/// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`.
|
||||
///
|
||||
/// ```
|
||||
/// BOUND = TY_BOUND | LT_BOUND
|
||||
/// LT_BOUND = LIFETIME (e.g., `'a`)
|
||||
/// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
|
||||
/// TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`)
|
||||
/// ```
|
||||
fn parse_generic_bounds_common(&mut self,
|
||||
allow_plus: bool,
|
||||
colon_span: Option<Span>) -> PResult<'a, GenericBounds> {
|
||||
let mut bounds = Vec::new();
|
||||
let mut negative_bounds = Vec::new();
|
||||
let mut last_plus_span = None;
|
||||
let mut was_negative = false;
|
||||
loop {
|
||||
// This needs to be synchronized with `TokenKind::can_begin_bound`.
|
||||
let is_bound_start = self.check_path() || self.check_lifetime() ||
|
||||
self.check(&token::Not) || // used for error reporting only
|
||||
self.check(&token::Question) ||
|
||||
self.check_keyword(kw::For) ||
|
||||
self.check(&token::OpenDelim(token::Paren));
|
||||
if is_bound_start {
|
||||
let lo = self.token.span;
|
||||
let has_parens = self.eat(&token::OpenDelim(token::Paren));
|
||||
let inner_lo = self.token.span;
|
||||
let is_negative = self.eat(&token::Not);
|
||||
let question = if self.eat(&token::Question) { Some(self.prev_span) } else { None };
|
||||
if self.token.is_lifetime() {
|
||||
if let Some(question_span) = question {
|
||||
self.span_err(question_span,
|
||||
"`?` may only modify trait bounds, not lifetime bounds");
|
||||
}
|
||||
bounds.push(GenericBound::Outlives(self.expect_lifetime()));
|
||||
if has_parens {
|
||||
let inner_span = inner_lo.to(self.prev_span);
|
||||
self.expect(&token::CloseDelim(token::Paren))?;
|
||||
let mut err = self.struct_span_err(
|
||||
lo.to(self.prev_span),
|
||||
"parenthesized lifetime bounds are not supported"
|
||||
);
|
||||
if let Ok(snippet) = self.span_to_snippet(inner_span) {
|
||||
err.span_suggestion_short(
|
||||
lo.to(self.prev_span),
|
||||
"remove the parentheses",
|
||||
snippet.to_owned(),
|
||||
Applicability::MachineApplicable
|
||||
);
|
||||
}
|
||||
err.emit();
|
||||
}
|
||||
} else {
|
||||
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
|
||||
let path = self.parse_path(PathStyle::Type)?;
|
||||
if has_parens {
|
||||
self.expect(&token::CloseDelim(token::Paren))?;
|
||||
}
|
||||
let poly_span = lo.to(self.prev_span);
|
||||
if is_negative {
|
||||
was_negative = true;
|
||||
if let Some(sp) = last_plus_span.or(colon_span) {
|
||||
negative_bounds.push(sp.to(poly_span));
|
||||
}
|
||||
} else {
|
||||
let poly_trait = PolyTraitRef::new(lifetime_defs, path, poly_span);
|
||||
let modifier = if question.is_some() {
|
||||
TraitBoundModifier::Maybe
|
||||
} else {
|
||||
TraitBoundModifier::None
|
||||
};
|
||||
bounds.push(GenericBound::Trait(poly_trait, modifier));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
break
|
||||
}
|
||||
|
||||
if !allow_plus || !self.eat_plus() {
|
||||
break
|
||||
} else {
|
||||
last_plus_span = Some(self.prev_span);
|
||||
}
|
||||
}
|
||||
|
||||
if !negative_bounds.is_empty() || was_negative {
|
||||
let plural = negative_bounds.len() > 1;
|
||||
let last_span = negative_bounds.last().map(|sp| *sp);
|
||||
let mut err = self.struct_span_err(
|
||||
negative_bounds,
|
||||
"negative trait bounds are not supported",
|
||||
);
|
||||
if let Some(sp) = last_span {
|
||||
err.span_label(sp, "negative trait bounds are not supported");
|
||||
}
|
||||
if let Some(bound_list) = colon_span {
|
||||
let bound_list = bound_list.to(self.prev_span);
|
||||
let mut new_bound_list = String::new();
|
||||
if !bounds.is_empty() {
|
||||
let mut snippets = bounds.iter().map(|bound| bound.span())
|
||||
.map(|span| self.span_to_snippet(span));
|
||||
while let Some(Ok(snippet)) = snippets.next() {
|
||||
new_bound_list.push_str(" + ");
|
||||
new_bound_list.push_str(&snippet);
|
||||
}
|
||||
new_bound_list = new_bound_list.replacen(" +", ":", 1);
|
||||
}
|
||||
err.span_suggestion_hidden(
|
||||
bound_list,
|
||||
&format!("remove the trait bound{}", if plural { "s" } else { "" }),
|
||||
new_bound_list,
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
|
||||
err.emit();
|
||||
}
|
||||
|
||||
return Ok(bounds);
|
||||
}
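Hedged examples (not part of the diff) of the bound grammar above, including two forms the error paths reject:

use std::fmt::Debug;

// `?` modifier, trait bounds, and a lifetime bound, joined by `+`.
fn bounds_ok<'a, T: Debug + ?Sized + 'a>(_x: &'a T) {}

// fn bounds_err<T: !Send>() {}   // rejected: "negative trait bounds are not supported"
// fn lt_err<T: ('static)>() {}   // rejected: "parenthesized lifetime bounds are not supported"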
|
||||
|
||||
pub(super) fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<GenericParam>> {
|
||||
if self.eat_keyword(kw::For) {
|
||||
self.expect_lt()?;
|
||||
let params = self.parse_generic_params()?;
|
||||
self.expect_gt()?;
|
||||
// We rely on AST validation to rule out invalid cases: There must not be type
|
||||
// parameters, and the lifetime parameters must not have bounds.
|
||||
Ok(params)
|
||||
} else {
|
||||
Ok(Vec::new())
|
||||
}
|
||||
}
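A hedged example (not from the diff) of the late-bound lifetime binder this parses:

type Hrtb = for<'a> fn(&'a str) -> &'a str;   // `for<LT_PARAM_DEFS>` binder handled above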
|
||||
|
||||
crate fn check_lifetime(&mut self) -> bool {
|
||||
self.expected_tokens.push(TokenType::Lifetime);
|
||||
self.token.is_lifetime()
|
||||
}
|
||||
|
||||
/// Parses a single lifetime `'a` or panics.
|
||||
crate fn expect_lifetime(&mut self) -> Lifetime {
|
||||
if let Some(ident) = self.token.lifetime() {
|
||||
let span = self.token.span;
|
||||
self.bump();
|
||||
Lifetime { ident: Ident::new(ident.name, span), id: ast::DUMMY_NODE_ID }
|
||||
} else {
|
||||
self.span_bug(self.token.span, "not a lifetime")
|
||||
}
|
||||
}
|
||||
}