mv compiler to compiler/

Authored by mark on 2020-08-27 22:58:48 -05:00; committed by Vadim Petrochenkov
parent db534b3ac2
commit 9e5f7d5631
1686 changed files with 941 additions and 1051 deletions

@@ -0,0 +1,304 @@
use super::{Parser, PathStyle};
use rustc_ast as ast;
use rustc_ast::attr;
use rustc_ast::token::{self, Nonterminal};
use rustc_ast_pretty::pprust;
use rustc_errors::{error_code, PResult};
use rustc_span::Span;
use tracing::debug;
#[derive(Debug)]
pub(super) enum InnerAttrPolicy<'a> {
Permitted,
Forbidden { reason: &'a str, saw_doc_comment: bool, prev_attr_sp: Option<Span> },
}
const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \
permitted in this context";
pub(super) const DEFAULT_INNER_ATTR_FORBIDDEN: InnerAttrPolicy<'_> = InnerAttrPolicy::Forbidden {
reason: DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG,
saw_doc_comment: false,
prev_attr_sp: None,
};
impl<'a> Parser<'a> {
/// Parses attributes that appear before an item.
pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
let mut attrs: Vec<ast::Attribute> = Vec::new();
let mut just_parsed_doc_comment = false;
loop {
debug!("parse_outer_attributes: self.token={:?}", self.token);
if self.check(&token::Pound) {
let inner_error_reason = if just_parsed_doc_comment {
"an inner attribute is not permitted following an outer doc comment"
} else if !attrs.is_empty() {
"an inner attribute is not permitted following an outer attribute"
} else {
DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG
};
let inner_parse_policy = InnerAttrPolicy::Forbidden {
reason: inner_error_reason,
saw_doc_comment: just_parsed_doc_comment,
prev_attr_sp: attrs.last().map(|a| a.span),
};
let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?;
attrs.push(attr);
just_parsed_doc_comment = false;
} else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
let attr = attr::mk_doc_comment(comment_kind, attr_style, data, self.token.span);
if attr.style != ast::AttrStyle::Outer {
self.sess
.span_diagnostic
.struct_span_err_with_code(
self.token.span,
"expected outer doc comment",
error_code!(E0753),
)
.note(
"inner doc comments like this (starting with \
`//!` or `/*!`) can only appear before items",
)
.emit();
}
attrs.push(attr);
self.bump();
just_parsed_doc_comment = true;
} else {
break;
}
}
Ok(attrs)
}
/// Matches `attribute = # ! [ meta_item ]`.
///
/// If `permit_inner` is `true`, then a leading `!` indicates an inner
/// attribute.
pub fn parse_attribute(&mut self, permit_inner: bool) -> PResult<'a, ast::Attribute> {
debug!("parse_attribute: permit_inner={:?} self.token={:?}", permit_inner, self.token);
let inner_parse_policy =
if permit_inner { InnerAttrPolicy::Permitted } else { DEFAULT_INNER_ATTR_FORBIDDEN };
self.parse_attribute_with_inner_parse_policy(inner_parse_policy)
}
/// The same as `parse_attribute`, except it takes in an `InnerAttrPolicy`
/// that prescribes how to handle inner attributes.
fn parse_attribute_with_inner_parse_policy(
&mut self,
inner_parse_policy: InnerAttrPolicy<'_>,
) -> PResult<'a, ast::Attribute> {
debug!(
"parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}",
inner_parse_policy, self.token
);
let lo = self.token.span;
let (span, item, style) = if self.eat(&token::Pound) {
let style =
if self.eat(&token::Not) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
self.expect(&token::OpenDelim(token::Bracket))?;
let item = self.parse_attr_item()?;
self.expect(&token::CloseDelim(token::Bracket))?;
let attr_sp = lo.to(self.prev_token.span);
// Emit error if inner attribute is encountered and forbidden.
if style == ast::AttrStyle::Inner {
self.error_on_forbidden_inner_attr(attr_sp, inner_parse_policy);
}
(attr_sp, item, style)
} else {
let token_str = pprust::token_to_string(&self.token);
let msg = &format!("expected `#`, found `{}`", token_str);
return Err(self.struct_span_err(self.token.span, msg));
};
Ok(attr::mk_attr_from_item(style, item, span))
}
pub(super) fn error_on_forbidden_inner_attr(&self, attr_sp: Span, policy: InnerAttrPolicy<'_>) {
if let InnerAttrPolicy::Forbidden { reason, saw_doc_comment, prev_attr_sp } = policy {
let prev_attr_note =
if saw_doc_comment { "previous doc comment" } else { "previous outer attribute" };
let mut diag = self.struct_span_err(attr_sp, reason);
if let Some(prev_attr_sp) = prev_attr_sp {
diag.span_label(attr_sp, "not permitted following an outer attribute")
.span_label(prev_attr_sp, prev_attr_note);
}
diag.note(
"inner attributes, like `#![no_std]`, annotate the item enclosing them, \
and are usually found at the beginning of source files. \
Outer attributes, like `#[test]`, annotate the item following them.",
)
.emit();
}
}
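// Illustration only (not part of this commit): the shape of code this error
// fires on, versus placements where inner attributes are permitted.
//
//     #[allow(unused)]      // outer attribute
//     #![allow(dead_code)]  // ERROR: inner attribute following an outer attribute
//     fn f() {}
//
// Inner attributes are accepted only where `InnerAttrPolicy::Permitted` applies,
// e.g. at the top of a file, module, or block, before any outer attributes.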
/// Parses an inner part of an attribute (the path and following tokens).
/// The tokens must be either a delimited token stream, an empty token stream,
/// or the "legacy" key-value form.
/// PATH `(` TOKEN_STREAM `)`
/// PATH `[` TOKEN_STREAM `]`
/// PATH `{` TOKEN_STREAM `}`
/// PATH
/// PATH `=` UNSUFFIXED_LIT
/// The delimiters or `=` are still put into the resulting token stream.
pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> {
let item = match self.token.kind {
token::Interpolated(ref nt) => match **nt {
Nonterminal::NtMeta(ref item) => Some(item.clone().into_inner()),
_ => None,
},
_ => None,
};
Ok(if let Some(item) = item {
self.bump();
item
} else {
let path = self.parse_path(PathStyle::Mod)?;
let args = self.parse_attr_args()?;
ast::AttrItem { path, args }
})
}
/// Parses attributes that appear after the opening of an item. These should
/// be preceded by an exclamation mark, but we accept and warn about one
/// terminated by a semicolon.
///
/// Matches `inner_attrs*`.
crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
let mut attrs: Vec<ast::Attribute> = vec![];
loop {
// Only try to parse if it is an inner attribute (has `!`).
if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
let attr = self.parse_attribute(true)?;
assert_eq!(attr.style, ast::AttrStyle::Inner);
attrs.push(attr);
} else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
// We need to get the position of this token before we bump.
let attr = attr::mk_doc_comment(comment_kind, attr_style, data, self.token.span);
if attr.style == ast::AttrStyle::Inner {
attrs.push(attr);
self.bump();
} else {
break;
}
} else {
break;
}
}
Ok(attrs)
}
crate fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> {
let lit = self.parse_lit()?;
debug!("checking if {:?} is unusuffixed", lit);
if !lit.kind.is_unsuffixed() {
self.struct_span_err(lit.span, "suffixed literals are not allowed in attributes")
.help(
"instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), \
use an unsuffixed version (`1`, `1.0`, etc.)",
)
.emit();
}
Ok(lit)
}
/// Parses `cfg_attr(pred, attr_item_list)` where `attr_item_list` is comma-delimited.
pub fn parse_cfg_attr(&mut self) -> PResult<'a, (ast::MetaItem, Vec<(ast::AttrItem, Span)>)> {
let cfg_predicate = self.parse_meta_item()?;
self.expect(&token::Comma)?;
// Presumably, the majority of the time there will only be one attr.
let mut expanded_attrs = Vec::with_capacity(1);
while self.token.kind != token::Eof {
let lo = self.token.span;
let item = self.parse_attr_item()?;
expanded_attrs.push((item, lo.to(self.prev_token.span)));
if !self.eat(&token::Comma) {
break;
}
}
Ok((cfg_predicate, expanded_attrs))
}
/// Matches `COMMASEP(meta_item_inner)`.
crate fn parse_meta_seq_top(&mut self) -> PResult<'a, Vec<ast::NestedMetaItem>> {
// Presumably, the majority of the time there will only be one attr.
let mut nmis = Vec::with_capacity(1);
while self.token.kind != token::Eof {
nmis.push(self.parse_meta_item_inner()?);
if !self.eat(&token::Comma) {
break;
}
}
Ok(nmis)
}
/// Matches the following grammar (per RFC 1559).
///
/// meta_item : PATH ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ;
/// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
let nt_meta = match self.token.kind {
token::Interpolated(ref nt) => match **nt {
token::NtMeta(ref e) => Some(e.clone()),
_ => None,
},
_ => None,
};
if let Some(item) = nt_meta {
return match item.meta(item.path.span) {
Some(meta) => {
self.bump();
Ok(meta)
}
None => self.unexpected(),
};
}
let lo = self.token.span;
let path = self.parse_path(PathStyle::Mod)?;
let kind = self.parse_meta_item_kind()?;
let span = lo.to(self.prev_token.span);
Ok(ast::MetaItem { path, kind, span })
}
crate fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
Ok(if self.eat(&token::Eq) {
ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
} else if self.check(&token::OpenDelim(token::Paren)) {
// Matches `meta_seq = ( COMMASEP(meta_item_inner) )`.
let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?;
ast::MetaItemKind::List(list)
} else {
ast::MetaItemKind::Word
})
}
/// Matches `meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;`.
fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> {
match self.parse_unsuffixed_lit() {
Ok(lit) => return Ok(ast::NestedMetaItem::Literal(lit)),
Err(ref mut err) => err.cancel(),
}
match self.parse_meta_item() {
Ok(mi) => return Ok(ast::NestedMetaItem::MetaItem(mi)),
Err(ref mut err) => err.cancel(),
}
let found = pprust::token_to_string(&self.token);
let msg = format!("expected unsuffixed literal or identifier, found `{}`", found);
Err(self.struct_span_err(self.token.span, &msg))
}
}
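
For orientation, here is a small illustrative program (not part of this commit; the names are made up) exercising the attribute syntax the routines above parse: an inner attribute, outer attributes, a doc comment, `cfg_attr`, and an unsuffixed literal in a key-value meta item.

// Inner attribute (`parse_inner_attributes`): note the `!` and the leading position.
#![allow(dead_code)]

/// Outer doc comment; lowered to a `#[doc = "..."]` outer attribute.
#[derive(Debug)]
#[cfg_attr(test, derive(Clone))] // `parse_cfg_attr`: a predicate, then attr items
#[must_use = "illustration only"] // key-value meta item; the literal must be unsuffixed
struct Example {
    field: u32,
}

fn main() {
    let e = Example { field: 1 };
    println!("{:?}", e);
}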

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -0,0 +1,292 @@
use super::Parser;
use rustc_ast::token;
use rustc_ast::{
self as ast, Attribute, GenericBounds, GenericParam, GenericParamKind, WhereClause,
};
use rustc_errors::PResult;
use rustc_span::symbol::{kw, sym};
impl<'a> Parser<'a> {
/// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
///
/// ```text
/// BOUND = LT_BOUND (e.g., `'a`)
/// ```
fn parse_lt_param_bounds(&mut self) -> GenericBounds {
let mut lifetimes = Vec::new();
while self.check_lifetime() {
lifetimes.push(ast::GenericBound::Outlives(self.expect_lifetime()));
if !self.eat_plus() {
break;
}
}
lifetimes
}
/// Matches `typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?`.
fn parse_ty_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> {
let ident = self.parse_ident()?;
// Parse optional colon and param bounds.
let bounds = if self.eat(&token::Colon) {
self.parse_generic_bounds(Some(self.prev_token.span))?
} else {
Vec::new()
};
let default = if self.eat(&token::Eq) { Some(self.parse_ty()?) } else { None };
Ok(GenericParam {
ident,
id: ast::DUMMY_NODE_ID,
attrs: preceding_attrs.into(),
bounds,
kind: GenericParamKind::Type { default },
is_placeholder: false,
})
}
fn parse_const_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> {
let const_span = self.token.span;
self.expect_keyword(kw::Const)?;
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
self.sess.gated_spans.gate(sym::min_const_generics, const_span.to(self.prev_token.span));
Ok(GenericParam {
ident,
id: ast::DUMMY_NODE_ID,
attrs: preceding_attrs.into(),
bounds: Vec::new(),
kind: GenericParamKind::Const { ty, kw_span: const_span },
is_placeholder: false,
})
}
/// Parses a (possibly empty) list of lifetime and type parameters, possibly including
/// a trailing comma and erroneous trailing attributes.
pub(super) fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
let mut params = Vec::new();
loop {
let attrs = self.parse_outer_attributes()?;
if self.check_lifetime() {
let lifetime = self.expect_lifetime();
// Parse lifetime parameter.
let bounds =
if self.eat(&token::Colon) { self.parse_lt_param_bounds() } else { Vec::new() };
params.push(ast::GenericParam {
ident: lifetime.ident,
id: lifetime.id,
attrs: attrs.into(),
bounds,
kind: ast::GenericParamKind::Lifetime,
is_placeholder: false,
});
} else if self.check_keyword(kw::Const) {
// Parse const parameter.
params.push(self.parse_const_param(attrs)?);
} else if self.check_ident() {
// Parse type parameter.
params.push(self.parse_ty_param(attrs)?);
} else if self.token.can_begin_type() {
// Trying to write an associated type bound? (#26271)
let snapshot = self.clone();
match self.parse_ty_where_predicate() {
Ok(where_predicate) => {
self.struct_span_err(
where_predicate.span(),
"bounds on associated types do not belong here",
)
.span_label(where_predicate.span(), "belongs in `where` clause")
.emit();
}
Err(mut err) => {
err.cancel();
*self = snapshot;
break;
}
}
} else {
// Check for trailing attributes and stop parsing.
if !attrs.is_empty() {
if !params.is_empty() {
self.struct_span_err(
attrs[0].span,
"trailing attribute after generic parameter",
)
.span_label(attrs[0].span, "attributes must go before parameters")
.emit();
} else {
self.struct_span_err(attrs[0].span, "attribute without generic parameters")
.span_label(
attrs[0].span,
"attributes are only permitted when preceding parameters",
)
.emit();
}
}
break;
}
if !self.eat(&token::Comma) {
break;
}
}
Ok(params)
}
/// Parses a set of optional generic type parameter declarations. Where
/// clauses are not parsed here, and must be added later via
/// `parse_where_clause()`.
///
/// matches generics = ( ) | ( < > ) | ( < typaramseq ( , )? > ) | ( < lifetimes ( , )? > )
/// | ( < lifetimes , typaramseq ( , )? > )
/// where typaramseq = ( typaram ) | ( typaram , typaramseq )
pub(super) fn parse_generics(&mut self) -> PResult<'a, ast::Generics> {
let span_lo = self.token.span;
let (params, span) = if self.eat_lt() {
let params = self.parse_generic_params()?;
self.expect_gt()?;
(params, span_lo.to(self.prev_token.span))
} else {
(vec![], self.prev_token.span.shrink_to_hi())
};
Ok(ast::Generics {
params,
where_clause: WhereClause {
has_where_token: false,
predicates: Vec::new(),
span: self.prev_token.span.shrink_to_hi(),
},
span,
})
}
/// Parses an optional where-clause and places it in `generics`.
///
/// ```ignore (only-for-syntax-highlight)
/// where T : Trait<U, V> + 'b, 'a : 'b
/// ```
pub(super) fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> {
let mut where_clause = WhereClause {
has_where_token: false,
predicates: Vec::new(),
span: self.prev_token.span.shrink_to_hi(),
};
if !self.eat_keyword(kw::Where) {
return Ok(where_clause);
}
where_clause.has_where_token = true;
let lo = self.prev_token.span;
// We are considering adding generics to the `where` keyword as an alternative higher-rank
// parameter syntax (as in `where<'a>` or `where<T>`). To avoid that being a breaking
// change, we parse those generics now, but report an error.
if self.choose_generics_over_qpath(0) {
let generics = self.parse_generics()?;
self.struct_span_err(
generics.span,
"generic parameters on `where` clauses are reserved for future use",
)
.span_label(generics.span, "currently unsupported")
.emit();
}
loop {
let lo = self.token.span;
if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
let lifetime = self.expect_lifetime();
// The colon is mandatory here, but the bound list that follows may be empty.
self.expect(&token::Colon)?;
let bounds = self.parse_lt_param_bounds();
where_clause.predicates.push(ast::WherePredicate::RegionPredicate(
ast::WhereRegionPredicate {
span: lo.to(self.prev_token.span),
lifetime,
bounds,
},
));
} else if self.check_type() {
where_clause.predicates.push(self.parse_ty_where_predicate()?);
} else {
break;
}
if !self.eat(&token::Comma) {
break;
}
}
where_clause.span = lo.to(self.prev_token.span);
Ok(where_clause)
}
fn parse_ty_where_predicate(&mut self) -> PResult<'a, ast::WherePredicate> {
let lo = self.token.span;
// Parse optional `for<'a, 'b>`.
// This `for` is parsed greedily and applies to the whole predicate,
// the bounded type can have its own `for` applying only to it.
// Examples:
// * `for<'a> Trait1<'a>: Trait2<'a /* ok */>`
// * `(for<'a> Trait1<'a>): Trait2<'a /* not ok */>`
// * `for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /* ok */, 'b /* not ok */>`
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
// Parse type with mandatory colon and (possibly empty) bounds,
// or with mandatory equality sign and the second type.
let ty = self.parse_ty()?;
if self.eat(&token::Colon) {
let bounds = self.parse_generic_bounds(Some(self.prev_token.span))?;
Ok(ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate {
span: lo.to(self.prev_token.span),
bound_generic_params: lifetime_defs,
bounded_ty: ty,
bounds,
}))
// FIXME: Decide what should be used here, `=` or `==`.
// FIXME: We are just dropping the binders in lifetime_defs on the floor here.
} else if self.eat(&token::Eq) || self.eat(&token::EqEq) {
let rhs_ty = self.parse_ty()?;
Ok(ast::WherePredicate::EqPredicate(ast::WhereEqPredicate {
span: lo.to(self.prev_token.span),
lhs_ty: ty,
rhs_ty,
id: ast::DUMMY_NODE_ID,
}))
} else {
self.unexpected()
}
}
pub(super) fn choose_generics_over_qpath(&self, start: usize) -> bool {
// There's an ambiguity between generic parameters and qualified paths in impls.
// If we see `<` it may start both, so we have to inspect some following tokens.
// The following combinations can only start generics,
// but not qualified paths (with one exception):
// `<` `>` - empty generic parameters
// `<` `#` - generic parameters with attributes
// `<` (LIFETIME|IDENT) `>` - single generic parameter
// `<` (LIFETIME|IDENT) `,` - first generic parameter in a list
// `<` (LIFETIME|IDENT) `:` - generic parameter with bounds
// `<` (LIFETIME|IDENT) `=` - generic parameter with a default
// `<` const - generic const parameter
// The only truly ambiguous case is
// `<` IDENT `>` `::` IDENT ...
// we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`)
// because this is what is almost always expected in practice; qualified paths in impls
// (`impl <Type>::AssocTy { ... }`) aren't even allowed by the type checker at the moment.
self.look_ahead(start, |t| t == &token::Lt)
&& (self.look_ahead(start + 1, |t| t == &token::Pound || t == &token::Gt)
|| self.look_ahead(start + 1, |t| t.is_lifetime() || t.is_ident())
&& self.look_ahead(start + 2, |t| {
matches!(t.kind, token::Gt | token::Comma | token::Colon | token::Eq)
})
|| self.is_keyword_ahead(start + 1, &[kw::Const]))
}
}
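
As a rough illustration (not part of this commit; the type is invented), the surface syntax handled by the generics routines above: lifetime and type parameters, a `const` parameter, and a `where` clause containing both a region predicate and a bound predicate.

struct Window<'a, 'b, T: Clone, const N: usize>
where
    'b: 'a,               // region predicate (`WhereRegionPredicate`)
    T: Into<String> + 'a, // bound predicate (`parse_ty_where_predicate`)
{
    items: [&'a T; N],
    tag: &'b str,
}

fn main() {
    let s = String::from("hi");
    let w: Window<'_, '_, String, 2> = Window { items: [&s, &s], tag: "t" };
    println!("{} {}", w.items.len(), w.tag);
}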

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -0,0 +1,170 @@
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Nonterminal, NonterminalKind, Token};
use rustc_ast_pretty::pprust;
use rustc_errors::PResult;
use rustc_span::symbol::{kw, Ident};
use crate::parser::{FollowedByType, Parser, PathStyle};
impl<'a> Parser<'a> {
/// Checks whether a non-terminal may begin with a particular token.
///
/// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with that
/// token. Be conservative (return true) if not sure.
pub fn nonterminal_may_begin_with(kind: NonterminalKind, token: &Token) -> bool {
/// Checks whether the non-terminal may contain a single (non-keyword) identifier.
fn may_be_ident(nt: &token::Nonterminal) -> bool {
match *nt {
token::NtItem(_) | token::NtBlock(_) | token::NtVis(_) | token::NtLifetime(_) => {
false
}
_ => true,
}
}
match kind {
NonterminalKind::Expr => {
token.can_begin_expr()
// This exception is here for backwards compatibility.
&& !token.is_keyword(kw::Let)
}
NonterminalKind::Ty => token.can_begin_type(),
NonterminalKind::Ident => get_macro_ident(token).is_some(),
NonterminalKind::Literal => token.can_begin_literal_maybe_minus(),
NonterminalKind::Vis => match token.kind {
// The follow-set of :vis + "priv" keyword + interpolated
token::Comma | token::Ident(..) | token::Interpolated(..) => true,
_ => token.can_begin_type(),
},
NonterminalKind::Block => match token.kind {
token::OpenDelim(token::Brace) => true,
token::Interpolated(ref nt) => match **nt {
token::NtItem(_)
| token::NtPat(_)
| token::NtTy(_)
| token::NtIdent(..)
| token::NtMeta(_)
| token::NtPath(_)
| token::NtVis(_) => false, // none of these may start with '{'.
_ => true,
},
_ => false,
},
NonterminalKind::Path | NonterminalKind::Meta => match token.kind {
token::ModSep | token::Ident(..) => true,
token::Interpolated(ref nt) => match **nt {
token::NtPath(_) | token::NtMeta(_) => true,
_ => may_be_ident(&nt),
},
_ => false,
},
NonterminalKind::Pat => match token.kind {
token::Ident(..) | // box, ref, mut, and other identifiers (could be tightened later)
token::OpenDelim(token::Paren) | // tuple pattern
token::OpenDelim(token::Bracket) | // slice pattern
token::BinOp(token::And) | // reference
token::BinOp(token::Minus) | // negative literal
token::AndAnd | // double reference
token::Literal(..) | // literal
token::DotDot | // range pattern (future compat)
token::DotDotDot | // range pattern (future compat)
token::ModSep | // path
token::Lt | // path (UFCS constant)
token::BinOp(token::Shl) => true, // path (double UFCS)
token::Interpolated(ref nt) => may_be_ident(nt),
_ => false,
},
NonterminalKind::Lifetime => match token.kind {
token::Lifetime(_) => true,
token::Interpolated(ref nt) => match **nt {
token::NtLifetime(_) | token::NtTT(_) => true,
_ => false,
},
_ => false,
},
NonterminalKind::TT | NonterminalKind::Item | NonterminalKind::Stmt => match token.kind
{
token::CloseDelim(_) => false,
_ => true,
},
}
}
pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, Nonterminal> {
// Any `Nonterminal` which stores its tokens (currently `NtItem` and `NtExpr`)
// needs to have them force-captured here.
// A `macro_rules!` invocation may pass a captured item/expr to a proc-macro,
// which requires having captured tokens available. Since we cannot determine
// in advance whether or not a proc-macro will be (transitively) invoked,
// we always capture tokens for any `Nonterminal` which needs them.
Ok(match kind {
NonterminalKind::Item => match self.collect_tokens(|this| this.parse_item())? {
(Some(mut item), tokens) => {
// If we captured tokens during parsing (due to outer attributes),
// use those.
if item.tokens.is_none() {
item.tokens = Some(tokens);
}
token::NtItem(item)
}
(None, _) => {
return Err(self.struct_span_err(self.token.span, "expected an item keyword"));
}
},
NonterminalKind::Block => token::NtBlock(self.parse_block()?),
NonterminalKind::Stmt => match self.parse_stmt()? {
Some(s) => token::NtStmt(s),
None => return Err(self.struct_span_err(self.token.span, "expected a statement")),
},
NonterminalKind::Pat => {
let (mut pat, tokens) = self.collect_tokens(|this| this.parse_pat(None))?;
// We may have eaten an `NtPat`, which could already have tokens.
if pat.tokens.is_none() {
pat.tokens = Some(tokens);
}
token::NtPat(pat)
}
NonterminalKind::Expr => {
let (mut expr, tokens) = self.collect_tokens(|this| this.parse_expr())?;
// If we captured tokens during parsing (due to outer attributes),
// use those.
if expr.tokens.is_none() {
expr.tokens = Some(tokens);
}
token::NtExpr(expr)
}
NonterminalKind::Literal => token::NtLiteral(self.parse_literal_maybe_minus()?),
NonterminalKind::Ty => token::NtTy(self.parse_ty()?),
// this could be handled like a token, since it is one
NonterminalKind::Ident => {
if let Some((ident, is_raw)) = get_macro_ident(&self.token) {
self.bump();
token::NtIdent(ident, is_raw)
} else {
let token_str = pprust::token_to_string(&self.token);
let msg = &format!("expected ident, found {}", &token_str);
return Err(self.struct_span_err(self.token.span, msg));
}
}
NonterminalKind::Path => token::NtPath(self.parse_path(PathStyle::Type)?),
NonterminalKind::Meta => token::NtMeta(P(self.parse_attr_item()?)),
NonterminalKind::TT => token::NtTT(self.parse_token_tree()),
NonterminalKind::Vis => token::NtVis(self.parse_visibility(FollowedByType::Yes)?),
NonterminalKind::Lifetime => {
if self.check_lifetime() {
token::NtLifetime(self.expect_lifetime().ident)
} else {
let token_str = pprust::token_to_string(&self.token);
let msg = &format!("expected a lifetime, found `{}`", &token_str);
return Err(self.struct_span_err(self.token.span, msg));
}
}
})
}
}
/// The token is an identifier, but not `_`.
/// We prohibit passing `_` to macros expecting `ident` for now.
fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
token.ident().filter(|(ident, _)| ident.name != kw::Underscore)
}
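
A purely illustrative `macro_rules!` example (not part of this commit): each fragment specifier corresponds to one of the `NonterminalKind`s above, and `nonterminal_may_begin_with` is what lets the matcher skip rules whose fragment cannot start with the upcoming token.

macro_rules! demo {
    ($l:lifetime) => { { let x: &$l str = "hello"; x } };
    ($e:expr) => { $e + 1 };
    ($i:ident, $t:ty) => { { let $i: $t = Default::default(); $i } };
    ($v:vis $name:ident) => { $v struct $name; };
}

demo!(pub Marker); // item position: matches the `vis` + `ident` rule

fn main() {
    let two = demo!(1);         // `expr`
    let zero = demo!(n, u8);    // `ident` and `ty`
    let hello = demo!('static); // `lifetime`
    let _marker = Marker;
    println!("{} {} {}", two, zero, hello);
}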

File diff suppressed because it is too large

@@ -0,0 +1,516 @@
use super::ty::{AllowPlus, RecoverQPath};
use super::{Parser, TokenType};
use crate::maybe_whole;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Token};
use rustc_ast::{
self as ast, AngleBracketedArg, AngleBracketedArgs, GenericArg, ParenthesizedArgs,
};
use rustc_ast::{AnonConst, AssocTyConstraint, AssocTyConstraintKind, BlockCheckMode};
use rustc_ast::{Path, PathSegment, QSelf};
use rustc_errors::{pluralize, Applicability, PResult};
use rustc_span::source_map::{BytePos, Span};
use rustc_span::symbol::{kw, sym, Ident};
use std::mem;
use tracing::debug;
/// Specifies how to parse a path.
#[derive(Copy, Clone, PartialEq)]
pub enum PathStyle {
/// In some contexts, notably in expressions, paths with generic arguments are ambiguous
/// with something else. For example, in expressions `segment < ....` can be interpreted
/// as a comparison and `segment ( ....` can be interpreted as a function call.
/// In all such contexts the non-path interpretation is preferred by default for practical
/// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g.
/// `x<y>` - comparisons, `x::<y>` - unambiguously a path.
Expr,
/// In other contexts, notably in types, no ambiguity exists and paths can be written
/// without the disambiguator, e.g., `x<y>` - unambiguously a path.
/// Paths with disambiguators are still accepted, `x::<Y>` - unambiguously a path too.
Type,
/// A path with generic arguments disallowed, e.g., `foo::bar::Baz`, used in imports,
/// visibilities or attributes.
/// Technically, this variant is unnecessary and e.g., `Expr` can be used instead
/// (paths in "mod" contexts have to be checked later for absence of generic arguments
/// anyway, due to macros), but it is used to avoid weird suggestions about expected
/// tokens when something goes wrong.
Mod,
}
impl<'a> Parser<'a> {
/// Parses a qualified path.
/// Assumes that the leading `<` has been parsed already.
///
/// `qualified_path = <type [as trait_ref]>::path`
///
/// # Examples
/// `<T>::default`
/// `<T as U>::a`
/// `<T as U>::F::a<S>` (without disambiguator)
/// `<T as U>::F::a::<S>` (with disambiguator)
pub(super) fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, Path)> {
let lo = self.prev_token.span;
let ty = self.parse_ty()?;
// `path` will contain the prefix of the path up to the `>`,
// if any (e.g., `U` in the `<T as U>::*` examples
// above). `path_span` has the span of that path, or an empty
// span in the case of something like `<T>::Bar`.
let (mut path, path_span);
if self.eat_keyword(kw::As) {
let path_lo = self.token.span;
path = self.parse_path(PathStyle::Type)?;
path_span = path_lo.to(self.prev_token.span);
} else {
path_span = self.token.span.to(self.token.span);
path = ast::Path { segments: Vec::new(), span: path_span };
}
// See doc comment for `unmatched_angle_bracket_count`.
self.expect(&token::Gt)?;
if self.unmatched_angle_bracket_count > 0 {
self.unmatched_angle_bracket_count -= 1;
debug!("parse_qpath: (decrement) count={:?}", self.unmatched_angle_bracket_count);
}
if !self.recover_colon_before_qpath_proj() {
self.expect(&token::ModSep)?;
}
let qself = QSelf { ty, path_span, position: path.segments.len() };
self.parse_path_segments(&mut path.segments, style)?;
Ok((qself, Path { segments: path.segments, span: lo.to(self.prev_token.span) }))
}
/// Recover from an invalid single colon, when the user likely meant a qualified path.
/// We avoid emitting this if not followed by an identifier, as our assumption that the user
/// intended this to be a qualified path may not be correct.
///
/// ```ignore (diagnostics)
/// <Bar as Baz<T>>:Qux
/// ^ help: use double colon
/// ```
fn recover_colon_before_qpath_proj(&mut self) -> bool {
if self.token.kind != token::Colon
|| self.look_ahead(1, |t| !t.is_ident() || t.is_reserved_ident())
{
return false;
}
self.bump(); // colon
self.diagnostic()
.struct_span_err(
self.prev_token.span,
"found single colon before projection in qualified path",
)
.span_suggestion(
self.prev_token.span,
"use double colon",
"::".to_string(),
Applicability::MachineApplicable,
)
.emit();
true
}
/// Parses simple paths.
///
/// `path = [::] segment+`
/// `segment = ident | ident[::]<args> | ident[::](args) [-> type]`
///
/// # Examples
/// `a::b::C<D>` (without disambiguator)
/// `a::b::C::<D>` (with disambiguator)
/// `Fn(Args)` (without disambiguator)
/// `Fn::(Args)` (with disambiguator)
pub(super) fn parse_path(&mut self, style: PathStyle) -> PResult<'a, Path> {
maybe_whole!(self, NtPath, |path| {
if style == PathStyle::Mod && path.segments.iter().any(|segment| segment.args.is_some())
{
self.struct_span_err(path.span, "unexpected generic arguments in path").emit();
}
path
});
let lo = self.token.span;
let mut segments = Vec::new();
let mod_sep_ctxt = self.token.span.ctxt();
if self.eat(&token::ModSep) {
segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
}
self.parse_path_segments(&mut segments, style)?;
Ok(Path { segments, span: lo.to(self.prev_token.span) })
}
pub(super) fn parse_path_segments(
&mut self,
segments: &mut Vec<PathSegment>,
style: PathStyle,
) -> PResult<'a, ()> {
loop {
let segment = self.parse_path_segment(style)?;
if style == PathStyle::Expr {
// In order to check for trailing angle brackets, we must have finished
// recursing (`parse_path_segment` can indirectly call this function),
// that is, the next token must be the highlighted part of the below example:
//
// `Foo::<Bar as Baz<T>>::Qux`
// ^ here
//
// As opposed to the below highlight (if we had only finished the first
// recursion):
//
// `Foo::<Bar as Baz<T>>::Qux`
// ^ here
//
// `PathStyle::Expr` is only provided at the root invocation and never in
// `parse_path_segment` to recurse and therefore can be checked to maintain
// this invariant.
self.check_trailing_angle_brackets(&segment, &[&token::ModSep]);
}
segments.push(segment);
if self.is_import_coupler() || !self.eat(&token::ModSep) {
return Ok(());
}
}
}
pub(super) fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
let ident = self.parse_path_segment_ident()?;
let is_args_start = |token: &Token| match token.kind {
token::Lt
| token::BinOp(token::Shl)
| token::OpenDelim(token::Paren)
| token::LArrow => true,
_ => false,
};
let check_args_start = |this: &mut Self| {
this.expected_tokens.extend_from_slice(&[
TokenType::Token(token::Lt),
TokenType::Token(token::OpenDelim(token::Paren)),
]);
is_args_start(&this.token)
};
Ok(
if style == PathStyle::Type && check_args_start(self)
|| style != PathStyle::Mod
&& self.check(&token::ModSep)
&& self.look_ahead(1, |t| is_args_start(t))
{
// We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
// it isn't, then we reset the unmatched angle bracket count as we're about to start
// parsing a new path.
if style == PathStyle::Expr {
self.unmatched_angle_bracket_count = 0;
self.max_angle_bracket_count = 0;
}
// Generic arguments are found - `<`, `(`, `::<` or `::(`.
self.eat(&token::ModSep);
let lo = self.token.span;
let args = if self.eat_lt() {
// `<'a, T, A = U>`
let args =
self.parse_angle_args_with_leading_angle_bracket_recovery(style, lo)?;
self.expect_gt()?;
let span = lo.to(self.prev_token.span);
AngleBracketedArgs { args, span }.into()
} else {
// `(T, U) -> R`
let (inputs, _) = self.parse_paren_comma_seq(|p| p.parse_ty())?;
let span = ident.span.to(self.prev_token.span);
let output = self.parse_ret_ty(AllowPlus::No, RecoverQPath::No)?;
ParenthesizedArgs { inputs, output, span }.into()
};
PathSegment { ident, args, id: ast::DUMMY_NODE_ID }
} else {
// Generic arguments are not found.
PathSegment::from_ident(ident)
},
)
}
pub(super) fn parse_path_segment_ident(&mut self) -> PResult<'a, Ident> {
match self.token.ident() {
Some((ident, false)) if ident.is_path_segment_keyword() => {
self.bump();
Ok(ident)
}
_ => self.parse_ident(),
}
}
/// Parses generic args (within a path segment) with recovery for extra leading angle brackets.
/// For the purposes of understanding the parsing logic of generic arguments, this function
/// can be thought of as being the same as just calling `self.parse_angle_args()` if the source
/// had the correct amount of leading angle brackets.
///
/// ```ignore (diagnostics)
/// bar::<<<<T as Foo>::Output>();
/// ^^ help: remove extra angle brackets
/// ```
fn parse_angle_args_with_leading_angle_bracket_recovery(
&mut self,
style: PathStyle,
lo: Span,
) -> PResult<'a, Vec<AngleBracketedArg>> {
// We need to detect whether there are extra leading left angle brackets and produce an
// appropriate error and suggestion. This cannot be implemented by looking ahead at
// upcoming tokens for a matching `>` character - if there are unmatched `<` tokens
// then there won't be matching `>` tokens to find.
//
// To explain how this detection works, consider the following example:
//
// ```ignore (diagnostics)
// bar::<<<<T as Foo>::Output>();
// ^^ help: remove extra angle brackets
// ```
//
// Parsing of the left angle brackets starts in this function. We start by parsing the
// `<` token (incrementing the counter of unmatched angle brackets on `Parser` via
// `eat_lt`):
//
// *Upcoming tokens:* `<<<<T as Foo>::Output>;`
// *Unmatched count:* 1
// *`parse_path_segment` calls deep:* 0
//
// This has the effect of recursing as this function is called if a `<` character
// is found within the expected generic arguments:
//
// *Upcoming tokens:* `<<<T as Foo>::Output>;`
// *Unmatched count:* 2
// *`parse_path_segment` calls deep:* 1
//
// Eventually we will have recursed until having consumed all of the `<` tokens and
// this will be reflected in the count:
//
// *Upcoming tokens:* `T as Foo>::Output>;`
// *Unmatched count:* 4
// *`parse_path_segment` calls deep:* 3
//
// The parser will continue until reaching the first `>` - this will decrement the
// unmatched angle bracket count and return to the parent invocation of this function
// having succeeded in parsing:
//
// *Upcoming tokens:* `::Output>;`
// *Unmatched count:* 3
// *`parse_path_segment` calls deep:* 2
//
// This will continue until the next `>` character which will also return successfully
// to the parent invocation of this function and decrement the count:
//
// *Upcoming tokens:* `;`
// *Unmatched count:* 2
// *`parse_path_segment` calls deep:* 1
//
// At this point, this function will expect to find another matching `>` character but
// won't be able to and will return an error. This will continue all the way up the
// call stack until the first invocation:
//
// *Upcoming tokens:* `;`
// *Unmatched count:* 2
// *`parse_path_segment` calls deep:* 0
//
// In doing this, we have managed to work out how many unmatched leading left angle
// brackets there are, but we cannot recover as the unmatched angle brackets have
// already been consumed. To remedy this, we keep a snapshot of the parser state
// before we do the above. We can then inspect whether we ended up with a parsing error
// and unmatched left angle brackets and if so, restore the parser state before we
// consumed any `<` characters to emit an error and consume the erroneous tokens to
// recover by attempting to parse again.
//
// In practice, the recursion of this function is indirect and there will be other
// locations that consume some `<` characters - as long as we update the count when
// this happens, it isn't an issue.
let is_first_invocation = style == PathStyle::Expr;
// Take a snapshot before attempting to parse - we can restore this later.
let snapshot = if is_first_invocation { Some(self.clone()) } else { None };
debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)");
match self.parse_angle_args() {
Ok(args) => Ok(args),
Err(ref mut e) if is_first_invocation && self.unmatched_angle_bracket_count > 0 => {
// Cancel error from being unable to find `>`. We know the error
// must have been this due to a non-zero unmatched angle bracket
// count.
e.cancel();
// Swap `self` with our backup of the parser state before attempting to parse
// generic arguments.
let snapshot = mem::replace(self, snapshot.unwrap());
debug!(
"parse_generic_args_with_leading_angle_bracket_recovery: (snapshot failure) \
snapshot.count={:?}",
snapshot.unmatched_angle_bracket_count,
);
// Eat the unmatched angle brackets.
for _ in 0..snapshot.unmatched_angle_bracket_count {
self.eat_lt();
}
// Make a span over ${unmatched angle bracket count} characters.
let span = lo.with_hi(lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count));
self.struct_span_err(
span,
&format!(
"unmatched angle bracket{}",
pluralize!(snapshot.unmatched_angle_bracket_count)
),
)
.span_suggestion(
span,
&format!(
"remove extra angle bracket{}",
pluralize!(snapshot.unmatched_angle_bracket_count)
),
String::new(),
Applicability::MachineApplicable,
)
.emit();
// Try again without unmatched angle bracket characters.
self.parse_angle_args()
}
Err(e) => Err(e),
}
}
/// Parses (possibly empty) list of generic arguments / associated item constraints,
/// possibly including trailing comma.
pub(super) fn parse_angle_args(&mut self) -> PResult<'a, Vec<AngleBracketedArg>> {
let mut args = Vec::new();
while let Some(arg) = self.parse_angle_arg()? {
args.push(arg);
if !self.eat(&token::Comma) {
break;
}
}
Ok(args)
}
/// Parses a single argument in the angle arguments `<...>` of a path segment.
fn parse_angle_arg(&mut self) -> PResult<'a, Option<AngleBracketedArg>> {
if self.check_ident() && self.look_ahead(1, |t| matches!(t.kind, token::Eq | token::Colon))
{
// Parse associated type constraint.
let lo = self.token.span;
let ident = self.parse_ident()?;
let kind = if self.eat(&token::Eq) {
let ty = self.parse_assoc_equality_term(ident, self.prev_token.span)?;
AssocTyConstraintKind::Equality { ty }
} else if self.eat(&token::Colon) {
let bounds = self.parse_generic_bounds(Some(self.prev_token.span))?;
AssocTyConstraintKind::Bound { bounds }
} else {
unreachable!();
};
let span = lo.to(self.prev_token.span);
// Gate associated type bounds, e.g., `Iterator<Item: Ord>`.
if let AssocTyConstraintKind::Bound { .. } = kind {
self.sess.gated_spans.gate(sym::associated_type_bounds, span);
}
let constraint = AssocTyConstraint { id: ast::DUMMY_NODE_ID, ident, kind, span };
Ok(Some(AngleBracketedArg::Constraint(constraint)))
} else {
Ok(self.parse_generic_arg()?.map(AngleBracketedArg::Arg))
}
}
/// Parse the term to the right of an associated item equality constraint.
/// That is, parse `<term>` in `Item = <term>`.
/// Right now, this only admits types in `<term>`.
fn parse_assoc_equality_term(&mut self, ident: Ident, eq: Span) -> PResult<'a, P<ast::Ty>> {
let arg = self.parse_generic_arg()?;
let span = ident.span.to(self.prev_token.span);
match arg {
Some(GenericArg::Type(ty)) => return Ok(ty),
Some(GenericArg::Const(expr)) => {
self.struct_span_err(span, "cannot constrain an associated constant to a value")
.span_label(ident.span, "this associated constant...")
.span_label(expr.value.span, "...cannot be constrained to this value")
.emit();
}
Some(GenericArg::Lifetime(lt)) => {
self.struct_span_err(span, "associated lifetimes are not supported")
.span_label(lt.ident.span, "the lifetime is given here")
.help("if you meant to specify a trait object, write `dyn Trait + 'lifetime`")
.emit();
}
None => {
let after_eq = eq.shrink_to_hi();
let before_next = self.token.span.shrink_to_lo();
self.struct_span_err(after_eq.to(before_next), "missing type to the right of `=`")
.span_suggestion(
self.sess.source_map().next_point(eq).to(before_next),
"to constrain the associated type, add a type after `=`",
" TheType".to_string(),
Applicability::HasPlaceholders,
)
.span_suggestion(
eq.to(before_next),
&format!("remove the `=` if `{}` is a type", ident),
String::new(),
Applicability::MaybeIncorrect,
)
.emit();
}
}
Ok(self.mk_ty(span, ast::TyKind::Err))
}
/// Parse a generic argument in a path segment.
/// This does not include constraints, e.g., `Item = u8`, which is handled in `parse_angle_arg`.
fn parse_generic_arg(&mut self) -> PResult<'a, Option<GenericArg>> {
let arg = if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
// Parse lifetime argument.
GenericArg::Lifetime(self.expect_lifetime())
} else if self.check_const_arg() {
// Parse const argument.
let expr = if let token::OpenDelim(token::Brace) = self.token.kind {
self.parse_block_expr(
None,
self.token.span,
BlockCheckMode::Default,
ast::AttrVec::new(),
)?
} else if self.token.is_ident() {
// FIXME(const_generics): to distinguish between idents for types and consts,
// we should introduce a GenericArg::Ident in the AST and distinguish when
// lowering to the HIR. For now, idents for const args are not permitted.
if self.token.is_bool_lit() {
self.parse_literal_maybe_minus()?
} else {
let span = self.token.span;
let msg = "identifiers may currently not be used for const generics";
self.struct_span_err(span, msg).emit();
let block = self.mk_block_err(span);
self.mk_expr(span, ast::ExprKind::Block(block, None), ast::AttrVec::new())
}
} else {
self.parse_literal_maybe_minus()?
};
GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value: expr })
} else if self.check_type() {
// Parse type argument.
GenericArg::Type(self.parse_ty()?)
} else {
return Ok(None);
};
Ok(Some(arg))
}
}
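
A hedged illustration (not from this commit; the names are invented) of the path shapes the code above distinguishes: turbofish disambiguation in expression position (`PathStyle::Expr`), undisambiguated generics in type position (`PathStyle::Type`), qualified paths, parenthesized `Fn`-style arguments, and associated type constraints inside angle brackets.

use std::collections::HashMap;

// Parenthesized args and return type: `Fn(u8) -> u8` (`ParenthesizedArgs`).
fn apply(f: &dyn Fn(u8) -> u8, x: u8) -> u8 {
    f(x)
}

// Associated type constraint in angle args: `Item = u8` (`AssocTyConstraintKind::Equality`).
fn total<I: Iterator<Item = u8>>(it: I) -> u8 {
    it.sum()
}

fn main() {
    let v = Vec::<u8>::new();                    // expression position: `::<>` turbofish required
    let m: HashMap<String, u8> = HashMap::new(); // type position: no disambiguator needed
    let d = <u8 as Default>::default();          // qualified path (`parse_qpath`)
    let inc = |x: u8| x + 1;
    println!("{} {} {} {}", v.len(), m.len(), apply(&inc, d), total(1..=3u8));
}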

@@ -0,0 +1,427 @@
use super::attr::DEFAULT_INNER_ATTR_FORBIDDEN;
use super::diagnostics::Error;
use super::expr::LhsExpr;
use super::pat::GateOr;
use super::path::PathStyle;
use super::{BlockMode, Parser, Restrictions, SemiColonMode};
use crate::maybe_whole;
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, TokenKind};
use rustc_ast::util::classify;
use rustc_ast::{AttrStyle, AttrVec, Attribute, MacCall, MacStmtStyle};
use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, Local, Stmt, StmtKind, DUMMY_NODE_ID};
use rustc_errors::{Applicability, PResult};
use rustc_span::source_map::{BytePos, Span};
use rustc_span::symbol::{kw, sym};
use std::mem;
impl<'a> Parser<'a> {
/// Parses a statement. This stops just before trailing semicolons on everything but items.
/// e.g., a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed.
pub(super) fn parse_stmt(&mut self) -> PResult<'a, Option<Stmt>> {
Ok(self.parse_stmt_without_recovery().unwrap_or_else(|mut e| {
e.emit();
self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
None
}))
}
fn parse_stmt_without_recovery(&mut self) -> PResult<'a, Option<Stmt>> {
maybe_whole!(self, NtStmt, |x| Some(x));
let attrs = self.parse_outer_attributes()?;
let lo = self.token.span;
let stmt = if self.eat_keyword(kw::Let) {
self.parse_local_mk(lo, attrs.into())?
} else if self.is_kw_followed_by_ident(kw::Mut) {
self.recover_stmt_local(lo, attrs.into(), "missing keyword", "let mut")?
} else if self.is_kw_followed_by_ident(kw::Auto) {
self.bump(); // `auto`
let msg = "write `let` instead of `auto` to introduce a new variable";
self.recover_stmt_local(lo, attrs.into(), msg, "let")?
} else if self.is_kw_followed_by_ident(sym::var) {
self.bump(); // `var`
let msg = "write `let` instead of `var` to introduce a new variable";
self.recover_stmt_local(lo, attrs.into(), msg, "let")?
} else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
// We have avoided contextual keywords like `union`, items with `crate` visibility,
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
// that starts like a path (1 token) but is in fact not a path.
// Also, we avoid stealing syntax from `parse_item_`.
self.parse_stmt_path_start(lo, attrs)?
} else if let Some(item) = self.parse_item_common(attrs.clone(), false, true, |_| true)? {
// FIXME: Bad copy of attrs
self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
} else if self.eat(&token::Semi) {
// Do not attempt to parse an expression if we're done here.
self.error_outer_attrs(&attrs);
self.mk_stmt(lo, StmtKind::Empty)
} else if self.token != token::CloseDelim(token::Brace) {
// Remainder are line-expr stmts.
let e = self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs.into()))?;
self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
} else {
self.error_outer_attrs(&attrs);
return Ok(None);
};
Ok(Some(stmt))
}
fn parse_stmt_path_start(&mut self, lo: Span, attrs: Vec<Attribute>) -> PResult<'a, Stmt> {
let path = self.parse_path(PathStyle::Expr)?;
if self.eat(&token::Not) {
return self.parse_stmt_mac(lo, attrs.into(), path);
}
let expr = if self.check(&token::OpenDelim(token::Brace)) {
self.parse_struct_expr(path, AttrVec::new())?
} else {
let hi = self.prev_token.span;
self.mk_expr(lo.to(hi), ExprKind::Path(None, path), AttrVec::new())
};
let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
let expr = this.parse_dot_or_call_expr_with(expr, lo, attrs.into())?;
this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr))
})?;
Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr)))
}
/// Parses a statement macro `mac!(args)` provided a `path` representing `mac`.
/// At this point, the `!` token after the path has already been eaten.
fn parse_stmt_mac(&mut self, lo: Span, attrs: AttrVec, path: ast::Path) -> PResult<'a, Stmt> {
let args = self.parse_mac_args()?;
let delim = args.delim();
let hi = self.prev_token.span;
let style =
if delim == token::Brace { MacStmtStyle::Braces } else { MacStmtStyle::NoBraces };
let mac = MacCall { path, args, prior_type_ascription: self.last_type_ascription };
let kind = if delim == token::Brace || self.token == token::Semi || self.token == token::Eof
{
StmtKind::MacCall(P((mac, style, attrs)))
} else {
// Since none of the above applied, this is an expression statement macro.
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac), AttrVec::new());
let e = self.maybe_recover_from_bad_qpath(e, true)?;
let e = self.parse_dot_or_call_expr_with(e, lo, attrs)?;
let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
StmtKind::Expr(e)
};
Ok(self.mk_stmt(lo.to(hi), kind))
}
/// Error on outer attributes in this context.
/// Also error if the previous token was a doc comment.
fn error_outer_attrs(&self, attrs: &[Attribute]) {
if let [.., last] = attrs {
if last.is_doc_comment() {
self.span_fatal_err(last.span, Error::UselessDocComment).emit();
} else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
self.struct_span_err(last.span, "expected statement after outer attribute").emit();
}
}
}
fn recover_stmt_local(
&mut self,
lo: Span,
attrs: AttrVec,
msg: &str,
sugg: &str,
) -> PResult<'a, Stmt> {
let stmt = self.parse_local_mk(lo, attrs)?;
self.struct_span_err(lo, "invalid variable declaration")
.span_suggestion(lo, msg, sugg.to_string(), Applicability::MachineApplicable)
.emit();
Ok(stmt)
}
fn parse_local_mk(&mut self, lo: Span, attrs: AttrVec) -> PResult<'a, Stmt> {
let local = self.parse_local(attrs)?;
Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Local(local)))
}
/// Parses a local variable declaration.
fn parse_local(&mut self, attrs: AttrVec) -> PResult<'a, P<Local>> {
let lo = self.prev_token.span;
let pat = self.parse_top_pat(GateOr::Yes)?;
let (err, ty) = if self.eat(&token::Colon) {
// Save the state of the parser before parsing type normally, in case there is a `:`
// instead of an `=` typo.
let parser_snapshot_before_type = self.clone();
let colon_sp = self.prev_token.span;
match self.parse_ty() {
Ok(ty) => (None, Some(ty)),
Err(mut err) => {
if let Ok(snip) = self.span_to_snippet(pat.span) {
err.span_label(pat.span, format!("while parsing the type for `{}`", snip));
}
let err = if self.check(&token::Eq) {
err.emit();
None
} else {
// Rewind to before attempting to parse the type and continue parsing.
let parser_snapshot_after_type =
mem::replace(self, parser_snapshot_before_type);
Some((parser_snapshot_after_type, colon_sp, err))
};
(err, None)
}
}
} else {
(None, None)
};
let init = match (self.parse_initializer(err.is_some()), err) {
(Ok(init), None) => {
// init parsed, ty parsed
init
}
(Ok(init), Some((_, colon_sp, mut err))) => {
// init parsed, ty error
// We could parse the type as if it were the initializer; it is likely there was a
// typo in the code: `:` instead of `=`. Add suggestion and emit the error.
err.span_suggestion_short(
colon_sp,
"use `=` if you meant to assign",
" =".to_string(),
Applicability::MachineApplicable,
);
err.emit();
// As this was parsed successfully, continue as if the code has been fixed for the
// rest of the file. It will still fail due to the emitted error, but we avoid
// extra noise.
init
}
(Err(mut init_err), Some((snapshot, _, ty_err))) => {
// init error, ty error
init_err.cancel();
// Couldn't parse the type nor the initializer, only raise the type error and
// return to the parser state before parsing the type as the initializer.
// let x: <parse_error>;
*self = snapshot;
return Err(ty_err);
}
(Err(err), None) => {
// init error, ty parsed
// Couldn't parse the initializer and we're not attempting to recover a failed
// parse of the type, return the error.
return Err(err);
}
};
let hi = if self.token == token::Semi { self.token.span } else { self.prev_token.span };
Ok(P(ast::Local { ty, pat, init, id: DUMMY_NODE_ID, span: lo.to(hi), attrs }))
}
/// Parses the RHS of a local variable declaration (e.g., '= 14;').
fn parse_initializer(&mut self, eq_optional: bool) -> PResult<'a, Option<P<Expr>>> {
let eq_consumed = match self.token.kind {
token::BinOpEq(..) => {
// Recover `let x <op>= 1` as `let x = 1`
self.struct_span_err(
self.token.span,
"can't reassign to an uninitialized variable",
)
.span_suggestion_short(
self.token.span,
"initialize the variable",
"=".to_string(),
Applicability::MaybeIncorrect,
)
.emit();
self.bump();
true
}
_ => self.eat(&token::Eq),
};
Ok(if eq_consumed || eq_optional { Some(self.parse_expr()?) } else { None })
}
/// Parses a block. No inner attributes are allowed.
pub(super) fn parse_block(&mut self) -> PResult<'a, P<Block>> {
let (attrs, block) = self.parse_inner_attrs_and_block()?;
if let [.., last] = &*attrs {
self.error_on_forbidden_inner_attr(last.span, DEFAULT_INNER_ATTR_FORBIDDEN);
}
Ok(block)
}
fn error_block_no_opening_brace<T>(&mut self) -> PResult<'a, T> {
let sp = self.token.span;
let tok = super::token_descr(&self.token);
let mut e = self.struct_span_err(sp, &format!("expected `{{`, found {}", tok));
let do_not_suggest_help = self.token.is_keyword(kw::In) || self.token == token::Colon;
// Check to see if the user has written something like
//
// if (cond)
// bar;
//
// which is valid in other languages, but not Rust.
match self.parse_stmt_without_recovery() {
// If the next token is an open brace (e.g., `if a b {`), the place-
// inside-a-block suggestion would be more likely wrong than right.
Ok(Some(_))
if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace))
|| do_not_suggest_help => {}
Ok(Some(stmt)) => {
let stmt_own_line = self.sess.source_map().is_line_before_span_empty(sp);
let stmt_span = if stmt_own_line && self.eat(&token::Semi) {
// Expand the span to include the semicolon.
stmt.span.with_hi(self.prev_token.span.hi())
} else {
stmt.span
};
if let Ok(snippet) = self.span_to_snippet(stmt_span) {
e.span_suggestion(
stmt_span,
"try placing this code inside a block",
format!("{{ {} }}", snippet),
// Speculative; has been misleading in the past (#46836).
Applicability::MaybeIncorrect,
);
}
}
Err(mut e) => {
self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
e.cancel();
}
_ => {}
}
e.span_label(sp, "expected `{`");
Err(e)
}
/// Parses a block. Inner attributes are allowed.
pub(super) fn parse_inner_attrs_and_block(
&mut self,
) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
self.parse_block_common(self.token.span, BlockCheckMode::Default)
}
/// Parses a block. Inner attributes are allowed.
pub(super) fn parse_block_common(
&mut self,
lo: Span,
blk_mode: BlockCheckMode,
) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
maybe_whole!(self, NtBlock, |x| (Vec::new(), x));
if !self.eat(&token::OpenDelim(token::Brace)) {
return self.error_block_no_opening_brace();
}
Ok((self.parse_inner_attributes()?, self.parse_block_tail(lo, blk_mode)?))
}
/// Parses the rest of a block expression or function body.
/// Precondition: already parsed the '{'.
fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P<Block>> {
let mut stmts = vec![];
while !self.eat(&token::CloseDelim(token::Brace)) {
if self.token == token::Eof {
break;
}
let stmt = match self.parse_full_stmt() {
Err(mut err) => {
self.maybe_annotate_with_ascription(&mut err, false);
err.emit();
self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
Some(self.mk_stmt_err(self.token.span))
}
Ok(stmt) => stmt,
};
if let Some(stmt) = stmt {
stmts.push(stmt);
} else {
// Found only `;` or `}`.
continue;
};
}
Ok(self.mk_block(stmts, s, lo.to(self.prev_token.span)))
}
/// Parses a statement, including the trailing semicolon.
pub fn parse_full_stmt(&mut self) -> PResult<'a, Option<Stmt>> {
// Skip looking for a trailing semicolon when we have an interpolated statement.
maybe_whole!(self, NtStmt, |x| Some(x));
let mut stmt = match self.parse_stmt_without_recovery()? {
Some(stmt) => stmt,
None => return Ok(None),
};
let mut eat_semi = true;
match stmt.kind {
// Expression without semicolon.
StmtKind::Expr(ref expr)
if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) =>
{
// Just check for errors and recover; do not eat semicolon yet.
if let Err(mut e) =
self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)])
{
if let TokenKind::DocComment(..) = self.token.kind {
if let Ok(snippet) = self.span_to_snippet(self.token.span) {
let sp = self.token.span;
let marker = &snippet[..3];
let (comment_marker, doc_comment_marker) = marker.split_at(2);
e.span_suggestion(
sp.with_hi(sp.lo() + BytePos(marker.len() as u32)),
&format!(
"add a space before `{}` to use a regular comment",
doc_comment_marker,
),
format!("{} {}", comment_marker, doc_comment_marker),
Applicability::MaybeIncorrect,
);
}
}
e.emit();
self.recover_stmt();
// Don't complain about type errors in body tail after parse error (#57383).
let sp = expr.span.to(self.prev_token.span);
stmt.kind = StmtKind::Expr(self.mk_expr_err(sp));
}
}
StmtKind::Local(..) => {
self.expect_semi()?;
eat_semi = false;
}
StmtKind::Empty => eat_semi = false,
_ => {}
}
if eat_semi && self.eat(&token::Semi) {
stmt = stmt.add_trailing_semicolon();
}
stmt.span = stmt.span.to(self.prev_token.span);
Ok(Some(stmt))
}
pub(super) fn mk_block(&self, stmts: Vec<Stmt>, rules: BlockCheckMode, span: Span) -> P<Block> {
P(Block { stmts, id: DUMMY_NODE_ID, rules, span })
}
pub(super) fn mk_stmt(&self, span: Span, kind: StmtKind) -> Stmt {
Stmt { id: DUMMY_NODE_ID, kind, span }
}
fn mk_stmt_err(&self, span: Span) -> Stmt {
self.mk_stmt(span, StmtKind::Expr(self.mk_expr_err(span)))
}
pub(super) fn mk_block_err(&self, span: Span) -> P<Block> {
self.mk_block(vec![self.mk_stmt_err(span)], BlockCheckMode::Default, span)
}
}
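
As a rough illustration (not part of this commit) of the statement forms handled above: `let` bindings with optional type and initializer, item statements, expression statements that require a trailing `;`, macro-call statements, and a block whose tail expression has no semicolon.

fn main() {
    // `parse_local_mk`: pattern, optional `: TYPE`, optional `= EXPR`.
    let x: u32 = 1;
    let y;
    y = x + 1; // expression statement; `parse_full_stmt` consumes the `;`

    // Item statement (`StmtKind::Item`).
    fn double(n: u32) -> u32 {
        n * 2
    }

    // Macro-call statements: brace-delimited invocations need no `;`,
    // paren-delimited ones are treated like expression statements.
    macro_rules! noop { () => {} }
    noop! {}
    println!("{}", double(y));

    // Block with a tail expression (no trailing `;`), used as an initializer.
    let z = { let t = double(x); t + y };
    assert_eq!(z, 4);
}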

@@ -0,0 +1,631 @@
use super::{Parser, PathStyle, TokenType};
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Token, TokenKind};
use rustc_ast::{self as ast, BareFnTy, FnRetTy, GenericParam, Lifetime, MutTy, Ty, TyKind};
use rustc_ast::{GenericBound, GenericBounds, MacCall, Mutability};
use rustc_ast::{PolyTraitRef, TraitBoundModifier, TraitObjectSyntax};
use rustc_errors::{pluralize, struct_span_err, Applicability, PResult};
use rustc_span::source_map::Span;
use rustc_span::symbol::{kw, sym};
/// Any `?` or `?const` modifiers that appear at the start of a bound.
struct BoundModifiers {
/// `?Trait`.
maybe: Option<Span>,
/// `?const Trait`.
maybe_const: Option<Span>,
}
impl BoundModifiers {
fn to_trait_bound_modifier(&self) -> TraitBoundModifier {
match (self.maybe, self.maybe_const) {
(None, None) => TraitBoundModifier::None,
(Some(_), None) => TraitBoundModifier::Maybe,
(None, Some(_)) => TraitBoundModifier::MaybeConst,
(Some(_), Some(_)) => TraitBoundModifier::MaybeConstMaybe,
}
}
}
#[derive(Copy, Clone, PartialEq)]
pub(super) enum AllowPlus {
Yes,
No,
}
#[derive(PartialEq)]
pub(super) enum RecoverQPath {
Yes,
No,
}
// Is `...` (`CVarArgs`) legal at this level of type parsing?
#[derive(PartialEq)]
enum AllowCVariadic {
Yes,
No,
}
/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
/// `IDENT<<u8 as Trait>::AssocTy>`.
///
/// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
/// that `IDENT` is not the ident of a fn trait.
fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
t == &token::ModSep || t == &token::Lt || t == &token::BinOp(token::Shl)
}
impl<'a> Parser<'a> {
/// Parses a type.
pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
self.parse_ty_common(AllowPlus::Yes, RecoverQPath::Yes, AllowCVariadic::No)
}
/// Parses a type suitable for a function or function pointer parameter.
/// The difference from `parse_ty` is that this version allows `...`
/// (`CVarArgs`) at the top level of the type.
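/// For example, the trailing `...` of a C-variadic signature such as
/// `unsafe extern "C" fn(fmt: *const c_char, ...)` parses as `TyKind::CVarArgs`.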
pub(super) fn parse_ty_for_param(&mut self) -> PResult<'a, P<Ty>> {
self.parse_ty_common(AllowPlus::Yes, RecoverQPath::Yes, AllowCVariadic::Yes)
}
/// Parses a type in restricted contexts where `+` is not permitted.
///
/// Example 1: `&'a TYPE`
/// `+` is prohibited to maintain operator priority (P(+) < P(&)).
/// Example 2: `value1 as TYPE + value2`
/// `+` is prohibited to avoid interactions with expression grammar.
pub(super) fn parse_ty_no_plus(&mut self) -> PResult<'a, P<Ty>> {
self.parse_ty_common(AllowPlus::No, RecoverQPath::Yes, AllowCVariadic::No)
}
/// Parses an optional return type `[ -> TY ]` in a function declaration.
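/// For example, `-> u32` yields `FnRetTy::Ty`, while an absent arrow yields
/// `FnRetTy::Default` spanning the position just before the next token.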
pub(super) fn parse_ret_ty(
&mut self,
allow_plus: AllowPlus,
recover_qpath: RecoverQPath,
) -> PResult<'a, FnRetTy> {
Ok(if self.eat(&token::RArrow) {
// FIXME(Centril): Can we unconditionally `allow_plus`?
let ty = self.parse_ty_common(allow_plus, recover_qpath, AllowCVariadic::No)?;
FnRetTy::Ty(ty)
} else {
FnRetTy::Default(self.token.span.shrink_to_lo())
})
}
fn parse_ty_common(
&mut self,
allow_plus: AllowPlus,
recover_qpath: RecoverQPath,
allow_c_variadic: AllowCVariadic,
) -> PResult<'a, P<Ty>> {
let allow_qpath_recovery = recover_qpath == RecoverQPath::Yes;
maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery);
maybe_whole!(self, NtTy, |x| x);
let lo = self.token.span;
let mut impl_dyn_multi = false;
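// Dispatch on the leading token(s) to pick the type form, e.g. `(u8, u8)`,
// `!`, `*const u8`, `[u8; 4]`, `&'a mut T`, `typeof(x)`, `_`, `fn(u8) -> u8`,
// `for<'a> fn(&'a u8)`, `impl Trait`, `dyn Trait`, `<T as Trait>::Assoc`,
// `path::to::Type`, a bare bound list, or `...`.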
let kind = if self.check(&token::OpenDelim(token::Paren)) {
self.parse_ty_tuple_or_parens(lo, allow_plus)?
} else if self.eat(&token::Not) {
// Never type `!`
TyKind::Never
} else if self.eat(&token::BinOp(token::Star)) {
self.parse_ty_ptr()?
} else if self.eat(&token::OpenDelim(token::Bracket)) {
self.parse_array_or_slice_ty()?
} else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) {
// Reference
self.expect_and()?;
self.parse_borrowed_pointee()?
} else if self.eat_keyword_noexpect(kw::Typeof) {
self.parse_typeof_ty()?
} else if self.eat_keyword(kw::Underscore) {
// A type to be inferred `_`
TyKind::Infer
} else if self.check_fn_front_matter() {
// Function pointer type
self.parse_ty_bare_fn(lo, Vec::new())?
} else if self.check_keyword(kw::For) {
// Function pointer type or bound list (trait object type) starting with a poly-trait.
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
// `for<'lt> Trait1<'lt> + Trait2 + 'a`
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
if self.check_fn_front_matter() {
self.parse_ty_bare_fn(lo, lifetime_defs)?
} else {
let path = self.parse_path(PathStyle::Type)?;
let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
self.parse_remaining_bounds_path(lifetime_defs, path, lo, parse_plus)?
}
} else if self.eat_keyword(kw::Impl) {
self.parse_impl_ty(&mut impl_dyn_multi)?
} else if self.is_explicit_dyn_type() {
self.parse_dyn_ty(&mut impl_dyn_multi)?
} else if self.eat_lt() {
// Qualified path
let (qself, path) = self.parse_qpath(PathStyle::Type)?;
TyKind::Path(Some(qself), path)
} else if self.check_path() {
self.parse_path_start_ty(lo, allow_plus)?
} else if self.can_begin_bound() {
self.parse_bare_trait_object(lo, allow_plus)?
} else if self.eat(&token::DotDotDot) {
if allow_c_variadic == AllowCVariadic::Yes {
TyKind::CVarArgs
} else {
// FIXME(Centril): Should we just allow `...` syntactically
// anywhere in a type and use semantic restrictions instead?
self.error_illegal_c_varadic_ty(lo);
TyKind::Err
}
} else {
let msg = format!("expected type, found {}", super::token_descr(&self.token));
let mut err = self.struct_span_err(self.token.span, &msg);
err.span_label(self.token.span, "expected type");
self.maybe_annotate_with_ascription(&mut err, true);
return Err(err);
};
let span = lo.to(self.prev_token.span);
let ty = self.mk_ty(span, kind);
// Try to recover from use of `+` with incorrect priority.
self.maybe_report_ambiguous_plus(allow_plus, impl_dyn_multi, &ty);
self.maybe_recover_from_bad_type_plus(allow_plus, &ty)?;
self.maybe_recover_from_bad_qpath(ty, allow_qpath_recovery)
}
/// Parses either:
/// - `(TYPE)`, a parenthesized type.
/// - `(TYPE,)`, a tuple with a single field of type TYPE.
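/// Tuples with two or more fields, e.g. `(u8, u16)`, also land here, and a
/// parenthesized bound followed by `+`, e.g. `(Send) + Sync`, is parsed into a
/// bare trait object type (see the match below).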
fn parse_ty_tuple_or_parens(&mut self, lo: Span, allow_plus: AllowPlus) -> PResult<'a, TyKind> {
let mut trailing_plus = false;
let (ts, trailing) = self.parse_paren_comma_seq(|p| {
let ty = p.parse_ty()?;
trailing_plus = p.prev_token.kind == TokenKind::BinOp(token::Plus);
Ok(ty)
})?;
if ts.len() == 1 && !trailing {
let ty = ts.into_iter().next().unwrap().into_inner();
let maybe_bounds = allow_plus == AllowPlus::Yes && self.token.is_like_plus();
match ty.kind {
// `(TY_BOUND_NOPAREN) + BOUND + ...`.
TyKind::Path(None, path) if maybe_bounds => {
self.parse_remaining_bounds_path(Vec::new(), path, lo, true)
}
TyKind::TraitObject(bounds, TraitObjectSyntax::None)
if maybe_bounds && bounds.len() == 1 && !trailing_plus =>
{
self.parse_remaining_bounds(bounds, true)
}
// `(TYPE)`
_ => Ok(TyKind::Paren(P(ty))),
}
} else {
Ok(TyKind::Tup(ts))
}
}
fn parse_bare_trait_object(&mut self, lo: Span, allow_plus: AllowPlus) -> PResult<'a, TyKind> {
let lt_no_plus = self.check_lifetime() && !self.look_ahead(1, |t| t.is_like_plus());
let bounds = self.parse_generic_bounds_common(allow_plus, None)?;
if lt_no_plus {
self.struct_span_err(lo, "lifetime in trait object type must be followed by `+`").emit()
}
Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
}
fn parse_remaining_bounds_path(
&mut self,
generic_params: Vec<GenericParam>,
path: ast::Path,
lo: Span,
parse_plus: bool,
) -> PResult<'a, TyKind> {
let poly_trait_ref = PolyTraitRef::new(generic_params, path, lo.to(self.prev_token.span));
let bounds = vec![GenericBound::Trait(poly_trait_ref, TraitBoundModifier::None)];
self.parse_remaining_bounds(bounds, parse_plus)
}
/// Parse the remainder of a bare trait object type given an already parsed list.
fn parse_remaining_bounds(
&mut self,
mut bounds: GenericBounds,
plus: bool,
) -> PResult<'a, TyKind> {
assert_ne!(self.token, token::Question);
if plus {
self.eat_plus(); // `+`, or `+=` gets split and `+` is discarded
bounds.append(&mut self.parse_generic_bounds(Some(self.prev_token.span))?);
}
Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
}
/// Parses a raw pointer type: `*[const | mut] $type`.
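/// For example, `*const u8` or `*mut dyn Fn()`. A bare `*T` is reported as an
/// error and then parsed as if it were `*const T`.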
fn parse_ty_ptr(&mut self) -> PResult<'a, TyKind> {
let mutbl = self.parse_const_or_mut().unwrap_or_else(|| {
let span = self.prev_token.span;
let msg = "expected mut or const in raw pointer type";
self.struct_span_err(span, msg)
.span_label(span, msg)
.help("use `*mut T` or `*const T` as appropriate")
.emit();
Mutability::Not
});
let ty = self.parse_ty_no_plus()?;
Ok(TyKind::Ptr(MutTy { ty, mutbl }))
}
/// Parses an array (`[TYPE; EXPR]`) or slice (`[TYPE]`) type.
/// The opening `[` bracket is already eaten.
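/// For example, `[u8]` is a slice, while `[u8; 4]` is an array whose length is
/// parsed as an anonymous constant expression.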
fn parse_array_or_slice_ty(&mut self) -> PResult<'a, TyKind> {
let elt_ty = self.parse_ty()?;
let ty = if self.eat(&token::Semi) {
TyKind::Array(elt_ty, self.parse_anon_const_expr()?)
} else {
TyKind::Slice(elt_ty)
};
self.expect(&token::CloseDelim(token::Bracket))?;
Ok(ty)
}
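/// Parses the pointee of a reference type, e.g. the `'a mut u8` part of
/// `&'a mut u8`; the leading `&` (or split `&&`) has already been consumed.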
fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> {
let opt_lifetime = if self.check_lifetime() { Some(self.expect_lifetime()) } else { None };
let mutbl = self.parse_mutability();
let ty = self.parse_ty_no_plus()?;
Ok(TyKind::Rptr(opt_lifetime, MutTy { ty, mutbl }))
}
// Parses `typeof(EXPR)`.
// To avoid ambiguity, the expression is surrounded by parentheses.
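// For example, `typeof(1 + 1)`. `typeof` is a reserved keyword, but types
// written this way are rejected later in the compiler.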
fn parse_typeof_ty(&mut self) -> PResult<'a, TyKind> {
self.expect(&token::OpenDelim(token::Paren))?;
let expr = self.parse_anon_const_expr()?;
self.expect(&token::CloseDelim(token::Paren))?;
Ok(TyKind::Typeof(expr))
}
/// Parses a function pointer type (`TyKind::BareFn`).
/// ```
///    [unsafe] [extern "ABI"] fn (S) -> T
///     ^~~~~^          ^~~~^     ^~^    ^
///       |               |        |     |
///       |               |        |   Return type
/// Function Style        ABI  Parameter types
/// ```
/// We actually parse `FnHeader FnDecl`, but we error on `const` and `async` qualifiers.
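/// For example, `unsafe extern "C" fn(u8) -> u8` is accepted, while
/// `const fn()` and `async fn()` are reported with "an `fn` pointer type
/// cannot be `const`/`async`" errors.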
fn parse_ty_bare_fn(&mut self, lo: Span, params: Vec<GenericParam>) -> PResult<'a, TyKind> {
let ast::FnHeader { ext, unsafety, constness, asyncness } = self.parse_fn_front_matter()?;
let decl = self.parse_fn_decl(|_| false, AllowPlus::No)?;
let whole_span = lo.to(self.prev_token.span);
if let ast::Const::Yes(span) = constness {
self.error_fn_ptr_bad_qualifier(whole_span, span, "const");
}
if let ast::Async::Yes { span, .. } = asyncness {
self.error_fn_ptr_bad_qualifier(whole_span, span, "async");
}
Ok(TyKind::BareFn(P(BareFnTy { ext, unsafety, generic_params: params, decl })))
}
/// Emit an error for the given bad function pointer qualifier.
fn error_fn_ptr_bad_qualifier(&self, span: Span, qual_span: Span, qual: &str) {
self.struct_span_err(span, &format!("an `fn` pointer type cannot be `{}`", qual))
.span_label(qual_span, format!("`{}` because of this", qual))
.span_suggestion_short(
qual_span,
&format!("remove the `{}` qualifier", qual),
String::new(),
Applicability::MaybeIncorrect,
)
.emit();
}
/// Parses an `impl B0 + ... + Bn` type.
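/// For example, `impl Iterator<Item = u8> + Send + 'static`.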
fn parse_impl_ty(&mut self, impl_dyn_multi: &mut bool) -> PResult<'a, TyKind> {
// Always parse bounds greedily for better error recovery.
let bounds = self.parse_generic_bounds(None)?;
*impl_dyn_multi = bounds.len() > 1 || self.prev_token.kind == TokenKind::BinOp(token::Plus);
Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds))
}
/// Is a `dyn B0 + ... + Bn` type allowed here?
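/// On edition 2018 any `dyn` here starts a trait object type; on edition 2015
/// it only does so when the next token can begin a bound and is not `::`, `<`,
/// or `<<`.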
fn is_explicit_dyn_type(&mut self) -> bool {
self.check_keyword(kw::Dyn)
&& (self.token.uninterpolated_span().rust_2018()
|| self.look_ahead(1, |t| {
t.can_begin_bound() && !can_continue_type_after_non_fn_ident(t)
}))
}
/// Parses a `dyn B0 + ... + Bn` type.
///
/// Note that this does *not* parse bare trait objects.
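/// For example, `dyn Debug + Send + 'static`.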
fn parse_dyn_ty(&mut self, impl_dyn_multi: &mut bool) -> PResult<'a, TyKind> {
self.bump(); // `dyn`
// Always parse bounds greedily for better error recovery.
let bounds = self.parse_generic_bounds(None)?;
*impl_dyn_multi = bounds.len() > 1 || self.prev_token.kind == TokenKind::BinOp(token::Plus);
Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn))
}
/// Parses a type starting with a path.
///
/// This can be:
/// 1. a type macro, `mac!(...)`,
/// 2. a bare trait object, `B0 + ... + Bn`,
/// 3. or a path, `path::to::MyType`.
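///
/// For example: `some_macro!(..)`, `Send + Sync + 'static`, and
/// `std::vec::Vec<u8>`, respectively (`some_macro` being a placeholder name).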
fn parse_path_start_ty(&mut self, lo: Span, allow_plus: AllowPlus) -> PResult<'a, TyKind> {
// Simple path
let path = self.parse_path(PathStyle::Type)?;
if self.eat(&token::Not) {
// Macro invocation in type position
Ok(TyKind::MacCall(MacCall {
path,
args: self.parse_mac_args()?,
prior_type_ascription: self.last_type_ascription,
}))
} else if allow_plus == AllowPlus::Yes && self.check_plus() {
// `Trait1 + Trait2 + 'a`
self.parse_remaining_bounds_path(Vec::new(), path, lo, true)
} else {
// Just a type path.
Ok(TyKind::Path(None, path))
}
}
fn error_illegal_c_varadic_ty(&self, lo: Span) {
struct_span_err!(
self.sess.span_diagnostic,
lo.to(self.prev_token.span),
E0743,
"C-variadic type `...` may not be nested inside another type",
)
.emit();
}
pub(super) fn parse_generic_bounds(
&mut self,
colon_span: Option<Span>,
) -> PResult<'a, GenericBounds> {
self.parse_generic_bounds_common(AllowPlus::Yes, colon_span)
}
/// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`.
///
/// See `parse_generic_bound` for the `BOUND` grammar.
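/// For example, `'a + ?Sized + Clone` parses into three bounds; negative
/// bounds such as `!Send` are collected and reported via `error_negative_bounds`.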
fn parse_generic_bounds_common(
&mut self,
allow_plus: AllowPlus,
colon_span: Option<Span>,
) -> PResult<'a, GenericBounds> {
let mut bounds = Vec::new();
let mut negative_bounds = Vec::new();
while self.can_begin_bound() {
match self.parse_generic_bound()? {
Ok(bound) => bounds.push(bound),
Err(neg_sp) => negative_bounds.push(neg_sp),
}
if allow_plus == AllowPlus::No || !self.eat_plus() {
break;
}
}
if !negative_bounds.is_empty() {
self.error_negative_bounds(colon_span, &bounds, negative_bounds);
}
Ok(bounds)
}
/// Can the current token begin a bound?
fn can_begin_bound(&mut self) -> bool {
// This needs to be synchronized with `TokenKind::can_begin_bound`.
self.check_path()
|| self.check_lifetime()
|| self.check(&token::Not) // Used for error reporting only.
|| self.check(&token::Question)
|| self.check_keyword(kw::For)
|| self.check(&token::OpenDelim(token::Paren))
}
fn error_negative_bounds(
&self,
colon_span: Option<Span>,
bounds: &[GenericBound],
negative_bounds: Vec<Span>,
) {
let negative_bounds_len = negative_bounds.len();
let last_span = *negative_bounds.last().expect("no negative bounds, but still error?");
let mut err = self.struct_span_err(negative_bounds, "negative bounds are not supported");
err.span_label(last_span, "negative bounds are not supported");
if let Some(bound_list) = colon_span {
let bound_list = bound_list.to(self.prev_token.span);
let mut new_bound_list = String::new();
if !bounds.is_empty() {
let mut snippets = bounds.iter().map(|bound| self.span_to_snippet(bound.span()));
while let Some(Ok(snippet)) = snippets.next() {
new_bound_list.push_str(" + ");
new_bound_list.push_str(&snippet);
}
new_bound_list = new_bound_list.replacen(" +", ":", 1);
}
err.tool_only_span_suggestion(
bound_list,
&format!("remove the bound{}", pluralize!(negative_bounds_len)),
new_bound_list,
Applicability::MachineApplicable,
);
}
err.emit();
}
/// Parses a bound according to the grammar:
/// ```
/// BOUND = TY_BOUND | LT_BOUND
/// ```
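/// For example, `'static` (a lifetime bound), or `?Sized` and
/// `for<'a> Fn(&'a u8)` (trait bounds). A leading `!` marks a negative bound,
/// returned as an `Err` span for the caller to report.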
fn parse_generic_bound(&mut self) -> PResult<'a, Result<GenericBound, Span>> {
let anchor_lo = self.prev_token.span;
let lo = self.token.span;
let has_parens = self.eat(&token::OpenDelim(token::Paren));
let inner_lo = self.token.span;
let is_negative = self.eat(&token::Not);
let modifiers = self.parse_ty_bound_modifiers();
let bound = if self.token.is_lifetime() {
self.error_lt_bound_with_modifiers(modifiers);
self.parse_generic_lt_bound(lo, inner_lo, has_parens)?
} else {
self.parse_generic_ty_bound(lo, has_parens, modifiers)?
};
Ok(if is_negative { Err(anchor_lo.to(self.prev_token.span)) } else { Ok(bound) })
}
/// Parses a lifetime ("outlives") bound, e.g. `'a`, according to:
/// ```
/// LT_BOUND = LIFETIME
/// ```
fn parse_generic_lt_bound(
&mut self,
lo: Span,
inner_lo: Span,
has_parens: bool,
) -> PResult<'a, GenericBound> {
let bound = GenericBound::Outlives(self.expect_lifetime());
if has_parens {
// FIXME(Centril): Consider not erroring here and accepting `('lt)` instead,
// possibly introducing `GenericBound::Paren(P<GenericBound>)`?
self.recover_paren_lifetime(lo, inner_lo)?;
}
Ok(bound)
}
/// Emits an error if any trait bound modifiers were present.
fn error_lt_bound_with_modifiers(&self, modifiers: BoundModifiers) {
if let Some(span) = modifiers.maybe_const {
self.struct_span_err(
span,
"`?const` may only modify trait bounds, not lifetime bounds",
)
.emit();
}
if let Some(span) = modifiers.maybe {
self.struct_span_err(span, "`?` may only modify trait bounds, not lifetime bounds")
.emit();
}
}
/// Recover on `('lifetime)` with `(` already eaten.
fn recover_paren_lifetime(&mut self, lo: Span, inner_lo: Span) -> PResult<'a, ()> {
let inner_span = inner_lo.to(self.prev_token.span);
self.expect(&token::CloseDelim(token::Paren))?;
let mut err = self.struct_span_err(
lo.to(self.prev_token.span),
"parenthesized lifetime bounds are not supported",
);
if let Ok(snippet) = self.span_to_snippet(inner_span) {
err.span_suggestion_short(
lo.to(self.prev_token.span),
"remove the parentheses",
snippet,
Applicability::MachineApplicable,
);
}
err.emit();
Ok(())
}
/// Parses the modifiers that may precede a trait in a bound, e.g. `?Trait` or `?const Trait`.
///
/// If no modifiers are present, this does not consume any tokens.
///
/// ```
/// TY_BOUND_MODIFIERS = "?" ["const" ["?"]]
/// ```
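/// For example, `?Trait` sets `maybe`, `?const Trait` sets `maybe_const`, and
/// `?const ?Trait` sets both.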
fn parse_ty_bound_modifiers(&mut self) -> BoundModifiers {
if !self.eat(&token::Question) {
return BoundModifiers { maybe: None, maybe_const: None };
}
// `? ...`
let first_question = self.prev_token.span;
if !self.eat_keyword(kw::Const) {
return BoundModifiers { maybe: Some(first_question), maybe_const: None };
}
// `?const ...`
let maybe_const = first_question.to(self.prev_token.span);
self.sess.gated_spans.gate(sym::const_trait_bound_opt_out, maybe_const);
if !self.eat(&token::Question) {
return BoundModifiers { maybe: None, maybe_const: Some(maybe_const) };
}
// `?const ? ...`
let second_question = self.prev_token.span;
BoundModifiers { maybe: Some(second_question), maybe_const: Some(maybe_const) }
}
/// Parses a type bound according to:
/// ```
/// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
/// TY_BOUND_NOPAREN = [TY_BOUND_MODIFIERS] [for<LT_PARAM_DEFS>] SIMPLE_PATH
/// ```
///
/// For example, this grammar accepts `?const ?for<'a: 'b> m::Trait<'a>`.
fn parse_generic_ty_bound(
&mut self,
lo: Span,
has_parens: bool,
modifiers: BoundModifiers,
) -> PResult<'a, GenericBound> {
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
let path = self.parse_path(PathStyle::Type)?;
if has_parens {
self.expect(&token::CloseDelim(token::Paren))?;
}
let modifier = modifiers.to_trait_bound_modifier();
let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo.to(self.prev_token.span));
Ok(GenericBound::Trait(poly_trait, modifier))
}
/// Optionally parses `for<$generic_params>`.
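/// For example, the `for<'a, 'b>` prefix of `for<'a, 'b> Fn(&'a u8, &'b u8)`.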
pub(super) fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<GenericParam>> {
if self.eat_keyword(kw::For) {
self.expect_lt()?;
let params = self.parse_generic_params()?;
self.expect_gt()?;
// We rely on AST validation to rule out invalid cases: There must not be type
// parameters, and the lifetime parameters must not have bounds.
Ok(params)
} else {
Ok(Vec::new())
}
}
pub(super) fn check_lifetime(&mut self) -> bool {
self.expected_tokens.push(TokenType::Lifetime);
self.token.is_lifetime()
}
/// Parses a single lifetime `'a` or panics.
pub(super) fn expect_lifetime(&mut self) -> Lifetime {
if let Some(ident) = self.token.lifetime() {
self.bump();
Lifetime { ident, id: ast::DUMMY_NODE_ID }
} else {
self.span_bug(self.token.span, "not a lifetime")
}
}
pub(super) fn mk_ty(&self, span: Span, kind: TyKind) -> P<Ty> {
P(Ty { kind, span, id: ast::DUMMY_NODE_ID })
}
}