Auto merge of #60767 - Centril:rollup-4cbsb73, r=Centril
Rollup of 4 pull requests

Successful merges:
 - #60694 (Fix HIR printing of existential type #60662)
 - #60750 (syntax: Remove some legacy nonterminal tokens)
 - #60751 (Assorted cleanup in parser & AST validation)
 - #60752 (Fix minor typos for ItemLocalId)

Failed merges:

r? @ghost
commit 4443957f27
9 changed files with 80 additions and 89 deletions
@@ -126,12 +126,12 @@ mod item_local_id_inner {
     use rustc_macros::HashStable;
     newtype_index! {
         /// An `ItemLocalId` uniquely identifies something within a given "item-like",
-        /// that is within a hir::Item, hir::TraitItem, or hir::ImplItem. There is no
+        /// that is, within a hir::Item, hir::TraitItem, or hir::ImplItem. There is no
         /// guarantee that the numerical value of a given `ItemLocalId` corresponds to
         /// the node's position within the owning item in any way, but there is a
         /// guarantee that the `LocalItemId`s within an owner occupy a dense range of
         /// integers starting at zero, so a mapping that maps all or most nodes within
-        /// an "item-like" to something else can be implement by a `Vec` instead of a
+        /// an "item-like" to something else can be implemented by a `Vec` instead of a
        /// tree or hash map.
         pub struct ItemLocalId {
             derive [HashStable]
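For context, the guarantee described in the doc comment above (dense, zero-based ids within one owner) is what lets per-item maps be backed by a plain `Vec`. A minimal sketch of that idea, with hypothetical types rather than the compiler's actual data structures:

    // Hypothetical sketch: a map keyed by a dense, zero-based local id can be a
    // Vec of Options instead of a tree or hash map.
    struct LocalIdMap<T> {
        entries: Vec<Option<T>>,
    }

    impl<T> LocalIdMap<T> {
        fn insert(&mut self, id: usize, value: T) {
            // Grow the backing Vec on demand; dense ids keep the waste small.
            if self.entries.len() <= id {
                self.entries.resize_with(id + 1, || None);
            }
            self.entries[id] = Some(value);
        }

        fn get(&self, id: usize) -> Option<&T> {
            self.entries.get(id).and_then(|slot| slot.as_ref())
        }
    }

    fn main() {
        let mut map = LocalIdMap { entries: Vec::new() };
        map.insert(0, "fn body");
        map.insert(3, "closure");
        assert_eq!(map.get(3), Some(&"closure"));
    }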
@@ -618,7 +618,6 @@ impl<'a> State<'a> {
         self.print_where_clause(&exist.generics.where_clause)?;
         self.s.space()?;
         self.word_space(":")?;
         let mut real_bounds = Vec::with_capacity(exist.bounds.len());
         for b in exist.bounds.iter() {
             if let GenericBound::Trait(ref ptr, hir::TraitBoundModifier::Maybe) = *b {
@@ -54,21 +54,21 @@ struct AstValidator<'a> {
     has_proc_macro_decls: bool,
     has_global_allocator: bool,

-    // Used to ban nested `impl Trait`, e.g., `impl Into<impl Debug>`.
-    // Nested `impl Trait` _is_ allowed in associated type position,
-    // e.g `impl Iterator<Item=impl Debug>`
+    /// Used to ban nested `impl Trait`, e.g., `impl Into<impl Debug>`.
+    /// Nested `impl Trait` _is_ allowed in associated type position,
+    /// e.g `impl Iterator<Item=impl Debug>`
     outer_impl_trait: Option<OuterImplTrait>,

-    // Used to ban `impl Trait` in path projections like `<impl Iterator>::Item`
-    // or `Foo::Bar<impl Trait>`
+    /// Used to ban `impl Trait` in path projections like `<impl Iterator>::Item`
+    /// or `Foo::Bar<impl Trait>`
     is_impl_trait_banned: bool,

-    // rust-lang/rust#57979: the ban of nested `impl Trait` was buggy
-    // until PRs #57730 and #57981 landed: it would jump directly to
-    // walk_ty rather than visit_ty (or skip recurring entirely for
-    // impl trait in projections), and thus miss some cases. We track
-    // whether we should downgrade to a warning for short-term via
-    // these booleans.
+    /// rust-lang/rust#57979: the ban of nested `impl Trait` was buggy
+    /// until PRs #57730 and #57981 landed: it would jump directly to
+    /// walk_ty rather than visit_ty (or skip recurring entirely for
+    /// impl trait in projections), and thus miss some cases. We track
+    /// whether we should downgrade to a warning for short-term via
+    /// these booleans.
     warning_period_57979_didnt_record_next_impl_trait: bool,
     warning_period_57979_impl_trait_in_proj: bool,
 }
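For context on the comments promoted to doc comments above: the distinction they describe is between `impl Trait` nested directly inside another `impl Trait` (rejected by this validator) and `impl Trait` inside an associated-type binding (accepted). An illustrative snippet, my own example rather than code from the PR:

    use std::fmt::Debug;

    // Rejected by the validator: `impl Trait` nested directly in another `impl Trait`.
    // fn banned(x: impl Into<impl Debug>) {}   // error: nested `impl Trait` is not allowed

    // Accepted: nested `impl Trait` in associated-type position.
    fn allowed(xs: impl Iterator<Item = impl Debug>) {
        for x in xs {
            println!("{:?}", x);
        }
    }

    fn main() {
        allowed(vec![1, 2, 3].into_iter());
    }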
@@ -663,7 +663,6 @@ pub fn noop_visit_interpolated<T: MutVisitor>(nt: &mut token::Nonterminal, vis:
         token::NtMeta(meta) => vis.visit_meta_item(meta),
         token::NtPath(path) => vis.visit_path(path),
         token::NtTT(tt) => vis.visit_tt(tt),
-        token::NtArm(arm) => vis.visit_arm(arm),
         token::NtImplItem(item) =>
             visit_clobber(item, |item| {
                 // See reasoning above.
@@ -676,9 +675,6 @@ pub fn noop_visit_interpolated<T: MutVisitor>(nt: &mut token::Nonterminal, vis:
                 vis.flat_map_trait_item(item)
                     .expect_one("expected visitor to produce exactly one item")
             }),
-        token::NtGenerics(generics) => vis.visit_generics(generics),
-        token::NtWhereClause(where_clause) => vis.visit_where_clause(where_clause),
-        token::NtArg(arg) => vis.visit_arg(arg),
         token::NtVis(visib) => vis.visit_vis(visib),
         token::NtForeignItem(item) =>
             visit_clobber(item, |item| {
@@ -1833,7 +1833,7 @@ impl<'a> Parser<'a> {
         Ok(MutTy { ty: t, mutbl: mutbl })
     }

-    fn is_named_argument(&mut self) -> bool {
+    fn is_named_argument(&self) -> bool {
         let offset = match self.token {
             token::Interpolated(ref nt) => match **nt {
                 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
@@ -1881,8 +1881,6 @@ impl<'a> Parser<'a> {
     /// This version of parse arg doesn't necessarily require identifier names.
     fn parse_arg_general(&mut self, require_name: bool, is_trait_item: bool,
                          allow_c_variadic: bool) -> PResult<'a, Arg> {
-        maybe_whole!(self, NtArg, |x| x);
-
         if let Ok(Some(_)) = self.parse_self_arg() {
             let mut err = self.struct_span_err(self.prev_span,
                 "unexpected `self` argument in function");
@@ -2345,27 +2343,27 @@ impl<'a> Parser<'a> {
         })
     }

-    fn mk_expr(&mut self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
+    fn mk_expr(&self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
         P(Expr { node, span, attrs, id: ast::DUMMY_NODE_ID })
     }

-    fn mk_unary(&mut self, unop: ast::UnOp, expr: P<Expr>) -> ast::ExprKind {
+    fn mk_unary(&self, unop: ast::UnOp, expr: P<Expr>) -> ast::ExprKind {
         ExprKind::Unary(unop, expr)
     }

-    fn mk_binary(&mut self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
+    fn mk_binary(&self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
         ExprKind::Binary(binop, lhs, rhs)
     }

-    fn mk_call(&mut self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::ExprKind {
+    fn mk_call(&self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::ExprKind {
         ExprKind::Call(f, args)
     }

-    fn mk_index(&mut self, expr: P<Expr>, idx: P<Expr>) -> ast::ExprKind {
+    fn mk_index(&self, expr: P<Expr>, idx: P<Expr>) -> ast::ExprKind {
         ExprKind::Index(expr, idx)
     }

-    fn mk_range(&mut self,
+    fn mk_range(&self,
                 start: Option<P<Expr>>,
                 end: Option<P<Expr>>,
                 limits: RangeLimits)
@@ -2377,7 +2375,7 @@ impl<'a> Parser<'a> {
         }
     }

-    fn mk_assign_op(&mut self, binop: ast::BinOp,
+    fn mk_assign_op(&self, binop: ast::BinOp,
                     lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
         ExprKind::AssignOp(binop, lhs, rhs)
     }
@@ -2517,13 +2515,12 @@ impl<'a> Parser<'a> {
             hi = path.span;
             return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs));
         }
-        if self.span.rust_2018() && self.check_keyword(keywords::Async)
-        {
-            if self.is_async_block() { // check for `async {` and `async move {`
-                return self.parse_async_block(attrs);
+        if self.span.rust_2018() && self.check_keyword(keywords::Async) {
+            return if self.is_async_block() { // check for `async {` and `async move {`
+                self.parse_async_block(attrs)
             } else {
-                return self.parse_lambda_expr(attrs);
-            }
+                self.parse_lambda_expr(attrs)
+            };
         }
         if self.check_keyword(keywords::Move) || self.check_keyword(keywords::Static) {
             return self.parse_lambda_expr(attrs);
@@ -3448,7 +3445,8 @@ impl<'a> Parser<'a> {
             } else {
                 self.restrictions
             };
-            if op.precedence() < min_prec {
+            let prec = op.precedence();
+            if prec < min_prec {
                 break;
             }
             // Check for deprecated `...` syntax
@@ -3489,8 +3487,7 @@ impl<'a> Parser<'a> {
             // We have 2 alternatives here: `x..y`/`x..=y` and `x..`/`x..=` The other
             // two variants are handled with `parse_prefix_range_expr` call above.
             let rhs = if self.is_at_start_of_range_notation_rhs() {
-                Some(self.parse_assoc_expr_with(op.precedence() + 1,
-                                                LhsExpr::NotYetParsed)?)
+                Some(self.parse_assoc_expr_with(prec + 1, LhsExpr::NotYetParsed)?)
             } else {
                 None
             };
@@ -3510,28 +3507,18 @@ impl<'a> Parser<'a> {
                 break
             }

-            let rhs = match op.fixity() {
-                Fixity::Right => self.with_res(
-                    restrictions - Restrictions::STMT_EXPR,
-                    |this| {
-                        this.parse_assoc_expr_with(op.precedence(),
-                            LhsExpr::NotYetParsed)
-                }),
-                Fixity::Left => self.with_res(
-                    restrictions - Restrictions::STMT_EXPR,
-                    |this| {
-                        this.parse_assoc_expr_with(op.precedence() + 1,
-                            LhsExpr::NotYetParsed)
-                }),
+            let fixity = op.fixity();
+            let prec_adjustment = match fixity {
+                Fixity::Right => 0,
+                Fixity::Left => 1,
                 // We currently have no non-associative operators that are not handled above by
                 // the special cases. The code is here only for future convenience.
-                Fixity::None => self.with_res(
-                    restrictions - Restrictions::STMT_EXPR,
-                    |this| {
-                        this.parse_assoc_expr_with(op.precedence() + 1,
-                            LhsExpr::NotYetParsed)
-                }),
-            }?;
+                Fixity::None => 1,
+            };
+            let rhs = self.with_res(
+                restrictions - Restrictions::STMT_EXPR,
+                |this| this.parse_assoc_expr_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
+            )?;

             // Make sure that the span of the parent node is larger than the span of lhs and rhs,
             // including the attributes.
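The refactor above folds the three `with_res` arms into one call by turning fixity into a precedence adjustment: a right-associative operator reparses its right-hand side at the same precedence, while a left-associative one (and the currently unused non-associative case) reparses it at precedence + 1. A standalone precedence-climbing sketch of that idea, illustrative only and not the rustc parser:

    enum Fixity { Left, Right }

    fn prec(op: char) -> u32 {
        match op { '+' | '-' => 1, '*' | '/' => 2, '^' => 3, _ => 0 }
    }

    fn fixity(op: char) -> Fixity {
        if op == '^' { Fixity::Right } else { Fixity::Left }
    }

    // Parse single digits and single-char operators with precedence climbing.
    fn parse(tokens: &[char], pos: &mut usize, min_prec: u32) -> i64 {
        let mut lhs = tokens[*pos].to_digit(10).unwrap() as i64;
        *pos += 1;
        while *pos < tokens.len() {
            let op = tokens[*pos];
            if prec(op) < min_prec {
                break;
            }
            *pos += 1;
            // Same trick as the diff: associativity becomes a precedence adjustment.
            let prec_adjustment = match fixity(op) {
                Fixity::Right => 0, // rhs may start with the same operator again
                Fixity::Left => 1,  // rhs must bind strictly tighter
            };
            let rhs = parse(tokens, pos, prec(op) + prec_adjustment);
            lhs = match op {
                '+' => lhs + rhs,
                '-' => lhs - rhs,
                '*' => lhs * rhs,
                '/' => lhs / rhs,
                '^' => lhs.pow(rhs as u32),
                _ => unreachable!(),
            };
        }
        lhs
    }

    fn main() {
        let expr: Vec<char> = "8-3-2".chars().collect();
        assert_eq!(parse(&expr, &mut 0, 0), 3); // left-assoc: (8 - 3) - 2
        let expr: Vec<char> = "2^3^2".chars().collect();
        assert_eq!(parse(&expr, &mut 0, 0), 512); // right-assoc: 2 ^ (3 ^ 2)
    }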
@@ -3577,7 +3564,7 @@ impl<'a> Parser<'a> {
                 }
             };

-            if op.fixity() == Fixity::None { break }
+            if let Fixity::None = fixity { break }
         }
         Ok(lhs)
     }
@@ -3714,7 +3701,7 @@ impl<'a> Parser<'a> {
     /// Produce an error if comparison operators are chained (RFC #558).
     /// We only need to check lhs, not rhs, because all comparison ops
     /// have same precedence and are left-associative
-    fn check_no_chained_comparison(&mut self, lhs: &Expr, outer_op: &AssocOp) {
+    fn check_no_chained_comparison(&self, lhs: &Expr, outer_op: &AssocOp) {
         debug_assert!(outer_op.is_comparison(),
                       "check_no_chained_comparison: {:?} is not comparison",
                       outer_op);
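The reasoning in the doc comment above (checking only the lhs suffices) follows from left associativity: by the time the outer comparison operator is seen, `a < b < c` has already been folded into `(a < b) < c`, so the inner comparison is the lhs. A toy illustration with made-up types, not the parser's real AST:

    #[derive(Debug)]
    enum Expr {
        Num(i64),
        Cmp(Box<Expr>, Box<Expr>), // a single comparison such as `a < b`
    }

    // Mirrors the shape of the check: a chained comparison always shows up as a
    // comparison already sitting in the lhs of another comparison.
    fn check_no_chained_comparison(lhs: &Expr) -> Result<(), &'static str> {
        match lhs {
            Expr::Cmp(..) => Err("comparison operators cannot be chained"),
            _ => Ok(()),
        }
    }

    fn main() {
        // `1 < 2 < 3` parses left-associatively as `(1 < 2) < 3`.
        let inner = Expr::Cmp(Box::new(Expr::Num(1)), Box::new(Expr::Num(2)));
        // About to build the outer comparison with `inner` as its lhs:
        assert!(check_no_chained_comparison(&inner).is_err());
        assert!(check_no_chained_comparison(&Expr::Num(3)).is_ok());
    }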
@@ -4053,8 +4040,6 @@ impl<'a> Parser<'a> {
     }

     crate fn parse_arm(&mut self) -> PResult<'a, Arm> {
-        maybe_whole!(self, NtArm, |x| x);
-
         let attrs = self.parse_outer_attributes()?;
         let pats = self.parse_pats()?;
         let guard = if self.eat_keyword(keywords::If) {
@@ -5011,7 +4996,7 @@ impl<'a> Parser<'a> {
         })
     }

-    fn is_async_block(&mut self) -> bool {
+    fn is_async_block(&self) -> bool {
         self.token.is_keyword(keywords::Async) &&
         (
             ( // `async move {`
@@ -5023,19 +5008,19 @@ impl<'a> Parser<'a> {
         )
     }

-    fn is_async_fn(&mut self) -> bool {
+    fn is_async_fn(&self) -> bool {
         self.token.is_keyword(keywords::Async) &&
             self.look_ahead(1, |t| t.is_keyword(keywords::Fn))
     }

-    fn is_do_catch_block(&mut self) -> bool {
+    fn is_do_catch_block(&self) -> bool {
         self.token.is_keyword(keywords::Do) &&
         self.look_ahead(1, |t| t.is_keyword(keywords::Catch)) &&
         self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) &&
         !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
     }

-    fn is_try_block(&mut self) -> bool {
+    fn is_try_block(&self) -> bool {
         self.token.is_keyword(keywords::Try) &&
         self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) &&
         self.span.rust_2018() &&
@@ -5057,7 +5042,7 @@ impl<'a> Parser<'a> {
         self.look_ahead(1, |t| t.is_keyword(keywords::Type))
     }

-    fn is_auto_trait_item(&mut self) -> bool {
+    fn is_auto_trait_item(&self) -> bool {
         // auto trait
         (self.token.is_keyword(keywords::Auto)
             && self.look_ahead(1, |t| t.is_keyword(keywords::Trait)))
@@ -5319,7 +5304,7 @@ impl<'a> Parser<'a> {
     }

     /// Checks if this expression is a successfully parsed statement.
-    fn expr_is_complete(&mut self, e: &Expr) -> bool {
+    fn expr_is_complete(&self, e: &Expr) -> bool {
         self.restrictions.contains(Restrictions::STMT_EXPR) &&
             !classify::expr_requires_semi_to_be_stmt(e)
     }
@@ -5789,8 +5774,6 @@ impl<'a> Parser<'a> {
     /// | ( < lifetimes , typaramseq ( , )? > )
     /// where typaramseq = ( typaram ) | ( typaram , typaramseq )
     fn parse_generics(&mut self) -> PResult<'a, ast::Generics> {
-        maybe_whole!(self, NtGenerics, |x| x);
-
         let span_lo = self.span;
         if self.eat_lt() {
             let params = self.parse_generic_params()?;
@@ -6043,8 +6026,6 @@ impl<'a> Parser<'a> {
     /// where T : Trait<U, V> + 'b, 'a : 'b
     /// ```
     fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> {
-        maybe_whole!(self, NtWhereClause, |x| x);
-
         let mut where_clause = WhereClause {
             id: ast::DUMMY_NODE_ID,
             predicates: Vec::new(),
@@ -6391,7 +6372,7 @@ impl<'a> Parser<'a> {
         Ok((id, generics))
     }

-    fn mk_item(&mut self, span: Span, ident: Ident, node: ItemKind, vis: Visibility,
+    fn mk_item(&self, span: Span, ident: Ident, node: ItemKind, vis: Visibility,
                attrs: Vec<Attribute>) -> P<Item> {
         P(Item {
             ident,
@@ -6423,7 +6404,7 @@ impl<'a> Parser<'a> {

     /// Returns `true` if we are looking at `const ID`
     /// (returns `false` for things like `const fn`, etc.).
-    fn is_const_item(&mut self) -> bool {
+    fn is_const_item(&self) -> bool {
         self.token.is_keyword(keywords::Const) &&
             !self.look_ahead(1, |t| t.is_keyword(keywords::Fn)) &&
             !self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe))
@@ -6531,7 +6512,7 @@ impl<'a> Parser<'a> {
         })
     }

-    fn complain_if_pub_macro(&mut self, vis: &VisibilityKind, sp: Span) {
+    fn complain_if_pub_macro(&self, vis: &VisibilityKind, sp: Span) {
         match *vis {
             VisibilityKind::Inherited => {}
             _ => {
@@ -6560,7 +6541,7 @@ impl<'a> Parser<'a> {
         }
     }

-    fn missing_assoc_item_kind_err(&mut self, item_type: &str, prev_span: Span)
+    fn missing_assoc_item_kind_err(&self, item_type: &str, prev_span: Span)
         -> DiagnosticBuilder<'a>
     {
         let expected_kinds = if item_type == "extern" {
@@ -597,14 +597,12 @@ pub enum Nonterminal {
     NtPath(ast::Path),
     NtVis(ast::Visibility),
     NtTT(TokenTree),
-    // These are not exposed to macros, but are used by quasiquote.
-    NtArm(ast::Arm),
-    NtImplItem(ast::ImplItem),
+    // Used only for passing items to proc macro attributes (they are not
+    // strictly necessary for that, `Annotatable` can be converted into
+    // tokens directly, but doing that naively regresses pretty-printing).
     NtTraitItem(ast::TraitItem),
+    NtImplItem(ast::ImplItem),
     NtForeignItem(ast::ForeignItem),
-    NtGenerics(ast::Generics),
-    NtWhereClause(ast::WhereClause),
-    NtArg(ast::Arg),
 }

 impl PartialEq for Nonterminal {
@@ -637,13 +635,9 @@ impl fmt::Debug for Nonterminal {
             NtMeta(..) => f.pad("NtMeta(..)"),
             NtPath(..) => f.pad("NtPath(..)"),
             NtTT(..) => f.pad("NtTT(..)"),
-            NtArm(..) => f.pad("NtArm(..)"),
             NtImplItem(..) => f.pad("NtImplItem(..)"),
             NtTraitItem(..) => f.pad("NtTraitItem(..)"),
             NtForeignItem(..) => f.pad("NtForeignItem(..)"),
-            NtGenerics(..) => f.pad("NtGenerics(..)"),
-            NtWhereClause(..) => f.pad("NtWhereClause(..)"),
-            NtArg(..) => f.pad("NtArg(..)"),
             NtVis(..) => f.pad("NtVis(..)"),
             NtLifetime(..) => f.pad("NtLifetime(..)"),
         }
@@ -263,12 +263,8 @@ pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
         token::NtLifetime(e) => ident_to_string(e),
         token::NtLiteral(ref e) => expr_to_string(e),
         token::NtTT(ref tree) => tt_to_string(tree.clone()),
-        token::NtArm(ref e) => arm_to_string(e),
         token::NtImplItem(ref e) => impl_item_to_string(e),
         token::NtTraitItem(ref e) => trait_item_to_string(e),
-        token::NtGenerics(ref e) => generic_params_to_string(&e.params),
-        token::NtWhereClause(ref e) => where_clause_to_string(e),
-        token::NtArg(ref e) => arg_to_string(e),
         token::NtVis(ref e) => vis_to_string(e),
         token::NtForeignItem(ref e) => foreign_item_to_string(e),
     }
src/test/ui/issues/issue-60662.rs (new file, 11 lines)
@@ -0,0 +1,11 @@
+// compile-pass
+// compile-flags: -Z unpretty=hir
+
+#![feature(existential_type)]
+
+trait Animal {
+}
+
+fn main() {
+    pub existential type ServeFut: Animal;
+}
src/test/ui/issues/issue-60662.stdout (new file, 14 lines)
@@ -0,0 +1,14 @@
+// compile-pass
+// compile-flags: -Z unpretty=hir
+
+#![feature(existential_type)]
+#[prelude_import]
+use ::std::prelude::v1::*;
+#[macro_use]
+extern crate std;
+
+trait Animal { }
+
+fn main() {
+    pub existential type ServeFut : Animal;
+}