Auto merge of #60767 - Centril:rollup-4cbsb73, r=Centril
Rollup of 4 pull requests

Successful merges:

 - #60694 (Fix HIR printing of existential type #60662)
 - #60750 (syntax: Remove some legacy nonterminal tokens)
 - #60751 (Assorted cleanup in parser & AST validation)
 - #60752 (Fix minor typos for ItemLocalId)

Failed merges:

r? @ghost
commit 4443957f27

9 changed files with 80 additions and 89 deletions
@@ -126,12 +126,12 @@ mod item_local_id_inner {
     use rustc_macros::HashStable;
     newtype_index! {
         /// An `ItemLocalId` uniquely identifies something within a given "item-like",
-        /// that is within a hir::Item, hir::TraitItem, or hir::ImplItem. There is no
+        /// that is, within a hir::Item, hir::TraitItem, or hir::ImplItem. There is no
         /// guarantee that the numerical value of a given `ItemLocalId` corresponds to
         /// the node's position within the owning item in any way, but there is a
         /// guarantee that the `LocalItemId`s within an owner occupy a dense range of
         /// integers starting at zero, so a mapping that maps all or most nodes within
-        /// an "item-like" to something else can be implement by a `Vec` instead of a
+        /// an "item-like" to something else can be implemented by a `Vec` instead of a
         /// tree or hash map.
         pub struct ItemLocalId {
             derive [HashStable]
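The typo fixes above come from #60752; the doc comment they touch also states the invariant that matters in practice: `ItemLocalId`s within one owner are dense and start at zero, so a per-item side table can be a plain vector instead of a tree or hash map. A minimal sketch of that pattern, using an illustrative `SideTable` type and raw `usize` indices rather than rustc's actual index machinery:

    // Hypothetical side table keyed by a dense, zero-based local id.
    struct SideTable<T> {
        data: Vec<Option<T>>,
    }

    impl<T> SideTable<T> {
        fn insert(&mut self, id: usize, value: T) {
            // Dense ids keep the vector short and mostly occupied.
            if id >= self.data.len() {
                self.data.resize_with(id + 1, || None);
            }
            self.data[id] = Some(value);
        }

        fn get(&self, id: usize) -> Option<&T> {
            self.data.get(id).and_then(|slot| slot.as_ref())
        }
    }

    fn main() {
        let mut names = SideTable { data: Vec::new() };
        names.insert(0, "item body");
        names.insert(3, "closure");
        assert_eq!(names.get(3), Some(&"closure"));
        assert_eq!(names.get(7), None);
    }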
@@ -618,7 +618,6 @@ impl<'a> State<'a> {

                 self.print_where_clause(&exist.generics.where_clause)?;
                 self.s.space()?;
-                self.word_space(":")?;
                 let mut real_bounds = Vec::with_capacity(exist.bounds.len());
                 for b in exist.bounds.iter() {
                     if let GenericBound::Trait(ref ptr, hir::TraitBoundModifier::Maybe) = *b {
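This one-line removal is the #60694 fix for #60662: the separator is evidently already emitted when the collected bounds are printed, so printing it here as well doubled it in `-Z unpretty=hir` output for `existential type` items (the new test at the end of this commit pins the corrected output). A toy illustration of that kind of duplication, not rustc's actual printer API:

    // Both the caller and the helper emit the separator, so it appears twice.
    fn print_bounds(out: &mut String, prefix: &str, bounds: &[&str]) {
        if !bounds.is_empty() {
            out.push_str(prefix);
            out.push(' ');
            out.push_str(&bounds.join(" + "));
        }
    }

    fn main() {
        let mut buggy = String::from("existential type ServeFut");
        buggy.push_str(": ");                        // the caller's own separator
        print_bounds(&mut buggy, ":", &["Animal"]);  // the helper adds another one
        println!("{}", buggy);                       // existential type ServeFut: : Animal

        let mut fixed = String::from("existential type ServeFut");
        print_bounds(&mut fixed, " :", &["Animal"]);
        println!("{}", fixed);                       // existential type ServeFut : Animal
    }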
@@ -54,21 +54,21 @@ struct AstValidator<'a> {
     has_proc_macro_decls: bool,
     has_global_allocator: bool,

-    // Used to ban nested `impl Trait`, e.g., `impl Into<impl Debug>`.
-    // Nested `impl Trait` _is_ allowed in associated type position,
-    // e.g `impl Iterator<Item=impl Debug>`
+    /// Used to ban nested `impl Trait`, e.g., `impl Into<impl Debug>`.
+    /// Nested `impl Trait` _is_ allowed in associated type position,
+    /// e.g `impl Iterator<Item=impl Debug>`
     outer_impl_trait: Option<OuterImplTrait>,

-    // Used to ban `impl Trait` in path projections like `<impl Iterator>::Item`
-    // or `Foo::Bar<impl Trait>`
+    /// Used to ban `impl Trait` in path projections like `<impl Iterator>::Item`
+    /// or `Foo::Bar<impl Trait>`
     is_impl_trait_banned: bool,

-    // rust-lang/rust#57979: the ban of nested `impl Trait` was buggy
-    // until PRs #57730 and #57981 landed: it would jump directly to
-    // walk_ty rather than visit_ty (or skip recurring entirely for
-    // impl trait in projections), and thus miss some cases. We track
-    // whether we should downgrade to a warning for short-term via
-    // these booleans.
+    /// rust-lang/rust#57979: the ban of nested `impl Trait` was buggy
+    /// until PRs #57730 and #57981 landed: it would jump directly to
+    /// walk_ty rather than visit_ty (or skip recurring entirely for
+    /// impl trait in projections), and thus miss some cases. We track
+    /// whether we should downgrade to a warning for short-term via
+    /// these booleans.
     warning_period_57979_didnt_record_next_impl_trait: bool,
     warning_period_57979_impl_trait_in_proj: bool,
 }
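The comments promoted to doc comments above describe the two shapes the validator distinguishes: nested `impl Trait` as a generic argument is rejected, while nesting inside an associated type binding is allowed. A standalone illustration (ordinary functions written for this note, not code from the commit):

    use std::fmt::Debug;

    // Rejected by AST validation: nested `impl Trait` as a generic argument.
    // fn bad(x: impl Into<impl Debug>) {}   // error: nested `impl Trait` is not allowed

    // Accepted: nested `impl Trait` in associated type position.
    fn good() -> impl Iterator<Item = impl Debug> {
        vec![1, 2, 3].into_iter()
    }

    fn main() {
        for x in good() {
            println!("{:?}", x);
        }
    }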
@@ -663,7 +663,6 @@ pub fn noop_visit_interpolated<T: MutVisitor>(nt: &mut token::Nonterminal, vis:
         token::NtMeta(meta) => vis.visit_meta_item(meta),
         token::NtPath(path) => vis.visit_path(path),
         token::NtTT(tt) => vis.visit_tt(tt),
-        token::NtArm(arm) => vis.visit_arm(arm),
         token::NtImplItem(item) =>
             visit_clobber(item, |item| {
                 // See reasoning above.
@@ -676,9 +675,6 @@ pub fn noop_visit_interpolated<T: MutVisitor>(nt: &mut token::Nonterminal, vis:
                 vis.flat_map_trait_item(item)
                     .expect_one("expected visitor to produce exactly one item")
             }),
-        token::NtGenerics(generics) => vis.visit_generics(generics),
-        token::NtWhereClause(where_clause) => vis.visit_where_clause(where_clause),
-        token::NtArg(arg) => vis.visit_arg(arg),
         token::NtVis(visib) => vis.visit_vis(visib),
         token::NtForeignItem(item) =>
             visit_clobber(item, |item| {
@@ -1833,7 +1833,7 @@ impl<'a> Parser<'a> {
         Ok(MutTy { ty: t, mutbl: mutbl })
     }

-    fn is_named_argument(&mut self) -> bool {
+    fn is_named_argument(&self) -> bool {
         let offset = match self.token {
             token::Interpolated(ref nt) => match **nt {
                 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
@@ -1881,8 +1881,6 @@ impl<'a> Parser<'a> {
     /// This version of parse arg doesn't necessarily require identifier names.
     fn parse_arg_general(&mut self, require_name: bool, is_trait_item: bool,
                          allow_c_variadic: bool) -> PResult<'a, Arg> {
-        maybe_whole!(self, NtArg, |x| x);
-
         if let Ok(Some(_)) = self.parse_self_arg() {
             let mut err = self.struct_span_err(self.prev_span,
                 "unexpected `self` argument in function");
@@ -2345,27 +2343,27 @@ impl<'a> Parser<'a> {
         })
     }

-    fn mk_expr(&mut self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
+    fn mk_expr(&self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
         P(Expr { node, span, attrs, id: ast::DUMMY_NODE_ID })
     }

-    fn mk_unary(&mut self, unop: ast::UnOp, expr: P<Expr>) -> ast::ExprKind {
+    fn mk_unary(&self, unop: ast::UnOp, expr: P<Expr>) -> ast::ExprKind {
         ExprKind::Unary(unop, expr)
     }

-    fn mk_binary(&mut self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
+    fn mk_binary(&self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
         ExprKind::Binary(binop, lhs, rhs)
     }

-    fn mk_call(&mut self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::ExprKind {
+    fn mk_call(&self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::ExprKind {
         ExprKind::Call(f, args)
     }

-    fn mk_index(&mut self, expr: P<Expr>, idx: P<Expr>) -> ast::ExprKind {
+    fn mk_index(&self, expr: P<Expr>, idx: P<Expr>) -> ast::ExprKind {
         ExprKind::Index(expr, idx)
     }

-    fn mk_range(&mut self,
+    fn mk_range(&self,
                 start: Option<P<Expr>>,
                 end: Option<P<Expr>>,
                 limits: RangeLimits)
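These `mk_*` helpers only assemble an `ExprKind` or `Expr` from their arguments and never touch parser state, which is why the cleanup in #60751 can relax their receivers from `&mut self` to `&self` (the same relaxation is applied to the lookahead-only `is_*` predicates further down). A minimal, made-up illustration of the distinction:

    #[derive(Debug)]
    enum ExprKind {
        Lit(i64),
        Unary(&'static str, Box<ExprKind>),
    }

    struct Builder;

    impl Builder {
        // Pure constructor: reads nothing from `self` and mutates nothing,
        // so a shared `&self` receiver is all it needs.
        fn mk_unary(&self, op: &'static str, expr: ExprKind) -> ExprKind {
            ExprKind::Unary(op, Box::new(expr))
        }
    }

    fn main() {
        let b = Builder;
        println!("{:?}", b.mk_unary("-", ExprKind::Lit(42)));
    }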
@@ -2377,7 +2375,7 @@ impl<'a> Parser<'a> {
         }
     }

-    fn mk_assign_op(&mut self, binop: ast::BinOp,
+    fn mk_assign_op(&self, binop: ast::BinOp,
                     lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
         ExprKind::AssignOp(binop, lhs, rhs)
     }
@@ -2517,13 +2515,12 @@ impl<'a> Parser<'a> {
                     hi = path.span;
                     return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs));
                 }
-                if self.span.rust_2018() && self.check_keyword(keywords::Async)
-                {
-                    if self.is_async_block() { // check for `async {` and `async move {`
-                        return self.parse_async_block(attrs);
+                if self.span.rust_2018() && self.check_keyword(keywords::Async) {
+                    return if self.is_async_block() { // check for `async {` and `async move {`
+                        self.parse_async_block(attrs)
                     } else {
-                        return self.parse_lambda_expr(attrs);
-                    }
+                        self.parse_lambda_expr(attrs)
+                    };
                 }
                 if self.check_keyword(keywords::Move) || self.check_keyword(keywords::Static) {
                     return self.parse_lambda_expr(attrs);
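The rewrite above replaces two early `return`s with a single `return if … { … } else { … };`, leaning on the fact that `if`/`else` is an expression in Rust. A self-contained example of the idiom, unrelated to the parser:

    fn describe(n: i32) -> &'static str {
        // The whole `if`/`else` evaluates to one value, so it can be returned as a unit.
        return if n % 2 == 0 { "even" } else { "odd" };
    }

    fn main() {
        println!("7 is {}", describe(7));
    }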
@@ -3448,7 +3445,8 @@ impl<'a> Parser<'a> {
             } else {
                 self.restrictions
             };
-            if op.precedence() < min_prec {
+            let prec = op.precedence();
+            if prec < min_prec {
                 break;
             }
             // Check for deprecated `...` syntax
@@ -3489,8 +3487,7 @@ impl<'a> Parser<'a> {
                 // We have 2 alternatives here: `x..y`/`x..=y` and `x..`/`x..=` The other
                 // two variants are handled with `parse_prefix_range_expr` call above.
                 let rhs = if self.is_at_start_of_range_notation_rhs() {
-                    Some(self.parse_assoc_expr_with(op.precedence() + 1,
-                                                    LhsExpr::NotYetParsed)?)
+                    Some(self.parse_assoc_expr_with(prec + 1, LhsExpr::NotYetParsed)?)
                 } else {
                     None
                 };
@@ -3510,28 +3507,18 @@ impl<'a> Parser<'a> {
                 break
             }

-            let rhs = match op.fixity() {
-                Fixity::Right => self.with_res(
-                    restrictions - Restrictions::STMT_EXPR,
-                    |this| {
-                        this.parse_assoc_expr_with(op.precedence(),
-                            LhsExpr::NotYetParsed)
-                    }),
-                Fixity::Left => self.with_res(
-                    restrictions - Restrictions::STMT_EXPR,
-                    |this| {
-                        this.parse_assoc_expr_with(op.precedence() + 1,
-                            LhsExpr::NotYetParsed)
-                    }),
+            let fixity = op.fixity();
+            let prec_adjustment = match fixity {
+                Fixity::Right => 0,
+                Fixity::Left => 1,
                 // We currently have no non-associative operators that are not handled above by
                 // the special cases. The code is here only for future convenience.
-                Fixity::None => self.with_res(
-                    restrictions - Restrictions::STMT_EXPR,
-                    |this| {
-                        this.parse_assoc_expr_with(op.precedence() + 1,
-                            LhsExpr::NotYetParsed)
-                    }),
-            }?;
+                Fixity::None => 1,
+            };
+            let rhs = self.with_res(
+                restrictions - Restrictions::STMT_EXPR,
+                |this| this.parse_assoc_expr_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
+            )?;

             // Make sure that the span of the parent node is larger than the span of lhs and rhs,
             // including the attributes.
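The refactor above (part of #60751) collapses three nearly identical `with_res` calls into one whose right-hand-side precedence is `prec + prec_adjustment`: `0` for right-associative operators, since the right operand may begin another chain of the same operator, and `1` for left-associative and non-associative ones, since the right operand must bind strictly tighter. A self-contained precedence-climbing sketch of that rule over a toy grammar, written for this note rather than taken from the parser:

    #[derive(Clone, Copy)]
    enum Fixity { Left, Right }

    // Toy operator table: `+` is left-associative, `=` is right-associative.
    fn op_info(op: char) -> Option<(u8, Fixity)> {
        match op {
            '=' => Some((1, Fixity::Right)),
            '+' => Some((2, Fixity::Left)),
            _ => None,
        }
    }

    // Precedence climbing over single-character tokens (operands and operators).
    fn parse_expr(tokens: &[char], pos: &mut usize, min_prec: u8) -> String {
        let mut lhs = tokens[*pos].to_string();
        *pos += 1;
        while *pos < tokens.len() {
            let (prec, fixity) = match op_info(tokens[*pos]) {
                Some(info) if info.0 >= min_prec => info,
                _ => break,
            };
            let op = tokens[*pos];
            *pos += 1;
            // The same rule as the parser change: right-assoc recurses at `prec`,
            // left-assoc at `prec + 1`.
            let prec_adjustment = match fixity {
                Fixity::Right => 0,
                Fixity::Left => 1,
            };
            let rhs = parse_expr(tokens, pos, prec + prec_adjustment);
            lhs = format!("({} {} {})", lhs, op, rhs);
        }
        lhs
    }

    fn main() {
        let tokens: Vec<char> = "1+2+3".chars().collect();
        let mut pos = 0;
        // Left-associative: prints ((1 + 2) + 3).
        println!("{}", parse_expr(&tokens, &mut pos, 0));

        let tokens: Vec<char> = "a=b=c".chars().collect();
        let mut pos = 0;
        // Right-associative: prints (a = (b = c)).
        println!("{}", parse_expr(&tokens, &mut pos, 0));
    }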
@@ -3577,7 +3564,7 @@ impl<'a> Parser<'a> {
                 }
             };

-            if op.fixity() == Fixity::None { break }
+            if let Fixity::None = fixity { break }
         }
         Ok(lhs)
     }
@@ -3714,7 +3701,7 @@ impl<'a> Parser<'a> {
     /// Produce an error if comparison operators are chained (RFC #558).
     /// We only need to check lhs, not rhs, because all comparison ops
     /// have same precedence and are left-associative
-    fn check_no_chained_comparison(&mut self, lhs: &Expr, outer_op: &AssocOp) {
+    fn check_no_chained_comparison(&self, lhs: &Expr, outer_op: &AssocOp) {
         debug_assert!(outer_op.is_comparison(),
                       "check_no_chained_comparison: {:?} is not comparison",
                       outer_op);
@@ -4053,8 +4040,6 @@ impl<'a> Parser<'a> {
     }

     crate fn parse_arm(&mut self) -> PResult<'a, Arm> {
-        maybe_whole!(self, NtArm, |x| x);
-
         let attrs = self.parse_outer_attributes()?;
         let pats = self.parse_pats()?;
         let guard = if self.eat_keyword(keywords::If) {
@@ -5011,7 +4996,7 @@ impl<'a> Parser<'a> {
         })
     }

-    fn is_async_block(&mut self) -> bool {
+    fn is_async_block(&self) -> bool {
         self.token.is_keyword(keywords::Async) &&
         (
             ( // `async move {`
@@ -5023,19 +5008,19 @@ impl<'a> Parser<'a> {
         )
     }

-    fn is_async_fn(&mut self) -> bool {
+    fn is_async_fn(&self) -> bool {
         self.token.is_keyword(keywords::Async) &&
         self.look_ahead(1, |t| t.is_keyword(keywords::Fn))
     }

-    fn is_do_catch_block(&mut self) -> bool {
+    fn is_do_catch_block(&self) -> bool {
         self.token.is_keyword(keywords::Do) &&
         self.look_ahead(1, |t| t.is_keyword(keywords::Catch)) &&
         self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) &&
         !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
     }

-    fn is_try_block(&mut self) -> bool {
+    fn is_try_block(&self) -> bool {
         self.token.is_keyword(keywords::Try) &&
         self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) &&
         self.span.rust_2018() &&
@@ -5057,7 +5042,7 @@ impl<'a> Parser<'a> {
         self.look_ahead(1, |t| t.is_keyword(keywords::Type))
     }

-    fn is_auto_trait_item(&mut self) -> bool {
+    fn is_auto_trait_item(&self) -> bool {
         // auto trait
         (self.token.is_keyword(keywords::Auto)
             && self.look_ahead(1, |t| t.is_keyword(keywords::Trait)))
@@ -5319,7 +5304,7 @@ impl<'a> Parser<'a> {
     }

     /// Checks if this expression is a successfully parsed statement.
-    fn expr_is_complete(&mut self, e: &Expr) -> bool {
+    fn expr_is_complete(&self, e: &Expr) -> bool {
         self.restrictions.contains(Restrictions::STMT_EXPR) &&
         !classify::expr_requires_semi_to_be_stmt(e)
     }
@@ -5789,8 +5774,6 @@ impl<'a> Parser<'a> {
     /// | ( < lifetimes , typaramseq ( , )? > )
     /// where typaramseq = ( typaram ) | ( typaram , typaramseq )
     fn parse_generics(&mut self) -> PResult<'a, ast::Generics> {
-        maybe_whole!(self, NtGenerics, |x| x);
-
         let span_lo = self.span;
         if self.eat_lt() {
             let params = self.parse_generic_params()?;
@@ -6043,8 +6026,6 @@ impl<'a> Parser<'a> {
     /// where T : Trait<U, V> + 'b, 'a : 'b
     /// ```
     fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> {
-        maybe_whole!(self, NtWhereClause, |x| x);
-
         let mut where_clause = WhereClause {
             id: ast::DUMMY_NODE_ID,
             predicates: Vec::new(),
@@ -6391,7 +6372,7 @@ impl<'a> Parser<'a> {
         Ok((id, generics))
     }

-    fn mk_item(&mut self, span: Span, ident: Ident, node: ItemKind, vis: Visibility,
+    fn mk_item(&self, span: Span, ident: Ident, node: ItemKind, vis: Visibility,
                attrs: Vec<Attribute>) -> P<Item> {
         P(Item {
             ident,
@@ -6423,7 +6404,7 @@ impl<'a> Parser<'a> {

     /// Returns `true` if we are looking at `const ID`
     /// (returns `false` for things like `const fn`, etc.).
-    fn is_const_item(&mut self) -> bool {
+    fn is_const_item(&self) -> bool {
         self.token.is_keyword(keywords::Const) &&
         !self.look_ahead(1, |t| t.is_keyword(keywords::Fn)) &&
         !self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe))
@@ -6531,7 +6512,7 @@ impl<'a> Parser<'a> {
         })
     }

-    fn complain_if_pub_macro(&mut self, vis: &VisibilityKind, sp: Span) {
+    fn complain_if_pub_macro(&self, vis: &VisibilityKind, sp: Span) {
         match *vis {
             VisibilityKind::Inherited => {}
             _ => {
@@ -6560,7 +6541,7 @@ impl<'a> Parser<'a> {
         }
     }

-    fn missing_assoc_item_kind_err(&mut self, item_type: &str, prev_span: Span)
+    fn missing_assoc_item_kind_err(&self, item_type: &str, prev_span: Span)
         -> DiagnosticBuilder<'a>
     {
         let expected_kinds = if item_type == "extern" {
@@ -597,14 +597,12 @@ pub enum Nonterminal {
     NtPath(ast::Path),
     NtVis(ast::Visibility),
     NtTT(TokenTree),
-    // These are not exposed to macros, but are used by quasiquote.
-    NtArm(ast::Arm),
-    NtImplItem(ast::ImplItem),
+    // Used only for passing items to proc macro attributes (they are not
+    // strictly necessary for that, `Annotatable` can be converted into
+    // tokens directly, but doing that naively regresses pretty-printing).
     NtTraitItem(ast::TraitItem),
+    NtImplItem(ast::ImplItem),
     NtForeignItem(ast::ForeignItem),
-    NtGenerics(ast::Generics),
-    NtWhereClause(ast::WhereClause),
-    NtArg(ast::Arg),
 }

 impl PartialEq for Nonterminal {
@@ -637,13 +635,9 @@ impl fmt::Debug for Nonterminal {
             NtMeta(..) => f.pad("NtMeta(..)"),
             NtPath(..) => f.pad("NtPath(..)"),
             NtTT(..) => f.pad("NtTT(..)"),
-            NtArm(..) => f.pad("NtArm(..)"),
             NtImplItem(..) => f.pad("NtImplItem(..)"),
             NtTraitItem(..) => f.pad("NtTraitItem(..)"),
             NtForeignItem(..) => f.pad("NtForeignItem(..)"),
-            NtGenerics(..) => f.pad("NtGenerics(..)"),
-            NtWhereClause(..) => f.pad("NtWhereClause(..)"),
-            NtArg(..) => f.pad("NtArg(..)"),
             NtVis(..) => f.pad("NtVis(..)"),
             NtLifetime(..) => f.pad("NtLifetime(..)"),
         }
@@ -263,12 +263,8 @@ pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
         token::NtLifetime(e) => ident_to_string(e),
         token::NtLiteral(ref e) => expr_to_string(e),
         token::NtTT(ref tree) => tt_to_string(tree.clone()),
-        token::NtArm(ref e) => arm_to_string(e),
         token::NtImplItem(ref e) => impl_item_to_string(e),
         token::NtTraitItem(ref e) => trait_item_to_string(e),
-        token::NtGenerics(ref e) => generic_params_to_string(&e.params),
-        token::NtWhereClause(ref e) => where_clause_to_string(e),
-        token::NtArg(ref e) => arg_to_string(e),
         token::NtVis(ref e) => vis_to_string(e),
         token::NtForeignItem(ref e) => foreign_item_to_string(e),
     }
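Dropping `NtArm`, `NtGenerics`, `NtWhereClause`, and `NtArg` from the `Nonterminal` enum (#60750) is what forces the matching arms out of `noop_visit_interpolated`, the `Debug` impl, and `nonterminal_to_string` in the same commit: a match arm for a variant that no longer exists is a compile error, not a silent fallthrough. A small illustration with a made-up enum:

    enum Nt {
        Item,
        Expr,
        // Arm,   // variant removed, as NtArm was
    }

    fn describe(nt: &Nt) -> &'static str {
        match nt {
            Nt::Item => "an item",
            Nt::Expr => "an expression",
            // Nt::Arm => "a match arm",   // keeping this arm would no longer compile
        }
    }

    fn main() {
        println!("{}", describe(&Nt::Item));
        println!("{}", describe(&Nt::Expr));
    }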
src/test/ui/issues/issue-60662.rs (new file, 11 lines)
@@ -0,0 +1,11 @@
+// compile-pass
+// compile-flags: -Z unpretty=hir
+
+#![feature(existential_type)]
+
+trait Animal {
+}
+
+fn main() {
+    pub existential type ServeFut: Animal;
+}
src/test/ui/issues/issue-60662.stdout (new file, 14 lines)
@@ -0,0 +1,14 @@
+// compile-pass
+// compile-flags: -Z unpretty=hir
+
+#![feature(existential_type)]
+#[prelude_import]
+use ::std::prelude::v1::*;
+#[macro_use]
+extern crate std;
+
+trait Animal { }
+
+fn main() {
+    pub existential type ServeFut : Animal;
+}