Replace ast::TokenKind::BinOp{,Eq} and remove BinOpToken.
`BinOpToken` is badly named, because it only covers the assignable binary ops and excludes comparisons and `&&`/`||`. Its use in `ast::TokenKind` does allow a small amount of code sharing, but it's a clumsy factoring. This commit removes `ast::TokenKind::BinOp{,Eq}`, replacing each one with 10 individual variants. This makes `ast::TokenKind` more similar to `rustc_lexer::TokenKind`, which has individual variants for all operators. Although the number of lines of code increases, the number of chars decreases due to the frequent use of shorter names like `token::Plus` instead of `token::BinOp(BinOpToken::Plus)`.
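As a rough before/after sketch of the renaming (illustrative only, not part of the diff below; it assumes the post-change variant names and the internal `rustc_ast` crate, and the helper name is made up), a match on operator tokens now names each token directly instead of going through `BinOpToken`:

    use rustc_ast::token::TokenKind;

    // Hypothetical helper: map a few operator tokens to their surface syntax.
    fn op_str(kind: &TokenKind) -> Option<&'static str> {
        match kind {
            // was: TokenKind::BinOp(BinOpToken::Plus)
            TokenKind::Plus => Some("+"),
            // was: TokenKind::BinOpEq(BinOpToken::Plus)
            TokenKind::PlusEq => Some("+="),
            // was: TokenKind::BinOp(BinOpToken::Shl)
            TokenKind::Shl => Some("<<"),
            // was: TokenKind::BinOpEq(BinOpToken::Shl)
            TokenKind::ShlEq => Some("<<="),
            _ => None,
        }
    }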
parent 7c4a55c2ac
commit 2a1e2e9632

19 changed files with 352 additions and 309 deletions
@@ -2,7 +2,6 @@ use std::borrow::Cow;
 use std::fmt;
 use std::sync::Arc;
 
-pub use BinOpToken::*;
 pub use LitKind::*;
 pub use Nonterminal::*;
 pub use NtExprKind::*;
@@ -26,21 +25,6 @@ pub enum CommentKind {
     Block,
 }
 
-#[derive(Clone, PartialEq, Encodable, Decodable, Hash, Debug, Copy)]
-#[derive(HashStable_Generic)]
-pub enum BinOpToken {
-    Plus,
-    Minus,
-    Star,
-    Slash,
-    Percent,
-    Caret,
-    And,
-    Or,
-    Shl,
-    Shr,
-}
-
 // This type must not implement `Hash` due to the unusual `PartialEq` impl below.
 #[derive(Copy, Clone, Debug, Encodable, Decodable, HashStable_Generic)]
 pub enum InvisibleOrigin {
@@ -379,8 +363,46 @@ pub enum TokenKind {
     Not,
     /// `~`
     Tilde,
-    BinOp(BinOpToken),
-    BinOpEq(BinOpToken),
+    // `+`
+    Plus,
+    // `-`
+    Minus,
+    // `*`
+    Star,
+    // `/`
+    Slash,
+    // `%`
+    Percent,
+    // `^`
+    Caret,
+    // `&`
+    And,
+    // `|`
+    Or,
+    // `<<`
+    Shl,
+    // `>>`
+    Shr,
+    // `+=`
+    PlusEq,
+    // `-=`
+    MinusEq,
+    // `*=`
+    StarEq,
+    // `/=`
+    SlashEq,
+    // `%=`
+    PercentEq,
+    // `^=`
+    CaretEq,
+    // `&=`
+    AndEq,
+    // `|=`
+    OrEq,
+    // `<<=`
+    ShlEq,
+    // `>>=`
+    ShrEq,
 
     /* Structural symbols */
     /// `@`
@@ -502,29 +524,29 @@ impl TokenKind {
            (EqEq, 1) => (Eq, Eq),
            (Ne, 1) => (Not, Eq),
            (Ge, 1) => (Gt, Eq),
-           (AndAnd, 1) => (BinOp(And), BinOp(And)),
-           (OrOr, 1) => (BinOp(Or), BinOp(Or)),
-           (BinOp(Shl), 1) => (Lt, Lt),
-           (BinOp(Shr), 1) => (Gt, Gt),
-           (BinOpEq(Plus), 1) => (BinOp(Plus), Eq),
-           (BinOpEq(Minus), 1) => (BinOp(Minus), Eq),
-           (BinOpEq(Star), 1) => (BinOp(Star), Eq),
-           (BinOpEq(Slash), 1) => (BinOp(Slash), Eq),
-           (BinOpEq(Percent), 1) => (BinOp(Percent), Eq),
-           (BinOpEq(Caret), 1) => (BinOp(Caret), Eq),
-           (BinOpEq(And), 1) => (BinOp(And), Eq),
-           (BinOpEq(Or), 1) => (BinOp(Or), Eq),
-           (BinOpEq(Shl), 1) => (Lt, Le), // `<` + `<=`
-           (BinOpEq(Shl), 2) => (BinOp(Shl), Eq), // `<<` + `=`
-           (BinOpEq(Shr), 1) => (Gt, Ge), // `>` + `>=`
-           (BinOpEq(Shr), 2) => (BinOp(Shr), Eq), // `>>` + `=`
+           (AndAnd, 1) => (And, And),
+           (OrOr, 1) => (Or, Or),
+           (Shl, 1) => (Lt, Lt),
+           (Shr, 1) => (Gt, Gt),
+           (PlusEq, 1) => (Plus, Eq),
+           (MinusEq, 1) => (Minus, Eq),
+           (StarEq, 1) => (Star, Eq),
+           (SlashEq, 1) => (Slash, Eq),
+           (PercentEq, 1) => (Percent, Eq),
+           (CaretEq, 1) => (Caret, Eq),
+           (AndEq, 1) => (And, Eq),
+           (OrEq, 1) => (Or, Eq),
+           (ShlEq, 1) => (Lt, Le), // `<` + `<=`
+           (ShlEq, 2) => (Shl, Eq), // `<<` + `=`
+           (ShrEq, 1) => (Gt, Ge), // `>` + `>=`
+           (ShrEq, 2) => (Shr, Eq), // `>>` + `=`
            (DotDot, 1) => (Dot, Dot),
            (DotDotDot, 1) => (Dot, DotDot), // `.` + `..`
            (DotDotDot, 2) => (DotDot, Dot), // `..` + `.`
            (DotDotEq, 2) => (DotDot, Eq),
            (PathSep, 1) => (Colon, Colon),
-           (RArrow, 1) => (BinOp(Minus), Gt),
-           (LArrow, 1) => (Lt, BinOp(Minus)),
+           (RArrow, 1) => (Minus, Gt),
+           (LArrow, 1) => (Lt, Minus),
            (FatArrow, 1) => (Eq, Gt),
            _ => return None,
        })
@@ -543,7 +565,7 @@ impl TokenKind {
     }
 
     pub fn should_end_const_arg(&self) -> bool {
-        matches!(self, Gt | Ge | BinOp(Shr) | BinOpEq(Shr))
+        matches!(self, Gt | Ge | Shr | ShrEq)
     }
 }
 
@@ -582,11 +604,11 @@ impl Token {
 
     pub fn is_punct(&self) -> bool {
         match self.kind {
-            Eq | Lt | Le | EqEq | Ne | Ge | Gt | AndAnd | OrOr | Not | Tilde | BinOp(_)
-            | BinOpEq(_) | At | Dot | DotDot | DotDotDot | DotDotEq | Comma | Semi | Colon
-            | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question | SingleQuote => {
-                true
-            }
+            Eq | Lt | Le | EqEq | Ne | Ge | Gt | AndAnd | OrOr | Not | Tilde | Plus | Minus
+            | Star | Slash | Percent | Caret | And | Or | Shl | Shr | PlusEq | MinusEq | StarEq
+            | SlashEq | PercentEq | CaretEq | AndEq | OrEq | ShlEq | ShrEq | At | Dot | DotDot
+            | DotDotDot | DotDotEq | Comma | Semi | Colon | PathSep | RArrow | LArrow
+            | FatArrow | Pound | Dollar | Question | SingleQuote => true,
 
             OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | Ident(..)
             | NtIdent(..) | Lifetime(..) | NtLifetime(..) | Interpolated(..) | Eof => false,
@@ -594,7 +616,7 @@ impl Token {
     }
 
     pub fn is_like_plus(&self) -> bool {
-        matches!(self.kind, BinOp(Plus) | BinOpEq(Plus))
+        matches!(self.kind, Plus | PlusEq)
     }
 
     /// Returns `true` if the token can appear at the start of an expression.
@@ -609,14 +631,14 @@ impl Token {
             OpenDelim(Parenthesis | Brace | Bracket) | // tuple, array or block
             Literal(..) | // literal
             Not | // operator not
-            BinOp(Minus) | // unary minus
-            BinOp(Star) | // dereference
-            BinOp(Or) | OrOr | // closure
-            BinOp(And) | // reference
+            Minus | // unary minus
+            Star | // dereference
+            Or | OrOr | // closure
+            And | // reference
             AndAnd | // double reference
             // DotDotDot is no longer supported, but we need some way to display the error
             DotDot | DotDotDot | DotDotEq | // range notation
-            Lt | BinOp(Shl) | // associated path
+            Lt | Shl | // associated path
             PathSep | // global path
             Lifetime(..) | // labeled loop
             Pound => true, // expression attributes
@@ -645,17 +667,16 @@ impl Token {
             Ident(..) | NtIdent(..) |
             OpenDelim(Delimiter::Parenthesis) | // tuple pattern
             OpenDelim(Delimiter::Bracket) | // slice pattern
-            BinOp(And) | // reference
-            BinOp(Minus) | // negative literal
+            And | // reference
+            Minus | // negative literal
             AndAnd | // double reference
             Literal(_) | // literal
             DotDot | // range pattern (future compat)
             DotDotDot | // range pattern (future compat)
             PathSep | // path
             Lt | // path (UFCS constant)
-            BinOp(Shl) => true, // path (double UFCS)
-            // leading vert `|` or-pattern
-            BinOp(Or) => matches!(pat_kind, PatWithOr),
+            Shl => true, // path (double UFCS)
+            Or => matches!(pat_kind, PatWithOr), // leading vert `|` or-pattern
             Interpolated(nt) =>
                 matches!(&**nt,
                     | NtExpr(..)
@@ -676,18 +697,18 @@ impl Token {
     /// Returns `true` if the token can appear at the start of a type.
     pub fn can_begin_type(&self) -> bool {
         match self.uninterpolate().kind {
             Ident(name, is_raw) =>
                 ident_can_begin_type(name, self.span, is_raw), // type name or keyword
             OpenDelim(Delimiter::Parenthesis) | // tuple
             OpenDelim(Delimiter::Bracket) | // array
             Not | // never
-            BinOp(Star) | // raw pointer
-            BinOp(And) | // reference
+            Star | // raw pointer
+            And | // reference
             AndAnd | // double reference
             Question | // maybe bound in trait object
             Lifetime(..) | // lifetime bound in trait object
-            Lt | BinOp(Shl) | // associated path
+            Lt | Shl | // associated path
             PathSep => true, // global path
             OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
                 MetaVarKind::Ty { .. } |
                 MetaVarKind::Path
@@ -701,7 +722,7 @@ impl Token {
     /// Returns `true` if the token can appear at the start of a const param.
     pub fn can_begin_const_arg(&self) -> bool {
         match self.kind {
-            OpenDelim(Delimiter::Brace) | Literal(..) | BinOp(Minus) => true,
+            OpenDelim(Delimiter::Brace) | Literal(..) | Minus => true,
             Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true,
             Interpolated(ref nt) => matches!(&**nt, NtExpr(..) | NtBlock(..) | NtLiteral(..)),
             OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
@@ -750,7 +771,7 @@ impl Token {
     /// Keep this in sync with and `Lit::from_token`, excluding unary negation.
     pub fn can_begin_literal_maybe_minus(&self) -> bool {
         match self.uninterpolate().kind {
-            Literal(..) | BinOp(Minus) => true,
+            Literal(..) | Minus => true,
             Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true,
             Interpolated(ref nt) => match &**nt {
                 NtLiteral(_) => true,
@@ -875,7 +896,7 @@ impl Token {
     }
 
     pub fn is_qpath_start(&self) -> bool {
-        self == &Lt || self == &BinOp(Shl)
+        self == &Lt || self == &Shl
     }
 
     pub fn is_path_start(&self) -> bool {
@@ -967,59 +988,82 @@ impl Token {
     }
 
     pub fn glue(&self, joint: &Token) -> Option<Token> {
-        let kind = match self.kind {
-            Eq => match joint.kind {
-                Eq => EqEq,
-                Gt => FatArrow,
-                _ => return None,
-            },
-            Lt => match joint.kind {
-                Eq => Le,
-                Lt => BinOp(Shl),
-                Le => BinOpEq(Shl),
-                BinOp(Minus) => LArrow,
-                _ => return None,
-            },
-            Gt => match joint.kind {
-                Eq => Ge,
-                Gt => BinOp(Shr),
-                Ge => BinOpEq(Shr),
-                _ => return None,
-            },
-            Not => match joint.kind {
-                Eq => Ne,
-                _ => return None,
-            },
-            BinOp(op) => match joint.kind {
-                Eq => BinOpEq(op),
-                BinOp(And) if op == And => AndAnd,
-                BinOp(Or) if op == Or => OrOr,
-                Gt if op == Minus => RArrow,
-                _ => return None,
-            },
-            Dot => match joint.kind {
-                Dot => DotDot,
-                DotDot => DotDotDot,
-                _ => return None,
-            },
-            DotDot => match joint.kind {
-                Dot => DotDotDot,
-                Eq => DotDotEq,
-                _ => return None,
-            },
-            Colon => match joint.kind {
-                Colon => PathSep,
-                _ => return None,
-            },
-            SingleQuote => match joint.kind {
-                Ident(name, is_raw) => Lifetime(Symbol::intern(&format!("'{name}")), is_raw),
-                _ => return None,
-            },
-
-            Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot
-            | DotDotEq | Comma | Semi | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar
-            | Question | OpenDelim(..) | CloseDelim(..) | Literal(..) | Ident(..) | NtIdent(..)
-            | Lifetime(..) | NtLifetime(..) | Interpolated(..) | DocComment(..) | Eof => {
+        let kind = match (&self.kind, &joint.kind) {
+            (Eq, Eq) => EqEq,
+            (Eq, Gt) => FatArrow,
+            (Eq, _) => return None,
+
+            (Lt, Eq) => Le,
+            (Lt, Lt) => Shl,
+            (Lt, Le) => ShlEq,
+            (Lt, Minus) => LArrow,
+            (Lt, _) => return None,
+
+            (Gt, Eq) => Ge,
+            (Gt, Gt) => Shr,
+            (Gt, Ge) => ShrEq,
+            (Gt, _) => return None,
+
+            (Not, Eq) => Ne,
+            (Not, _) => return None,
+
+            (Plus, Eq) => PlusEq,
+            (Plus, _) => return None,
+
+            (Minus, Eq) => MinusEq,
+            (Minus, Gt) => RArrow,
+            (Minus, _) => return None,
+
+            (Star, Eq) => StarEq,
+            (Star, _) => return None,
+
+            (Slash, Eq) => SlashEq,
+            (Slash, _) => return None,
+
+            (Percent, Eq) => PercentEq,
+            (Percent, _) => return None,
+
+            (Caret, Eq) => CaretEq,
+            (Caret, _) => return None,
+
+            (And, Eq) => AndEq,
+            (And, And) => AndAnd,
+            (And, _) => return None,
+
+            (Or, Eq) => OrEq,
+            (Or, Or) => OrOr,
+            (Or, _) => return None,
+
+            (Shl, Eq) => ShlEq,
+            (Shl, _) => return None,
+
+            (Shr, Eq) => ShrEq,
+            (Shr, _) => return None,
+
+            (Dot, Dot) => DotDot,
+            (Dot, DotDot) => DotDotDot,
+            (Dot, _) => return None,
+
+            (DotDot, Dot) => DotDotDot,
+            (DotDot, Eq) => DotDotEq,
+            (DotDot, _) => return None,
+
+            (Colon, Colon) => PathSep,
+            (Colon, _) => return None,
+
+            (SingleQuote, Ident(name, is_raw)) => {
+                Lifetime(Symbol::intern(&format!("'{name}")), *is_raw)
+            }
+            (SingleQuote, _) => return None,
+
+            (
+                Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | PlusEq | MinusEq | StarEq | SlashEq
+                | PercentEq | CaretEq | AndEq | OrEq | ShlEq | ShrEq | At | DotDotDot | DotDotEq
+                | Comma | Semi | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question
+                | OpenDelim(..) | CloseDelim(..) | Literal(..) | Ident(..) | NtIdent(..)
+                | Lifetime(..) | NtLifetime(..) | Interpolated(..) | DocComment(..) | Eof,
+                _,
+            ) => {
                 return None;
             }
         };
@@ -1,7 +1,7 @@
 use rustc_span::kw;
 
 use crate::ast::{self, BinOpKind, RangeLimits};
-use crate::token::{self, BinOpToken, Token};
+use crate::token::{self, Token};
 
 /// Associative operator.
 #[derive(Copy, Clone, PartialEq, Debug)]
@@ -34,26 +34,26 @@ impl AssocOp {
         use AssocOp::*;
         match t.kind {
             token::Eq => Some(Assign),
-            token::BinOp(BinOpToken::Plus) => Some(Binary(BinOpKind::Add)),
-            token::BinOp(BinOpToken::Minus) => Some(Binary(BinOpKind::Sub)),
-            token::BinOp(BinOpToken::Star) => Some(Binary(BinOpKind::Mul)),
-            token::BinOp(BinOpToken::Slash) => Some(Binary(BinOpKind::Div)),
-            token::BinOp(BinOpToken::Percent) => Some(Binary(BinOpKind::Rem)),
-            token::BinOp(BinOpToken::Caret) => Some(Binary(BinOpKind::BitXor)),
-            token::BinOp(BinOpToken::And) => Some(Binary(BinOpKind::BitAnd)),
-            token::BinOp(BinOpToken::Or) => Some(Binary(BinOpKind::BitOr)),
-            token::BinOp(BinOpToken::Shl) => Some(Binary(BinOpKind::Shl)),
-            token::BinOp(BinOpToken::Shr) => Some(Binary(BinOpKind::Shr)),
-            token::BinOpEq(BinOpToken::Plus) => Some(AssignOp(BinOpKind::Add)),
-            token::BinOpEq(BinOpToken::Minus) => Some(AssignOp(BinOpKind::Sub)),
-            token::BinOpEq(BinOpToken::Star) => Some(AssignOp(BinOpKind::Mul)),
-            token::BinOpEq(BinOpToken::Slash) => Some(AssignOp(BinOpKind::Div)),
-            token::BinOpEq(BinOpToken::Percent) => Some(AssignOp(BinOpKind::Rem)),
-            token::BinOpEq(BinOpToken::Caret) => Some(AssignOp(BinOpKind::BitXor)),
-            token::BinOpEq(BinOpToken::And) => Some(AssignOp(BinOpKind::BitAnd)),
-            token::BinOpEq(BinOpToken::Or) => Some(AssignOp(BinOpKind::BitOr)),
-            token::BinOpEq(BinOpToken::Shl) => Some(AssignOp(BinOpKind::Shl)),
-            token::BinOpEq(BinOpToken::Shr) => Some(AssignOp(BinOpKind::Shr)),
+            token::Plus => Some(Binary(BinOpKind::Add)),
+            token::Minus => Some(Binary(BinOpKind::Sub)),
+            token::Star => Some(Binary(BinOpKind::Mul)),
+            token::Slash => Some(Binary(BinOpKind::Div)),
+            token::Percent => Some(Binary(BinOpKind::Rem)),
+            token::Caret => Some(Binary(BinOpKind::BitXor)),
+            token::And => Some(Binary(BinOpKind::BitAnd)),
+            token::Or => Some(Binary(BinOpKind::BitOr)),
+            token::Shl => Some(Binary(BinOpKind::Shl)),
+            token::Shr => Some(Binary(BinOpKind::Shr)),
+            token::PlusEq => Some(AssignOp(BinOpKind::Add)),
+            token::MinusEq => Some(AssignOp(BinOpKind::Sub)),
+            token::StarEq => Some(AssignOp(BinOpKind::Mul)),
+            token::SlashEq => Some(AssignOp(BinOpKind::Div)),
+            token::PercentEq => Some(AssignOp(BinOpKind::Rem)),
+            token::CaretEq => Some(AssignOp(BinOpKind::BitXor)),
+            token::AndEq => Some(AssignOp(BinOpKind::BitAnd)),
+            token::OrEq => Some(AssignOp(BinOpKind::BitOr)),
+            token::ShlEq => Some(AssignOp(BinOpKind::Shl)),
+            token::ShrEq => Some(AssignOp(BinOpKind::Shr)),
             token::Lt => Some(Binary(BinOpKind::Lt)),
             token::Le => Some(Binary(BinOpKind::Le)),
             token::Ge => Some(Binary(BinOpKind::Ge)),
@@ -11,9 +11,7 @@ use std::sync::Arc;
 
 use rustc_ast::attr::AttrIdGenerator;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{
-    self, BinOpToken, CommentKind, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind,
-};
+use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind};
 use rustc_ast::tokenstream::{Spacing, TokenStream, TokenTree};
 use rustc_ast::util::classify;
 use rustc_ast::util::comments::{Comment, CommentStyle};
@@ -344,21 +342,6 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool {
     }
 }
 
-fn binop_to_string(op: BinOpToken) -> &'static str {
-    match op {
-        token::Plus => "+",
-        token::Minus => "-",
-        token::Star => "*",
-        token::Slash => "/",
-        token::Percent => "%",
-        token::Caret => "^",
-        token::And => "&",
-        token::Or => "|",
-        token::Shl => "<<",
-        token::Shr => ">>",
-    }
-}
-
 pub fn doc_comment_to_string(
     comment_kind: CommentKind,
     attr_style: ast::AttrStyle,
@@ -917,8 +900,26 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
             token::Tilde => "~".into(),
             token::OrOr => "||".into(),
             token::AndAnd => "&&".into(),
-            token::BinOp(op) => binop_to_string(op).into(),
-            token::BinOpEq(op) => format!("{}=", binop_to_string(op)).into(),
+            token::Plus => "+".into(),
+            token::Minus => "-".into(),
+            token::Star => "*".into(),
+            token::Slash => "/".into(),
+            token::Percent => "%".into(),
+            token::Caret => "^".into(),
+            token::And => "&".into(),
+            token::Or => "|".into(),
+            token::Shl => "<<".into(),
+            token::Shr => ">>".into(),
+            token::PlusEq => "+=".into(),
+            token::MinusEq => "-=".into(),
+            token::StarEq => "*=".into(),
+            token::SlashEq => "/=".into(),
+            token::PercentEq => "%=".into(),
+            token::CaretEq => "^=".into(),
+            token::AndEq => "&=".into(),
+            token::OrEq => "|=".into(),
+            token::ShlEq => "<<=".into(),
+            token::ShrEq => ">>=".into(),
 
             /* Structural symbols */
             token::At => "@".into(),
@@ -1135,7 +1135,7 @@ fn check_matcher_core<'tt>(
                 && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
                 && matches!(
                     next_token,
-                    TokenTree::Token(token) if *token == BinOp(token::BinOpToken::Or)
+                    TokenTree::Token(token) if *token == token::Or
                 )
             {
                 // It is suggestion to use pat_param, for example: $x:pat -> $x:pat_param.
@@ -1177,7 +1177,7 @@ fn check_matcher_core<'tt>(
 
             if kind == NonterminalKind::Pat(PatWithOr)
                 && sess.psess.edition.at_least_rust_2021()
-                && next_token.is_token(&BinOp(token::BinOpToken::Or))
+                && next_token.is_token(&token::Or)
             {
                 let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl(
                     span,
@@ -1296,7 +1296,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
             const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
             match tok {
                 TokenTree::Token(token) => match token.kind {
-                    FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
+                    FatArrow | Comma | Eq | Or => IsInFollow::Yes,
                     Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                         IsInFollow::Yes
                     }
@@ -1332,9 +1332,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
                     | Colon
                     | Eq
                     | Gt
-                    | BinOp(token::Shr)
+                    | Shr
                     | Semi
-                    | BinOp(token::Or) => IsInFollow::Yes,
+                    | Or => IsInFollow::Yes,
                     Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
                         IsInFollow::Yes
                     }
@@ -302,8 +302,8 @@ fn parse_tree<'a>(
 /// `None`.
 fn kleene_op(token: &Token) -> Option<KleeneOp> {
     match token.kind {
-        token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
-        token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
+        token::Star => Some(KleeneOp::ZeroOrMore),
+        token::Plus => Some(KleeneOp::OneOrMore),
         token::Question => Some(KleeneOp::ZeroOrOne),
         _ => None,
     }
@@ -182,26 +182,26 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
                 OrOr => op("||"),
                 Not => op("!"),
                 Tilde => op("~"),
-                BinOp(Plus) => op("+"),
-                BinOp(Minus) => op("-"),
-                BinOp(Star) => op("*"),
-                BinOp(Slash) => op("/"),
-                BinOp(Percent) => op("%"),
-                BinOp(Caret) => op("^"),
-                BinOp(And) => op("&"),
-                BinOp(Or) => op("|"),
-                BinOp(Shl) => op("<<"),
-                BinOp(Shr) => op(">>"),
-                BinOpEq(Plus) => op("+="),
-                BinOpEq(Minus) => op("-="),
-                BinOpEq(Star) => op("*="),
-                BinOpEq(Slash) => op("/="),
-                BinOpEq(Percent) => op("%="),
-                BinOpEq(Caret) => op("^="),
-                BinOpEq(And) => op("&="),
-                BinOpEq(Or) => op("|="),
-                BinOpEq(Shl) => op("<<="),
-                BinOpEq(Shr) => op(">>="),
+                Plus => op("+"),
+                Minus => op("-"),
+                Star => op("*"),
+                Slash => op("/"),
+                Percent => op("%"),
+                Caret => op("^"),
+                And => op("&"),
+                Or => op("|"),
+                Shl => op("<<"),
+                Shr => op(">>"),
+                PlusEq => op("+="),
+                MinusEq => op("-="),
+                StarEq => op("*="),
+                SlashEq => op("/="),
+                PercentEq => op("%="),
+                CaretEq => op("^="),
+                AndEq => op("&="),
+                OrEq => op("|="),
+                ShlEq => op("<<="),
+                ShrEq => op(">>="),
                 At => op("@"),
                 Dot => op("."),
                 DotDot => op(".."),
@@ -324,14 +324,14 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
                 b'>' => Gt,
                 b'!' => Not,
                 b'~' => Tilde,
-                b'+' => BinOp(Plus),
-                b'-' => BinOp(Minus),
-                b'*' => BinOp(Star),
-                b'/' => BinOp(Slash),
-                b'%' => BinOp(Percent),
-                b'^' => BinOp(Caret),
-                b'&' => BinOp(And),
-                b'|' => BinOp(Or),
+                b'+' => Plus,
+                b'-' => Minus,
+                b'*' => Star,
+                b'/' => Slash,
+                b'%' => Percent,
+                b'^' => Caret,
+                b'&' => And,
+                b'|' => Or,
                 b'@' => At,
                 b'.' => Dot,
                 b',' => Comma,
@@ -372,10 +372,9 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
                 suffix,
                 span,
             }) if symbol.as_str().starts_with('-') => {
-                let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let integer = TokenKind::lit(token::Integer, symbol, suffix);
-                let a = tokenstream::TokenTree::token_joint_hidden(minus, span);
+                let a = tokenstream::TokenTree::token_joint_hidden(Minus, span);
                 let b = tokenstream::TokenTree::token_alone(integer, span);
                 smallvec![a, b]
             }
@@ -385,10 +384,9 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
                 suffix,
                 span,
             }) if symbol.as_str().starts_with('-') => {
-                let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let float = TokenKind::lit(token::Float, symbol, suffix);
-                let a = tokenstream::TokenTree::token_joint_hidden(minus, span);
+                let a = tokenstream::TokenTree::token_joint_hidden(Minus, span);
                 let b = tokenstream::TokenTree::token_alone(float, span);
                 smallvec![a, b]
             }
@@ -599,10 +597,7 @@ impl server::TokenStream for Rustc<'_, '_> {
                 Ok(Self::TokenStream::from_iter([
                     // FIXME: The span of the `-` token is lost when
                     // parsing, so we cannot faithfully recover it here.
-                    tokenstream::TokenTree::token_joint_hidden(
-                        token::BinOp(token::Minus),
-                        e.span,
-                    ),
+                    tokenstream::TokenTree::token_joint_hidden(token::Minus, e.span),
                     tokenstream::TokenTree::token_alone(token::Literal(*token_lit), e.span),
                 ]))
             }
@@ -387,14 +387,14 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
             rustc_lexer::TokenKind::Bang => token::Not,
             rustc_lexer::TokenKind::Lt => token::Lt,
             rustc_lexer::TokenKind::Gt => token::Gt,
-            rustc_lexer::TokenKind::Minus => token::BinOp(token::Minus),
-            rustc_lexer::TokenKind::And => token::BinOp(token::And),
-            rustc_lexer::TokenKind::Or => token::BinOp(token::Or),
-            rustc_lexer::TokenKind::Plus => token::BinOp(token::Plus),
-            rustc_lexer::TokenKind::Star => token::BinOp(token::Star),
-            rustc_lexer::TokenKind::Slash => token::BinOp(token::Slash),
-            rustc_lexer::TokenKind::Caret => token::BinOp(token::Caret),
-            rustc_lexer::TokenKind::Percent => token::BinOp(token::Percent),
+            rustc_lexer::TokenKind::Minus => token::Minus,
+            rustc_lexer::TokenKind::And => token::And,
+            rustc_lexer::TokenKind::Or => token::Or,
+            rustc_lexer::TokenKind::Plus => token::Plus,
+            rustc_lexer::TokenKind::Star => token::Star,
+            rustc_lexer::TokenKind::Slash => token::Slash,
+            rustc_lexer::TokenKind::Caret => token::Caret,
+            rustc_lexer::TokenKind::Percent => token::Percent,
 
             rustc_lexer::TokenKind::Unknown | rustc_lexer::TokenKind::InvalidIdent => {
                 // Don't emit diagnostics for sequences of the same invalid token
@@ -308,7 +308,7 @@ pub(super) static UNICODE_ARRAY: &[(char, &str, &str)] = &[
 const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
     (" ", "Space", None),
     ("_", "Underscore", Some(token::Ident(kw::Underscore, token::IdentIsRaw::No))),
-    ("-", "Minus/Hyphen", Some(token::BinOp(token::Minus))),
+    ("-", "Minus/Hyphen", Some(token::Minus)),
     (",", "Comma", Some(token::Comma)),
     (";", "Semicolon", Some(token::Semi)),
     (":", "Colon", Some(token::Colon)),
@@ -321,11 +321,11 @@ const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
     ("]", "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))),
     ("{", "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))),
     ("}", "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))),
-    ("*", "Asterisk", Some(token::BinOp(token::Star))),
-    ("/", "Slash", Some(token::BinOp(token::Slash))),
+    ("*", "Asterisk", Some(token::Star)),
+    ("/", "Slash", Some(token::Slash)),
     ("\\", "Backslash", None),
-    ("&", "Ampersand", Some(token::BinOp(token::And))),
-    ("+", "Plus Sign", Some(token::BinOp(token::Plus))),
+    ("&", "Ampersand", Some(token::And)),
+    ("+", "Plus Sign", Some(token::Plus)),
     ("<", "Less-Than Sign", Some(token::Lt)),
     ("=", "Equals Sign", Some(token::Eq)),
     ("==", "Double Equals Sign", Some(token::EqEq)),
@@ -1167,7 +1167,7 @@ impl<'a> Parser<'a> {
         let mut number_of_gt = 0;
         while self.look_ahead(position, |t| {
             trace!("check_trailing_angle_brackets: t={:?}", t);
-            if *t == token::BinOp(token::BinOpToken::Shr) {
+            if *t == token::Shr {
                 number_of_shr += 1;
                 true
             } else if *t == token::Gt {
@@ -1222,7 +1222,7 @@ impl<'a> Parser<'a> {
             let span = lo.to(self.prev_token.span);
             // Detect trailing `>` like in `x.collect::Vec<_>>()`.
             let mut trailing_span = self.prev_token.span.shrink_to_hi();
-            while self.token == token::BinOp(token::Shr) || self.token == token::Gt {
+            while self.token == token::Shr || self.token == token::Gt {
                 trailing_span = trailing_span.to(self.token.span);
                 self.bump();
             }
@@ -1468,8 +1468,7 @@ impl<'a> Parser<'a> {
             let snapshot = self.create_snapshot_for_diagnostic();
             self.bump();
             // So far we have parsed `foo<bar<`, consume the rest of the type args.
-            let modifiers =
-                [(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)];
+            let modifiers = [(token::Lt, 1), (token::Gt, -1), (token::Shr, -2)];
             self.consume_tts(1, &modifiers);
 
             if !&[token::OpenDelim(Delimiter::Parenthesis), token::PathSep]
@@ -2613,8 +2612,7 @@ impl<'a> Parser<'a> {
             || self.token == TokenKind::Dot;
         // This will be true when a trait object type `Foo +` or a path which was a `const fn` with
         // type params has been parsed.
-        let was_op =
-            matches!(self.prev_token.kind, token::BinOp(token::Plus | token::Shr) | token::Gt);
+        let was_op = matches!(self.prev_token.kind, token::Plus | token::Shr | token::Gt);
         if !is_op_or_dot && !was_op {
             // We perform these checks and early return to avoid taking a snapshot unnecessarily.
             return Err(err);
@@ -2992,8 +2990,7 @@ impl<'a> Parser<'a> {
 
     pub(super) fn recover_vcs_conflict_marker(&mut self) {
         // <<<<<<<
-        let Some(start) = self.conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt)
-        else {
+        let Some(start) = self.conflict_marker(&TokenKind::Shl, &TokenKind::Lt) else {
             return;
         };
         let mut spans = Vec::with_capacity(3);
@@ -3008,15 +3005,13 @@ impl<'a> Parser<'a> {
             if self.token == TokenKind::Eof {
                 break;
             }
-            if let Some(span) = self.conflict_marker(&TokenKind::OrOr, &TokenKind::BinOp(token::Or))
-            {
+            if let Some(span) = self.conflict_marker(&TokenKind::OrOr, &TokenKind::Or) {
                 middlediff3 = Some(span);
             }
             if let Some(span) = self.conflict_marker(&TokenKind::EqEq, &TokenKind::Eq) {
                 middle = Some(span);
             }
-            if let Some(span) = self.conflict_marker(&TokenKind::BinOp(token::Shr), &TokenKind::Gt)
-            {
+            if let Some(span) = self.conflict_marker(&TokenKind::Shr, &TokenKind::Gt) {
                 spans.push(span);
                 end = Some(span);
                 break;
@@ -239,8 +239,8 @@ impl<'a> Parser<'a> {
                 self.bump();
             }
 
-            if self.prev_token == token::BinOp(token::Plus)
-                && self.token == token::BinOp(token::Plus)
+            if self.prev_token == token::Plus
+                && self.token == token::Plus
                 && self.prev_token.span.between(self.token.span).is_empty()
             {
                 let op_span = self.prev_token.span.to(self.token.span);
@@ -250,8 +250,8 @@ impl<'a> Parser<'a> {
                 continue;
             }
 
-            if self.prev_token == token::BinOp(token::Minus)
-                && self.token == token::BinOp(token::Minus)
+            if self.prev_token == token::Minus
+                && self.token == token::Minus
                 && self.prev_token.span.between(self.token.span).is_empty()
                 && !self.look_ahead(1, |tok| tok.can_begin_expr())
             {
@@ -509,19 +509,19 @@ impl<'a> Parser<'a> {
             // `~expr`
             token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)),
             // `-expr`
-            token::BinOp(token::Minus) => {
+            token::Minus => {
                 make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Neg))
             }
             // `*expr`
-            token::BinOp(token::Star) => {
+            token::Star => {
                 make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Deref))
             }
             // `&expr` and `&&expr`
-            token::BinOp(token::And) | token::AndAnd => {
+            token::And | token::AndAnd => {
                 make_it!(this, attrs, |this, _| this.parse_expr_borrow(lo))
             }
             // `+lit`
-            token::BinOp(token::Plus) if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
+            token::Plus if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
                 let mut err = errors::LeadingPlusNotSupported {
                     span: lo,
                     remove_plus: None,
@@ -541,9 +541,7 @@ impl<'a> Parser<'a> {
                 this.parse_expr_prefix(attrs)
             }
             // Recover from `++x`:
-            token::BinOp(token::Plus)
-                if this.look_ahead(1, |t| *t == token::BinOp(token::Plus)) =>
-            {
+            token::Plus if this.look_ahead(1, |t| *t == token::Plus) => {
                 let starts_stmt = this.prev_token == token::Semi
                     || this.prev_token == token::CloseDelim(Delimiter::Brace);
                 let pre_span = this.token.span.to(this.look_ahead(1, |t| t.span));
@@ -723,14 +721,12 @@ impl<'a> Parser<'a> {
                     suggestion,
                 })
             }
-            token::BinOp(token::Shl) => {
-                self.dcx().emit_err(errors::ShiftInterpretedAsGeneric {
-                    shift: self.token.span,
-                    r#type: path,
-                    args: args_span,
-                    suggestion,
-                })
-            }
+            token::Shl => self.dcx().emit_err(errors::ShiftInterpretedAsGeneric {
+                shift: self.token.span,
+                r#type: path,
+                args: args_span,
+                suggestion,
+            }),
             _ => {
                 // We can end up here even without `<` being the next token, for
                 // example because `parse_ty_no_plus` returns `Err` on keywords,
@@ -2595,7 +2591,7 @@ impl<'a> Parser<'a> {
             missing_let: None,
             comparison: None,
         };
-        if self.prev_token == token::BinOp(token::Or) {
+        if self.prev_token == token::Or {
             // This was part of a closure, the that part of the parser recover.
             return Err(self.dcx().create_err(err));
         } else {
@@ -1293,7 +1293,7 @@ impl<'a> Parser<'a> {
                 if token.is_keyword(kw::Move) {
                     return true;
                 }
-                matches!(token.kind, token::BinOp(token::Or) | token::OrOr)
+                matches!(token.kind, token::Or | token::OrOr)
             })
         } else {
             // `$qual static`
@@ -1814,7 +1814,7 @@ impl<'a> Parser<'a> {
             let attrs = p.parse_outer_attributes()?;
             p.collect_tokens(None, attrs, ForceCollect::No, |p, attrs| {
                 let mut snapshot = None;
-                if p.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
+                if p.is_vcs_conflict_marker(&TokenKind::Shl, &TokenKind::Lt) {
                     // Account for `<<<<<<<` diff markers. We can't proactively error here because
                     // that can be a valid type start, so we snapshot and reparse only we've
                     // encountered another parse error.
@@ -3011,7 +3011,7 @@ impl<'a> Parser<'a> {
         // else is parsed as a normal function parameter list, so some lookahead is required.
         let eself_lo = self.token.span;
         let (eself, eself_ident, eself_hi) = match self.token.uninterpolate().kind {
-            token::BinOp(token::And) => {
+            token::And => {
                 let eself = if is_isolated_self(self, 1) {
                     // `&self`
                     self.bump();
@@ -3041,12 +3041,12 @@ impl<'a> Parser<'a> {
                 (eself, self_ident, hi)
             }
             // `*self`
-            token::BinOp(token::Star) if is_isolated_self(self, 1) => {
+            token::Star if is_isolated_self(self, 1) => {
                 self.bump();
                 recover_self_ptr(self)?
             }
             // `*mut self` and `*const self`
-            token::BinOp(token::Star)
+            token::Star
                 if self.look_ahead(1, |t| t.is_mutability()) && is_isolated_self(self, 2) =>
             {
                 self.bump();
@@ -3077,7 +3077,7 @@ impl<'a> Parser<'a> {
                 }
                 _ => 0,
             },
-            token::BinOp(token::And) | token::AndAnd => 1,
+            token::And | token::AndAnd => 1,
             _ if self.token.is_keyword(kw::Mut) => 1,
             _ => 0,
         };
@@ -813,9 +813,9 @@ impl<'a> Parser<'a> {
         self.is_keyword_ahead(0, &[kw::Const])
             && self.look_ahead(1, |t| match &t.kind {
                 // async closures do not work with const closures, so we do not parse that here.
-                token::Ident(kw::Move | kw::Static, IdentIsRaw::No)
-                | token::OrOr
-                | token::BinOp(token::Or) => true,
+                token::Ident(kw::Move | kw::Static, IdentIsRaw::No) | token::OrOr | token::Or => {
+                    true
+                }
                 _ => false,
             })
     }
|
@ -1651,7 +1651,7 @@ impl<'a> Parser<'a> {
|
||||||
/// `::{` or `::*`
|
/// `::{` or `::*`
|
||||||
fn is_import_coupler(&mut self) -> bool {
|
fn is_import_coupler(&mut self) -> bool {
|
||||||
self.check_path_sep_and_look_ahead(|t| {
|
self.check_path_sep_and_look_ahead(|t| {
|
||||||
matches!(t.kind, token::OpenDelim(Delimiter::Brace) | token::BinOp(token::Star))
|
matches!(t.kind, token::OpenDelim(Delimiter::Brace) | token::Star)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -3,7 +3,7 @@ use std::ops::Bound;
|
||||||
use rustc_ast::mut_visit::{self, MutVisitor};
|
use rustc_ast::mut_visit::{self, MutVisitor};
|
||||||
use rustc_ast::ptr::P;
|
use rustc_ast::ptr::P;
|
||||||
use rustc_ast::token::NtPatKind::*;
|
use rustc_ast::token::NtPatKind::*;
|
||||||
use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw, MetaVarKind, Token};
|
use rustc_ast::token::{self, Delimiter, IdentIsRaw, MetaVarKind, Token};
|
||||||
use rustc_ast::util::parser::ExprPrecedence;
|
use rustc_ast::util::parser::ExprPrecedence;
|
||||||
use rustc_ast::visit::{self, Visitor};
|
use rustc_ast::visit::{self, Visitor};
|
||||||
use rustc_ast::{
|
use rustc_ast::{
|
||||||
|
@@ -358,7 +358,7 @@ impl<'a> Parser<'a> {
             )
         });
         match (is_end_ahead, &self.token.kind) {
-            (true, token::BinOp(token::Or) | token::OrOr) => {
+            (true, token::Or | token::OrOr) => {
                 // A `|` or possibly `||` token shouldn't be here. Ban it.
                 self.dcx().emit_err(TrailingVertNotAllowed {
                     span: self.token.span,
@@ -432,7 +432,11 @@ impl<'a> Parser<'a> {
         // `[` is included for indexing operations,
         // `[]` is excluded as `a[]` isn't an expression and should be recovered as `a, []` (cf. `tests/ui/parser/pat-lt-bracket-7.rs`),
         // `as` is included for type casts
-        let has_trailing_operator = matches!(self.token.kind, token::BinOp(op) if op != BinOpToken::Or)
+        let has_trailing_operator = matches!(
+            self.token.kind,
+            token::Plus | token::Minus | token::Star | token::Slash | token::Percent
+            | token::Caret | token::And | token::Shl | token::Shr // excludes `Or`
+        )
             || self.token == token::Question
             || (self.token == token::OpenDelim(Delimiter::Bracket)
                 && self.look_ahead(1, |t| *t != token::CloseDelim(Delimiter::Bracket))) // excludes `[]`
@@ -1255,7 +1259,7 @@ impl<'a> Parser<'a> {
             || self.look_ahead(dist, |t| {
                 t.is_path_start() // e.g. `MY_CONST`;
                     || *t == token::Dot // e.g. `.5` for recovery;
-                    || matches!(t.kind, token::Literal(..) | token::BinOp(token::Minus))
+                    || matches!(t.kind, token::Literal(..) | token::Minus)
                     || t.is_bool_lit()
                     || t.is_whole_expr()
                     || t.is_lifetime() // recover `'a` instead of `'a'`
@@ -305,10 +305,7 @@ impl<'a> Parser<'a> {
         let is_args_start = |token: &Token| {
             matches!(
                 token.kind,
-                token::Lt
-                    | token::BinOp(token::Shl)
-                    | token::OpenDelim(Delimiter::Parenthesis)
-                    | token::LArrow
+                token::Lt | token::Shl | token::OpenDelim(Delimiter::Parenthesis) | token::LArrow
             )
         };
         let check_args_start = |this: &mut Self| {
@@ -442,7 +442,16 @@ impl<'a> Parser<'a> {
     /// Parses the RHS of a local variable declaration (e.g., `= 14;`).
     fn parse_initializer(&mut self, eq_optional: bool) -> PResult<'a, Option<P<Expr>>> {
         let eq_consumed = match self.token.kind {
-            token::BinOpEq(..) => {
+            token::PlusEq
+            | token::MinusEq
+            | token::StarEq
+            | token::SlashEq
+            | token::PercentEq
+            | token::CaretEq
+            | token::AndEq
+            | token::OrEq
+            | token::ShlEq
+            | token::ShrEq => {
                 // Recover `let x <op>= 1` as `let x = 1` We must not use `+ BytePos(1)` here
                 // because `<op>` can be a multi-byte lookalike that was recovered, e.g. `➖=` (the
                 // `➖` is a U+2796 Heavy Minus Sign Unicode Character) that was recovered as a
@@ -688,7 +697,7 @@ impl<'a> Parser<'a> {
             if self.token == token::Eof {
                 break;
             }
-            if self.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
+            if self.is_vcs_conflict_marker(&TokenKind::Shl, &TokenKind::Lt) {
                 // Account for `<<<<<<<` diff markers. We can't proactively error here because
                 // that can be a valid path start, so we snapshot and reparse only we've
                 // encountered another parse error.
@@ -445,12 +445,6 @@ macro_rules! exp {
             token_type: $crate::parser::token_type::TokenType::$tok
         }
     };
-    (@binop, $op:ident) => {
-        $crate::parser::token_type::ExpTokenPair {
-            tok: &rustc_ast::token::BinOp(rustc_ast::token::BinOpToken::$op),
-            token_type: $crate::parser::token_type::TokenType::$op,
-        }
-    };
     (@open, $delim:ident, $token_type:ident) => {
         $crate::parser::token_type::ExpTokenPair {
             tok: &rustc_ast::token::OpenDelim(rustc_ast::token::Delimiter::$delim),
@@ -487,6 +481,11 @@ macro_rules! exp {
     (OrOr) => { exp!(@tok, OrOr) };
     (Not) => { exp!(@tok, Not) };
     (Tilde) => { exp!(@tok, Tilde) };
+    (Plus) => { exp!(@tok, Plus) };
+    (Minus) => { exp!(@tok, Minus) };
+    (Star) => { exp!(@tok, Star) };
+    (And) => { exp!(@tok, And) };
+    (Or) => { exp!(@tok, Or) };
     (At) => { exp!(@tok, At) };
     (Dot) => { exp!(@tok, Dot) };
     (DotDot) => { exp!(@tok, DotDot) };
@ -502,12 +501,6 @@ macro_rules! exp {
|
||||||
(Question) => { exp!(@tok, Question) };
|
(Question) => { exp!(@tok, Question) };
|
||||||
(Eof) => { exp!(@tok, Eof) };
|
(Eof) => { exp!(@tok, Eof) };
|
||||||
|
|
||||||
(Plus) => { exp!(@binop, Plus) };
|
|
||||||
(Minus) => { exp!(@binop, Minus) };
|
|
||||||
(Star) => { exp!(@binop, Star) };
|
|
||||||
(And) => { exp!(@binop, And) };
|
|
||||||
(Or) => { exp!(@binop, Or) };
|
|
||||||
|
|
||||||
(OpenParen) => { exp!(@open, Parenthesis, OpenParen) };
|
(OpenParen) => { exp!(@open, Parenthesis, OpenParen) };
|
||||||
(OpenBrace) => { exp!(@open, Brace, OpenBrace) };
|
(OpenBrace) => { exp!(@open, Brace, OpenBrace) };
|
||||||
(OpenBracket) => { exp!(@open, Bracket, OpenBracket) };
|
(OpenBracket) => { exp!(@open, Bracket, OpenBracket) };
|
||||||
|
|
|
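With one `TokenKind` variant per operator, the dedicated `@binop` arm above becomes redundant: every operator can go through the ordinary `@tok` arm. A simplified, self-contained sketch of the idea (toy types, not the real `ExpTokenPair`/`TokenType`):

#[derive(Debug, PartialEq)]
enum TokenKind { Plus, Minus, And, Or, OrOr }

macro_rules! exp {
    // One arm handles every single-token expectation, operators included.
    (@tok, $tok:ident) => { (TokenKind::$tok, stringify!($tok)) };
    (Plus) => { exp!(@tok, Plus) };
    (Minus) => { exp!(@tok, Minus) };
    (And) => { exp!(@tok, And) };
    (Or) => { exp!(@tok, Or) };
    (OrOr) => { exp!(@tok, OrOr) };
}

fn main() {
    assert_eq!(exp!(Plus), (TokenKind::Plus, "Plus"));
    assert_eq!(exp!(OrOr), (TokenKind::OrOr, "OrOr"));
}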
@ -1,5 +1,5 @@
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw, MetaVarKind, Token, TokenKind};
+use rustc_ast::token::{self, Delimiter, IdentIsRaw, MetaVarKind, Token, TokenKind};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
     self as ast, BareFnTy, BoundAsyncness, BoundConstness, BoundPolarity, DUMMY_NODE_ID, FnRetTy,

@ -86,7 +86,7 @@ enum AllowCVariadic {
 /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
 /// that `IDENT` is not the ident of a fn trait.
 fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
-    t == &token::PathSep || t == &token::Lt || t == &token::BinOp(token::Shl)
+    t == &token::PathSep || t == &token::Lt || t == &token::Shl
 }

 fn can_begin_dyn_bound_in_edition_2015(t: &Token) -> bool {

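The `token::Shl` case exists because a type can legitimately continue with `<<` after an identifier when a qualified path opens right inside the generic arguments. A small self-contained example (ordinary Rust, unrelated to the parser code itself):

trait Trait { type Assoc; }
impl Trait for u8 { type Assoc = u32; }
struct Foo<T>(T);

// `Foo<<u8 as Trait>::Assoc>` starts with an ident followed by `<<`,
// which the lexer produces as a single Shl token.
type Example = Foo<<u8 as Trait>::Assoc>;

fn main() {
    let _x: Example = Foo(0u32);
}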
@ -399,7 +399,7 @@ impl<'a> Parser<'a> {
         let mut trailing_plus = false;
         let (ts, trailing) = self.parse_paren_comma_seq(|p| {
             let ty = p.parse_ty()?;
-            trailing_plus = p.prev_token == TokenKind::BinOp(token::Plus);
+            trailing_plus = p.prev_token == TokenKind::Plus;
             Ok(ty)
         })?;

@ -735,7 +735,7 @@ impl<'a> Parser<'a> {
         // Always parse bounds greedily for better error recovery.
         let bounds = self.parse_generic_bounds()?;

-        *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus);
+        *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::Plus;

         Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds))
     }

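`impl_dyn_multi` records whether the `impl`/`dyn` type had more than one bound or a trailing `+`, which later feeds the suggestion to add parentheses. A hedged illustration of the kind of type this flag is about (plain Rust, not parser code):

use std::fmt::Display;

// `dyn Display + Send` has two bounds; behind a reference it must be written
// as `&(dyn Display + Send)`, the parenthesized form the parser suggests.
fn describe(x: &(dyn Display + Send)) -> String {
    x.to_string()
}

fn main() {
    println!("{}", describe(&42));
}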
@ -747,11 +747,7 @@ impl<'a> Parser<'a> {
         self.expect_lt()?;
         let (args, _, _) = self.parse_seq_to_before_tokens(
             &[exp!(Gt)],
-            &[
-                &TokenKind::Ge,
-                &TokenKind::BinOp(BinOpToken::Shr),
-                &TokenKind::BinOpEq(BinOpToken::Shr),
-            ],
+            &[&TokenKind::Ge, &TokenKind::Shr, &TokenKind::ShrEq],
             SeqSep::trailing_allowed(exp!(Comma)),
             |self_| {
                 if self_.check_keyword(exp!(SelfUpper)) {

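The recovery list needs `Ge`, `Shr`, and `ShrEq` alongside `Gt` because `>=`, `>>`, and `>>=` are each lexed as a single token, so the close of nested generics can arrive as a joint token. A minimal sketch of the splitting idea (mirroring the token-splitting table earlier in this commit, but with plain strings rather than rustc types):

// Conceptually, a joint token that starts with `>` is split into a leading
// `>` plus a shorter remainder when closing generic arguments.
fn split_gt(tok: &str) -> (&str, Option<&str>) {
    match tok {
        ">=" => (">", Some("=")),
        ">>" => (">", Some(">")),
        ">>=" => (">", Some(">=")),
        other => (other, None),
    }
}

fn main() {
    // `Vec<Vec<u8>>` ends in a single `>>` token that must become two `>`s.
    assert_eq!(split_gt(">>"), (">", Some(">")));
    assert_eq!(split_gt(">>="), (">", Some(">=")));
}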
@ -781,7 +777,7 @@ impl<'a> Parser<'a> {
         self.check_keyword(exp!(Dyn))
             && (self.token.uninterpolated_span().at_least_rust_2018()
                 || self.look_ahead(1, |t| {
-                    (can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::BinOp(token::Star))
+                    (can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::Star)
                         && !can_continue_type_after_non_fn_ident(t)
                 }))
     }

@ -803,7 +799,7 @@ impl<'a> Parser<'a> {

         // Always parse bounds greedily for better error recovery.
         let bounds = self.parse_generic_bounds()?;
-        *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus);
+        *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::Plus;
         Ok(TyKind::TraitObject(bounds, syntax))
     }

@ -1,4 +1,4 @@
-use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw};
+use rustc_ast::token::{self, Delimiter, IdentIsRaw};
 use rustc_ast::tokenstream::{TokenStream, TokenTree};
 use rustc_ast_pretty::pprust::PrintState;
 use rustc_ast_pretty::pprust::state::State as Printer;

@ -137,14 +137,9 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
             (Dollar, token::Ident(..)) => (false, DollarIdent),
             (DollarIdent, token::Colon) => (false, DollarIdentColon),
             (DollarIdentColon, token::Ident(..)) => (false, Other),
-            (
-                DollarParen,
-                token::BinOp(BinOpToken::Plus | BinOpToken::Star) | token::Question,
-            ) => (false, Other),
+            (DollarParen, token::Plus | token::Star | token::Question) => (false, Other),
             (DollarParen, _) => (false, DollarParenSep),
-            (DollarParenSep, token::BinOp(BinOpToken::Plus | BinOpToken::Star)) => {
-                (false, Other)
-            }
+            (DollarParenSep, token::Plus | token::Star) => (false, Other),
             (Pound, token::Not) => (false, PoundBang),
             (_, token::Ident(symbol, IdentIsRaw::No))
                 if !usually_needs_space_between_keyword_and_open_delim(*symbol, tt.span) =>

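The `DollarParen`/`DollarParenSep` states distinguish a repetition operator that follows `$( ... )` directly from one that follows a separator, which is why `+`, `*`, and `?` are special-cased there. A small self-contained example of such a matcher (ordinary macro_rules, not rustdoc's printer):

// `$($x:expr),+` uses a `,` separator followed by the `+` repetition operator;
// in `$($x:expr)*` the operator sits right after the closing parenthesis.
macro_rules! sum {
    ($($x:expr),+) => { 0 $(+ $x)+ };
}

fn main() {
    assert_eq!(sum!(1, 2, 3), 6);
}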
@ -12,7 +12,7 @@
 use std::collections::HashMap;
 use std::panic::{AssertUnwindSafe, catch_unwind};

-use rustc_ast::token::{BinOpToken, Delimiter, Token, TokenKind};
+use rustc_ast::token::{Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{TokenStream, TokenStreamIter, TokenTree};
 use rustc_ast::{ast, ptr};
 use rustc_ast_pretty::pprust;

@ -841,7 +841,7 @@ impl MacroArgParser {
         match tok {
             TokenTree::Token(
                 Token {
-                    kind: TokenKind::BinOp(BinOpToken::Plus),
+                    kind: TokenKind::Plus,
                     ..
                 },
                 _,

@ -855,7 +855,7 @@ impl MacroArgParser {
             )
             | TokenTree::Token(
                 Token {
-                    kind: TokenKind::BinOp(BinOpToken::Star),
+                    kind: TokenKind::Star,
                     ..
                 },
                 _,

@ -1090,12 +1090,30 @@ fn force_space_before(tok: &TokenKind) -> bool {
         | TokenKind::OrOr
         | TokenKind::Not
        | TokenKind::Tilde
-        | TokenKind::BinOpEq(_)
+        | TokenKind::PlusEq
+        | TokenKind::MinusEq
+        | TokenKind::StarEq
+        | TokenKind::SlashEq
+        | TokenKind::PercentEq
+        | TokenKind::CaretEq
+        | TokenKind::AndEq
+        | TokenKind::OrEq
+        | TokenKind::ShlEq
+        | TokenKind::ShrEq
         | TokenKind::At
         | TokenKind::RArrow
         | TokenKind::LArrow
         | TokenKind::FatArrow
-        | TokenKind::BinOp(_)
+        | TokenKind::Plus
+        | TokenKind::Minus
+        | TokenKind::Star
+        | TokenKind::Slash
+        | TokenKind::Percent
+        | TokenKind::Caret
+        | TokenKind::And
+        | TokenKind::Or
+        | TokenKind::Shl
+        | TokenKind::Shr
         | TokenKind::Pound
         | TokenKind::Dollar => true,
         _ => false,

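Without a payload-carrying `BinOp`/`BinOpEq` variant, rustfmt has to spell out each operator, but `|` patterns keep the match flat. A reduced sketch of the same shape (toy token enum, not rustfmt's real one):

enum Tok { Plus, Minus, PlusEq, MinusEq, Comma, Semi }

// Same structure as force_space_before: one arm listing every token kind that
// wants a space before it, everything else falling through to false.
fn force_space_before(tok: &Tok) -> bool {
    match tok {
        Tok::Plus | Tok::Minus | Tok::PlusEq | Tok::MinusEq => true,
        _ => false,
    }
}

fn main() {
    assert!(force_space_before(&Tok::PlusEq));
    assert!(!force_space_before(&Tok::Comma));
}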
@ -1114,7 +1132,7 @@ fn next_space(tok: &TokenKind) -> SpaceState {

     match tok {
         TokenKind::Not
-        | TokenKind::BinOp(BinOpToken::And)
+        | TokenKind::And
         | TokenKind::Tilde
         | TokenKind::At
         | TokenKind::Comma