Replace ast::TokenKind::BinOp{,Eq} and remove BinOpToken.
`BinOpToken` is badly named, because it only covers the assignable binary ops and excludes comparisons and `&&`/`||`. Its use in `ast::TokenKind` does allow a small amount of code sharing, but it's a clumsy factoring.

This commit removes `ast::TokenKind::BinOp{,Eq}`, replacing each one with 10 individual variants. This makes `ast::TokenKind` more similar to `rustc_lexer::TokenKind`, which has individual variants for all operators.

Although the number of lines of code increases, the number of characters decreases, thanks to the frequent use of shorter names like `token::Plus` instead of `token::BinOp(BinOpToken::Plus)`.
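For orientation, the sketch below shows the shape of the change. It is a minimal, hedged stand-in rather than the real rustc_ast definitions: the enum names TokenKindOld/TokenKindNew, the derives, and the elided-variant comments are all illustrative assumptions, and the real enums carry many more variants and payloads.

// A hedged sketch of the "before" and "after" token-kind shapes.
// Names other than `BinOpToken` are hypothetical stand-ins.

#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq, Debug)]
enum BinOpToken {
    Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr,
}

// Before: assignable binary ops were funneled through one nested enum.
#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq, Debug)]
enum TokenKindOld {
    BinOp(BinOpToken),
    BinOpEq(BinOpToken),
    AndAnd,
    OrOr,
    // ... other variants elided
}

// After: one flat variant per operator, mirroring rustc_lexer::TokenKind.
#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq, Debug)]
enum TokenKindNew {
    Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr,
    PlusEq, MinusEq, StarEq, SlashEq, PercentEq, CaretEq, AndEq, OrEq, ShlEq, ShrEq,
    AndAnd,
    OrOr,
    // ... other variants elided
}

fn main() {
    // The flat form is what makes call sites shorter:
    // `TokenKindNew::Plus` versus `TokenKindOld::BinOp(BinOpToken::Plus)`.
    let old = TokenKindOld::BinOp(BinOpToken::Plus);
    let new = TokenKindNew::Plus;
    assert!(matches!(old, TokenKindOld::BinOp(BinOpToken::Plus)));
    assert!(matches!(new, TokenKindNew::Plus));
}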
Parent: 7c4a55c2ac
Commit: 2a1e2e9632
19 changed files with 352 additions and 309 deletions
@@ -239,8 +239,8 @@ impl<'a> Parser<'a> {
                 self.bump();
             }

-            if self.prev_token == token::BinOp(token::Plus)
-                && self.token == token::BinOp(token::Plus)
+            if self.prev_token == token::Plus
+                && self.token == token::Plus
                 && self.prev_token.span.between(self.token.span).is_empty()
             {
                 let op_span = self.prev_token.span.to(self.token.span);
@@ -250,8 +250,8 @@ impl<'a> Parser<'a> {
                 continue;
             }

-            if self.prev_token == token::BinOp(token::Minus)
-                && self.token == token::BinOp(token::Minus)
+            if self.prev_token == token::Minus
+                && self.token == token::Minus
                 && self.prev_token.span.between(self.token.span).is_empty()
                 && !self.look_ahead(1, |tok| tok.can_begin_expr())
             {
@@ -509,19 +509,19 @@ impl<'a> Parser<'a> {
             // `~expr`
             token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)),
             // `-expr`
-            token::BinOp(token::Minus) => {
+            token::Minus => {
                 make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Neg))
             }
             // `*expr`
-            token::BinOp(token::Star) => {
+            token::Star => {
                 make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Deref))
             }
             // `&expr` and `&&expr`
-            token::BinOp(token::And) | token::AndAnd => {
+            token::And | token::AndAnd => {
                 make_it!(this, attrs, |this, _| this.parse_expr_borrow(lo))
             }
             // `+lit`
-            token::BinOp(token::Plus) if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
+            token::Plus if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
                 let mut err = errors::LeadingPlusNotSupported {
                     span: lo,
                     remove_plus: None,
@@ -541,9 +541,7 @@ impl<'a> Parser<'a> {
                 this.parse_expr_prefix(attrs)
             }
             // Recover from `++x`:
-            token::BinOp(token::Plus)
-                if this.look_ahead(1, |t| *t == token::BinOp(token::Plus)) =>
-            {
+            token::Plus if this.look_ahead(1, |t| *t == token::Plus) => {
                 let starts_stmt = this.prev_token == token::Semi
                     || this.prev_token == token::CloseDelim(Delimiter::Brace);
                 let pre_span = this.token.span.to(this.look_ahead(1, |t| t.span));
@@ -723,14 +721,12 @@ impl<'a> Parser<'a> {
                     suggestion,
                 })
             }
-            token::BinOp(token::Shl) => {
-                self.dcx().emit_err(errors::ShiftInterpretedAsGeneric {
-                    shift: self.token.span,
-                    r#type: path,
-                    args: args_span,
-                    suggestion,
-                })
-            }
+            token::Shl => self.dcx().emit_err(errors::ShiftInterpretedAsGeneric {
+                shift: self.token.span,
+                r#type: path,
+                args: args_span,
+                suggestion,
+            }),
             _ => {
                 // We can end up here even without `<` being the next token, for
                 // example because `parse_ty_no_plus` returns `Err` on keywords,
@@ -2595,7 +2591,7 @@ impl<'a> Parser<'a> {
                 missing_let: None,
                 comparison: None,
             };
-            if self.prev_token == token::BinOp(token::Or) {
+            if self.prev_token == token::Or {
                 // This was part of a closure, the that part of the parser recover.
                 return Err(self.dcx().create_err(err));
             } else {