
Remove not used DotEq token

Currently libproc_macro does not use the `DotEq` token.
https://github.com/rust-lang/rust/pull/49545 changed libproc_macro
to no longer generate the `DotEq` token.
yui-knk 2018-12-02 22:15:50 +09:00
parent 0765eb95b5
commit 96bf06baf3
6 changed files with 2 additions and 9 deletions
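
To illustrate what this relies on: `.=` is not a Rust operator, so a proc macro only ever sees it inside a token stream as a `Punct('.')` with `Spacing::Joint` followed by a `Punct('=')`. Since rust-lang/rust#49545, libproc_macro keeps that pair as two punctuation tokens instead of fusing them into the internal `DotEq` token, so nothing produces the variant any more and it can be deleted. The helper below is a hypothetical sketch (illustrative name, would live in a proc-macro crate), not code from this commit:

```rust
// Hypothetical helper for a proc-macro crate (illustrative, not part of
// this commit): detect the character pair `.=` in a macro's token stream.
extern crate proc_macro;

use proc_macro::{Spacing, TokenStream, TokenTree};

fn contains_dot_eq(stream: TokenStream) -> bool {
    let tokens: Vec<TokenTree> = stream.into_iter().collect();
    tokens.windows(2).any(|pair| match (&pair[0], &pair[1]) {
        // `.` immediately followed by `=` arrives as a Joint punct pair.
        (TokenTree::Punct(a), TokenTree::Punct(b)) => {
            a.as_char() == '.' && a.spacing() == Spacing::Joint && b.as_char() == '='
        }
        _ => false,
    })
}
```

Whether that pair comes from source text or from another macro's output, it now stays two separate tokens on the libsyntax side as well, which is why every `DotEq` arm touched below is dead code.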

@@ -314,7 +314,6 @@ fn hash_token<'a, 'gcx, W: StableHasherResult>(
         token::Token::DotDot |
         token::Token::DotDotDot |
         token::Token::DotDotEq |
-        token::Token::DotEq |
         token::Token::Comma |
         token::Token::Semi |
         token::Token::Colon |

@@ -346,7 +346,7 @@ impl<'a> Classifier<'a> {
             token::Lifetime(..) => Class::Lifetime,
             token::Eof | token::Interpolated(..) |
-            token::Tilde | token::At | token::DotEq | token::SingleQuote => Class::None,
+            token::Tilde | token::At | token::SingleQuote => Class::None,
         };
         // Anything that didn't return above is the simple case where we the

@@ -703,7 +703,6 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
         token::At => "At",
         token::Dot => "Dot",
         token::DotDot => "DotDot",
-        token::DotEq => "DotEq",
         token::DotDotDot => "DotDotDot",
         token::DotDotEq => "DotDotEq",
         token::Comma => "Comma",

@@ -163,7 +163,6 @@ pub enum Token {
     DotDot,
     DotDotDot,
     DotDotEq,
-    DotEq, // HACK(durka42) never produced by the parser, only used for libproc_macro
     Comma,
     Semi,
     Colon,
@@ -454,7 +453,6 @@ impl Token {
             Dot => match joint {
                 Dot => DotDot,
                 DotDot => DotDotDot,
-                DotEq => DotDotEq,
                 _ => return None,
             },
             DotDot => match joint {
@@ -477,7 +475,7 @@ impl Token {
                 _ => return None,
             },
-            Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot | DotEq |
+            Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot |
             DotDotEq | Comma | Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar |
             Question | OpenDelim(..) | CloseDelim(..) => return None,
@@ -606,7 +604,6 @@ impl Token {
             (&DotDot, &DotDot) |
             (&DotDotDot, &DotDotDot) |
             (&DotDotEq, &DotDotEq) |
-            (&DotEq, &DotEq) |
             (&Comma, &Comma) |
             (&Semi, &Semi) |
             (&Colon, &Colon) |
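
The `@@ -454,7 +453,6 @@` hunk above is the token-gluing table that fuses a token with the one immediately following it. A minimal stand-alone sketch of that logic follows; `Tok` and `glue` are illustrative names rather than the real libsyntax items, and the `DotDot` + `Eq` arm is an assumption about how `..=` is still assembled once the `Dot` + `DotEq` arm is gone:

```rust
// Stand-alone sketch of dot-token gluing; not the real libsyntax types.
#[derive(Debug, PartialEq, Clone, Copy)]
enum Tok {
    Dot,
    DotDot,
    DotDotDot,
    DotDotEq,
    Eq,
}

// Try to fuse `first` with the token that immediately follows it.
fn glue(first: Tok, joint: Tok) -> Option<Tok> {
    use Tok::*;
    match first {
        Dot => match joint {
            Dot => Some(DotDot),
            DotDot => Some(DotDotDot),
            // The removed arm fused `Dot` with a `DotEq` that nothing
            // produces any more: `DotEq => Some(DotDotEq)`.
            _ => None,
        },
        DotDot => match joint {
            Dot => Some(DotDotDot),
            Eq => Some(DotDotEq), // assumed remaining path to `..=`
            _ => None,
        },
        _ => None,
    }
}

fn main() {
    assert_eq!(glue(Tok::Dot, Tok::Dot), Some(Tok::DotDot));
    assert_eq!(glue(Tok::DotDot, Tok::Eq), Some(Tok::DotDotEq));
    assert_eq!(glue(Tok::Dot, Tok::Eq), None); // `.=` itself never fuses
}
```

The `@@ -477,7 +475,7 @@` hunk is the corresponding catch-all for tokens that never fuse with anything, which is why `DotEq` also drops out of that list.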

@@ -210,7 +210,6 @@ pub fn token_to_string(tok: &Token) -> String {
         token::DotDot => "..".to_string(),
         token::DotDotDot => "...".to_string(),
         token::DotDotEq => "..=".to_string(),
-        token::DotEq => ".=".to_string(),
         token::Comma => ",".to_string(),
         token::Semi => ";".to_string(),
         token::Colon => ":".to_string(),

@@ -213,7 +213,6 @@ impl FromInternal<(TokenStream, &'_ ParseSess, &'_ mut Vec<Self>)>
                 })
             }
-            DotEq => op!('.', '='),
             OpenDelim(..) | CloseDelim(..) => unreachable!(),
             Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
         }