Rename ast::TokenKind::Not as ast::TokenKind::Bang.
For consistency with `rustc_lexer::TokenKind::Bang`, and because other `ast::TokenKind` variants generally have syntactic names instead of semantic names (e.g. `Star` and `DotDot` instead of `Mul` and `Range`).
parent 2a1e2e9632
commit 53167c0b7f
20 changed files with 48 additions and 48 deletions
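The rename follows the convention the commit message describes: variants are named after the token's spelling, not its meaning. A minimal, hypothetical sketch of that convention (not the real `ast::TokenKind`, which has far more variants):

// Hypothetical sketch only, not the real ast::TokenKind.
// Variant names describe how the token is written, not what it does.
#[allow(dead_code)]
enum TokenKind {
    Star,   // `*`  (syntactic name, rather than the semantic `Mul`)
    DotDot, // `..` (rather than `Range`)
    Bang,   // `!`  (renamed from `Not` by this commit)
}

fn main() {}
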
@@ -360,7 +360,7 @@ pub enum TokenKind {
     /// `||`
     OrOr,
     /// `!`
-    Not,
+    Bang,
     /// `~`
     Tilde,
     // `+`

@@ -522,7 +522,7 @@ impl TokenKind {
         Some(match (self, n) {
             (Le, 1) => (Lt, Eq),
             (EqEq, 1) => (Eq, Eq),
-            (Ne, 1) => (Not, Eq),
+            (Ne, 1) => (Bang, Eq),
             (Ge, 1) => (Gt, Eq),
             (AndAnd, 1) => (And, And),
             (OrOr, 1) => (Or, Or),

@@ -604,7 +604,7 @@ impl Token {
 
     pub fn is_punct(&self) -> bool {
         match self.kind {
-            Eq | Lt | Le | EqEq | Ne | Ge | Gt | AndAnd | OrOr | Not | Tilde | Plus | Minus
+            Eq | Lt | Le | EqEq | Ne | Ge | Gt | AndAnd | OrOr | Bang | Tilde | Plus | Minus
             | Star | Slash | Percent | Caret | And | Or | Shl | Shr | PlusEq | MinusEq | StarEq
             | SlashEq | PercentEq | CaretEq | AndEq | OrEq | ShlEq | ShrEq | At | Dot | DotDot
             | DotDotDot | DotDotEq | Comma | Semi | Colon | PathSep | RArrow | LArrow

@@ -630,7 +630,7 @@ impl Token {
                 ident_can_begin_expr(name, self.span, is_raw), // value name or keyword
             OpenDelim(Parenthesis | Brace | Bracket) | // tuple, array or block
             Literal(..) | // literal
-            Not | // operator not
+            Bang | // operator not
             Minus | // unary minus
             Star | // dereference
             Or | OrOr | // closure

@@ -701,7 +701,7 @@ impl Token {
                 ident_can_begin_type(name, self.span, is_raw), // type name or keyword
             OpenDelim(Delimiter::Parenthesis) | // tuple
             OpenDelim(Delimiter::Bracket) | // array
-            Not | // never
+            Bang | // never
             Star | // raw pointer
             And | // reference
             AndAnd | // double reference

@@ -1004,8 +1004,8 @@ impl Token {
             (Gt, Ge) => ShrEq,
             (Gt, _) => return None,
 
-            (Not, Eq) => Ne,
-            (Not, _) => return None,
+            (Bang, Eq) => Ne,
+            (Bang, _) => return None,
 
             (Plus, Eq) => PlusEq,
             (Plus, _) => return None,

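The `@@ -1004` hunk above is part of the token-gluing logic: two adjacent one-character tokens can fuse into a compound token, e.g. `!` followed immediately by `=` becomes `!=`. A standalone, purely illustrative sketch of that idea (hypothetical `glue` helper, not the compiler's implementation):

// Illustrative sketch only, not rustc's code: glue two adjacent
// one-character operators into a compound operator, mirroring the
// `(Bang, Eq) => Ne` arm above (`!` + `=` becomes `!=`).
fn glue(first: char, second: char) -> Option<&'static str> {
    match (first, second) {
        ('<', '=') => Some("<="),
        ('>', '=') => Some(">="),
        ('!', '=') => Some("!="),
        ('+', '=') => Some("+="),
        _ => None,
    }
}

fn main() {
    assert_eq!(glue('!', '='), Some("!="));
    assert_eq!(glue('!', '!'), None);
}
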
@@ -651,7 +651,7 @@ impl TokenStream {
         if attr_style == AttrStyle::Inner {
             vec![
                 TokenTree::token_joint(token::Pound, span),
-                TokenTree::token_joint_hidden(token::Not, span),
+                TokenTree::token_joint_hidden(token::Bang, span),
                 body,
             ]
         } else {

@@ -317,7 +317,7 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool {
         (tt1, Tok(Token { kind: Comma | Semi | Dot, .. }, _)) if !is_punct(tt1) => false,
 
         // IDENT + `!`: `println!()`, but `if !x { ... }` needs a space after the `if`
-        (Tok(Token { kind: Ident(sym, is_raw), span }, _), Tok(Token { kind: Not, .. }, _))
+        (Tok(Token { kind: Ident(sym, is_raw), span }, _), Tok(Token { kind: Bang, .. }, _))
             if !Ident::new(*sym, *span).is_reserved() || matches!(is_raw, IdentIsRaw::Yes) =>
         {
             false

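The comment in the hunk above states the spacing rule this code implements: an identifier directly followed by `!` needs no space when it is a macro name (`println!()`), but a reserved keyword keeps the space (`if !x { ... }`). A self-contained sketch of that rule, using a hypothetical helper rather than the real `space_between` function:

// Hypothetical helper, not rustc's space_between: should a space be printed
// between an identifier and a following `!`?
fn space_before_bang(ident: &str, is_raw: bool) -> bool {
    // A small stand-in for rustc's full keyword table.
    const KEYWORDS: &[&str] = &["if", "while", "return", "match"];
    // Raw identifiers (r#if) are never keywords; otherwise keywords get a space.
    !is_raw && KEYWORDS.contains(&ident)
}

fn main() {
    assert!(!space_before_bang("println", false)); // `println!()`
    assert!(space_before_bang("if", false));       // `if !x { ... }`
}
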
@@ -896,7 +896,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
            token::Ne => "!=".into(),
            token::Ge => ">=".into(),
            token::Gt => ">".into(),
-           token::Not => "!".into(),
+           token::Bang => "!".into(),
            token::Tilde => "~".into(),
            token::OrOr => "||".into(),
            token::AndAnd => "&&".into(),

@@ -328,7 +328,7 @@ impl<'a> StripUnconfigured<'a> {
 
         // For inner attributes, we do the same thing for the `!` in `#![attr]`.
         let mut trees = if cfg_attr.style == AttrStyle::Inner {
-            let Some(TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _)) =
+            let Some(TokenTree::Token(bang_token @ Token { kind: TokenKind::Bang, .. }, _)) =
                 orig_trees.next()
             else {
                 panic!("Bad tokens for attribute {cfg_attr:?}");

@@ -432,7 +432,7 @@ fn check_nested_occurrences(
             }
             (
                 NestedMacroState::MacroRules,
-                &TokenTree::Token(Token { kind: TokenKind::Not, .. }),
+                &TokenTree::Token(Token { kind: TokenKind::Bang, .. }),
             ) => {
                 state = NestedMacroState::MacroRulesNot;
             }

@@ -690,7 +690,7 @@ fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
         && let TokenKind::Ident(ident, _) = ident.kind
         && ident == sym::compile_error
         && let mbe::TokenTree::Token(bang) = bang
-        && let TokenKind::Not = bang.kind
+        && let TokenKind::Bang = bang.kind
         && let mbe::TokenTree::Delimited(.., del) = args
         && !del.delim.skip()
     {

@@ -180,7 +180,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
                 Gt => op(">"),
                 AndAnd => op("&&"),
                 OrOr => op("||"),
-                Not => op("!"),
+                Bang => op("!"),
                 Tilde => op("~"),
                 Plus => op("+"),
                 Minus => op("-"),

@@ -322,7 +322,7 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
                 b'=' => Eq,
                 b'<' => Lt,
                 b'>' => Gt,
-                b'!' => Not,
+                b'!' => Bang,
                 b'~' => Tilde,
                 b'+' => Plus,
                 b'-' => Minus,

@@ -384,7 +384,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
             rustc_lexer::TokenKind::Colon => token::Colon,
             rustc_lexer::TokenKind::Dollar => token::Dollar,
             rustc_lexer::TokenKind::Eq => token::Eq,
-            rustc_lexer::TokenKind::Bang => token::Not,
+            rustc_lexer::TokenKind::Bang => token::Bang,
             rustc_lexer::TokenKind::Lt => token::Lt,
             rustc_lexer::TokenKind::Gt => token::Gt,
             rustc_lexer::TokenKind::Minus => token::Minus,

@@ -312,7 +312,7 @@ const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
     (",", "Comma", Some(token::Comma)),
     (";", "Semicolon", Some(token::Semi)),
     (":", "Colon", Some(token::Colon)),
-    ("!", "Exclamation Mark", Some(token::Not)),
+    ("!", "Exclamation Mark", Some(token::Bang)),
     ("?", "Question Mark", Some(token::Question)),
     (".", "Period", Some(token::Dot)),
     ("(", "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))),

@@ -130,7 +130,7 @@ impl<'a> Parser<'a> {
             assert!(this.eat(exp!(Pound)), "parse_attribute called in non-attribute position");
 
             let style =
-                if this.eat(exp!(Not)) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
+                if this.eat(exp!(Bang)) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
 
             this.expect(exp!(OpenBracket))?;
             let item = this.parse_attr_item(ForceCollect::No)?;

@@ -312,7 +312,7 @@ impl<'a> Parser<'a> {
         loop {
             let start_pos = self.num_bump_calls;
             // Only try to parse if it is an inner attribute (has `!`).
-            let attr = if self.check(exp!(Pound)) && self.look_ahead(1, |t| t == &token::Not) {
+            let attr = if self.check(exp!(Pound)) && self.look_ahead(1, |t| t == &token::Bang) {
                 Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
             } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
                 if attr_style == ast::AttrStyle::Inner {

@@ -1961,7 +1961,7 @@ impl<'a> Parser<'a> {
         &mut self,
         await_sp: Span,
     ) -> PResult<'a, P<Expr>> {
-        let (hi, expr, is_question) = if self.token == token::Not {
+        let (hi, expr, is_question) = if self.token == token::Bang {
             // Handle `await!(<expr>)`.
             self.recover_await_macro()?
         } else {

@@ -1973,7 +1973,7 @@ impl<'a> Parser<'a> {
     }
 
     fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> {
-        self.expect(exp!(Not))?;
+        self.expect(exp!(Bang))?;
         self.expect(exp!(OpenParen))?;
         let expr = self.parse_expr()?;
         self.expect(exp!(CloseParen))?;

@@ -2033,7 +2033,7 @@ impl<'a> Parser<'a> {
 
     pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> {
         let is_try = self.token.is_keyword(kw::Try);
-        let is_questionmark = self.look_ahead(1, |t| t == &token::Not); //check for !
+        let is_questionmark = self.look_ahead(1, |t| t == &token::Bang); //check for !
         let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(Delimiter::Parenthesis)); //check for (
 
         if is_try && is_questionmark && is_open {

@@ -505,7 +505,7 @@ impl<'a> Parser<'a> {
         // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
         match this.token.uninterpolate().kind {
             // `!expr`
-            token::Not => make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Not)),
+            token::Bang => make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Not)),
             // `~expr`
             token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)),
             // `-expr`

@@ -1570,7 +1570,7 @@ impl<'a> Parser<'a> {
         };
 
         // `!`, as an operator, is prefix, so we know this isn't that.
-        let (span, kind) = if self.eat(exp!(Not)) {
+        let (span, kind) = if self.eat(exp!(Bang)) {
             // MACRO INVOCATION expression
             if qself.is_some() {
                 self.dcx().emit_err(errors::MacroInvocationWithQualifiedPath(path.span));

@@ -382,7 +382,7 @@ impl<'a> Parser<'a> {
 
     /// Are we sure this could not possibly be a macro invocation?
     fn isnt_macro_invocation(&mut self) -> bool {
-        self.check_ident() && self.look_ahead(1, |t| *t != token::Not && *t != token::PathSep)
+        self.check_ident() && self.look_ahead(1, |t| *t != token::Bang && *t != token::PathSep)
     }
 
     /// Recover on encountering a struct, enum, or method definition where the user

@@ -480,7 +480,7 @@ impl<'a> Parser<'a> {
     /// Parses an item macro, e.g., `item!();`.
     fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, MacCall> {
         let path = self.parse_path(PathStyle::Mod)?; // `foo::bar`
-        self.expect(exp!(Not))?; // `!`
+        self.expect(exp!(Bang))?; // `!`
         match self.parse_delim_args() {
             // `( .. )` or `[ .. ]` (followed by `;`), or `{ .. }`.
             Ok(args) => {

@@ -540,7 +540,7 @@ impl<'a> Parser<'a> {
 
     fn parse_polarity(&mut self) -> ast::ImplPolarity {
         // Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type.
-        if self.check(exp!(Not)) && self.look_ahead(1, |t| t.can_begin_type()) {
+        if self.check(exp!(Bang)) && self.look_ahead(1, |t| t.can_begin_type()) {
            self.bump(); // `!`
            ast::ImplPolarity::Negative(self.prev_token.span)
        } else {

@@ -1579,7 +1579,7 @@ impl<'a> Parser<'a> {
             }
             let ident = this.parse_field_ident("enum", vlo)?;
 
-            if this.token == token::Not {
+            if this.token == token::Bang {
                 if let Err(err) = this.unexpected() {
                     err.with_note(fluent::parse_macro_expands_to_enum_variant).emit();
                 }

@@ -2034,7 +2034,7 @@ impl<'a> Parser<'a> {
         attrs: AttrVec,
     ) -> PResult<'a, FieldDef> {
         let name = self.parse_field_ident(adt_ty, lo)?;
-        if self.token == token::Not {
+        if self.token == token::Bang {
             if let Err(mut err) = self.unexpected() {
                 // Encounter the macro invocation
                 err.subdiagnostic(MacroExpandsToAdtField { adt_ty });

@@ -2184,7 +2184,7 @@ impl<'a> Parser<'a> {
         if self.check_keyword(exp!(MacroRules)) {
             let macro_rules_span = self.token.span;
 
-            if self.look_ahead(1, |t| *t == token::Not) && self.look_ahead(2, |t| t.is_ident()) {
+            if self.look_ahead(1, |t| *t == token::Bang) && self.look_ahead(2, |t| t.is_ident()) {
                 return IsMacroRulesItem::Yes { has_bang: true };
             } else if self.look_ahead(1, |t| (t.is_ident())) {
                 // macro_rules foo

@@ -2209,11 +2209,11 @@ impl<'a> Parser<'a> {
         self.expect_keyword(exp!(MacroRules))?; // `macro_rules`
 
         if has_bang {
-            self.expect(exp!(Not))?; // `!`
+            self.expect(exp!(Bang))?; // `!`
         }
         let ident = self.parse_ident()?;
 
-        if self.eat(exp!(Not)) {
+        if self.eat(exp!(Bang)) {
             // Handle macro_rules! foo!
             let span = self.prev_token.span;
             self.dcx().emit_err(errors::MacroNameRemoveBang { span });

@@ -767,7 +767,7 @@ impl<'a> Parser<'a> {
             self.recover_dotdotdot_rest_pat(lo)
         } else if let Some(form) = self.parse_range_end() {
             self.parse_pat_range_to(form)? // `..=X`, `...X`, or `..X`.
-        } else if self.eat(exp!(Not)) {
+        } else if self.eat(exp!(Bang)) {
             // Parse `!`
             self.psess.gated_spans.gate(sym::never_patterns, self.prev_token.span);
             PatKind::Never

@@ -823,7 +823,7 @@ impl<'a> Parser<'a> {
             };
             let span = lo.to(self.prev_token.span);
 
-            if qself.is_none() && self.check(exp!(Not)) {
+            if qself.is_none() && self.check(exp!(Bang)) {
                 self.parse_pat_mac_invoc(path)?
             } else if let Some(form) = self.parse_range_end() {
                 let begin = self.mk_expr(span, ExprKind::Path(qself, path));

@@ -1335,7 +1335,7 @@ impl<'a> Parser<'a> {
                 | token::OpenDelim(Delimiter::Brace) // A struct pattern.
                 | token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern.
                 | token::PathSep // A tuple / struct variant pattern.
-                | token::Not)) // A macro expanding to a pattern.
+                | token::Bang)) // A macro expanding to a pattern.
    }
 
    /// Parses `ident` or `ident @ pat`.

@@ -176,7 +176,7 @@ impl<'a> Parser<'a> {
         let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
             let path = this.parse_path(PathStyle::Expr)?;
 
-            if this.eat(exp!(Not)) {
+            if this.eat(exp!(Bang)) {
                 let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
                 return Ok((
                     stmt_mac,

@@ -2291,7 +2291,7 @@ fn string_to_tts_macro() {
                     Token { kind: token::Ident(name_macro_rules, IdentIsRaw::No), .. },
                     _,
                 ),
-                TokenTree::Token(Token { kind: token::Not, .. }, _),
+                TokenTree::Token(Token { kind: token::Bang, .. }, _),
                 TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _),
                 TokenTree::Delimited(.., macro_delim, macro_tts),
             ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {

@@ -25,7 +25,7 @@ pub enum TokenType {
     Gt,
     AndAnd,
     OrOr,
-    Not,
+    Bang,
     Tilde,
 
     // BinOps

@@ -172,7 +172,7 @@ impl TokenType {
             Gt,
             AndAnd,
             OrOr,
-            Not,
+            Bang,
             Tilde,
 
             Plus,

@@ -366,7 +366,7 @@ impl TokenType {
             TokenType::Gt => "`>`",
             TokenType::AndAnd => "`&&`",
             TokenType::OrOr => "`||`",
-            TokenType::Not => "`!`",
+            TokenType::Bang => "`!`",
             TokenType::Tilde => "`~`",
 
             TokenType::Plus => "`+`",

@@ -479,7 +479,7 @@ macro_rules! exp {
     (Gt) => { exp!(@tok, Gt) };
     (AndAnd) => { exp!(@tok, AndAnd) };
     (OrOr) => { exp!(@tok, OrOr) };
-    (Not) => { exp!(@tok, Not) };
+    (Bang) => { exp!(@tok, Bang) };
     (Tilde) => { exp!(@tok, Tilde) };
     (Plus) => { exp!(@tok, Plus) };
     (Minus) => { exp!(@tok, Minus) };

@@ -260,7 +260,7 @@ impl<'a> Parser<'a> {
         let mut impl_dyn_multi = false;
         let kind = if self.check(exp!(OpenParen)) {
             self.parse_ty_tuple_or_parens(lo, allow_plus)?
-        } else if self.eat(exp!(Not)) {
+        } else if self.eat(exp!(Bang)) {
             // Never type `!`
             TyKind::Never
         } else if self.eat(exp!(Star)) {

@@ -817,7 +817,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, TyKind> {
         // Simple path
         let path = self.parse_path_inner(PathStyle::Type, ty_generics)?;
-        if self.eat(exp!(Not)) {
+        if self.eat(exp!(Bang)) {
             // Macro invocation in type position
             Ok(TyKind::MacCall(P(MacCall { path, args: self.parse_delim_args()? })))
         } else if allow_plus == AllowPlus::Yes && self.check_plus() {

@@ -870,7 +870,7 @@ impl<'a> Parser<'a> {
     fn can_begin_bound(&mut self) -> bool {
         self.check_path()
             || self.check_lifetime()
-            || self.check(exp!(Not))
+            || self.check(exp!(Bang))
             || self.check(exp!(Question))
             || self.check(exp!(Tilde))
             || self.check_keyword(exp!(For))

@@ -1021,7 +1021,7 @@ impl<'a> Parser<'a> {
 
         let polarity = if self.eat(exp!(Question)) {
             BoundPolarity::Maybe(self.prev_token.span)
-        } else if self.eat(exp!(Not)) {
+        } else if self.eat(exp!(Bang)) {
             self.psess.gated_spans.gate(sym::negative_bounds, self.prev_token.span);
             BoundPolarity::Negative(self.prev_token.span)
         } else {

@@ -140,7 +140,7 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
             (DollarParen, token::Plus | token::Star | token::Question) => (false, Other),
             (DollarParen, _) => (false, DollarParenSep),
             (DollarParenSep, token::Plus | token::Star) => (false, Other),
-            (Pound, token::Not) => (false, PoundBang),
+            (Pound, token::Bang) => (false, PoundBang),
             (_, token::Ident(symbol, IdentIsRaw::No))
                 if !usually_needs_space_between_keyword_and_open_delim(*symbol, tt.span) =>
             {

@@ -1088,7 +1088,7 @@ fn force_space_before(tok: &TokenKind) -> bool {
             | TokenKind::Gt
             | TokenKind::AndAnd
             | TokenKind::OrOr
-            | TokenKind::Not
+            | TokenKind::Bang
             | TokenKind::Tilde
             | TokenKind::PlusEq
             | TokenKind::MinusEq

@@ -1131,7 +1131,7 @@ fn next_space(tok: &TokenKind) -> SpaceState {
     debug!("next_space: {:?}", tok);
 
     match tok {
-        TokenKind::Not
+        TokenKind::Bang
         | TokenKind::And
         | TokenKind::Tilde
         | TokenKind::At