Keep the original token in ast::Lit

parent 28b125b83d
commit f2834a403a

10 changed files with 86 additions and 59 deletions

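Reviewer note: the heart of this commit is the `pub struct Lit` hunk further down. The AST literal now carries the original token and its optional suffix next to the already-parsed `LitKind`, so the exact source form of a literal stays available after parsing. A rough, self-contained sketch of that shape, using invented stand-in types rather than the real rustc `Symbol`, `Span`, and `token::Lit`:

// Invented stand-in types; the real compiler uses interned `Symbol`s,
// `Span`s, and the `token::Lit` enum from `parse::token`.
#[allow(dead_code)]
#[derive(Clone, Debug)]
enum LitTokenKind {
    Bool(String),    // AST-only helper variant, never produced by the lexer
    Integer(String),
    Str(String),
}

#[allow(dead_code)]
#[derive(Clone, Debug)]
enum LitKind {
    Bool(bool),
    Int(u128),
    Str(String),
}

#[derive(Clone, Debug)]
struct Lit {
    node: LitKind,            // the parsed, semantic value
    token: LitTokenKind,      // the original token, kept verbatim
    suffix: Option<String>,   // e.g. "u8" in `42u8`, if any
    span: (usize, usize),     // stand-in for `Span`
}

fn main() {
    // `42u8` keeps both its parsed value and its original spelling.
    let lit = Lit {
        node: LitKind::Int(42),
        token: LitTokenKind::Integer("42".to_string()),
        suffix: Some("u8".to_string()),
        span: (0, 4),
    };
    println!("{:?}", lit);
}
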
@@ -1353,7 +1353,7 @@ pub struct Expr {
 
 // `Expr` is used a lot. Make sure it doesn't unintentionally get bigger.
 #[cfg(target_arch = "x86_64")]
-static_assert!(MEM_SIZE_OF_EXPR: std::mem::size_of::<Expr>() == 72);
+static_assert!(MEM_SIZE_OF_EXPR: std::mem::size_of::<Expr>() == 80);
 
 impl Expr {
     pub fn precedence(&self) -> ExprPrecedence {

@@ -164,6 +164,8 @@ impl_stable_hash_for!(enum ::syntax::ast::LitIntType {
 
 impl_stable_hash_for!(struct ::syntax::ast::Lit {
     node,
+    token,
+    suffix,
     span
 });
 

@@ -284,6 +286,19 @@ for tokenstream::TokenStream {
     }
 }
 
+impl_stable_hash_for!(enum token::Lit {
+    Bool(val),
+    Byte(val),
+    Char(val),
+    Err(val),
+    Integer(val),
+    Float(val),
+    Str_(val),
+    ByteStr(val),
+    StrRaw(val, n),
+    ByteStrRaw(val, n)
+});
+
 fn hash_token<'a, 'gcx, W: StableHasherResult>(
     token: &token::Token,
     hcx: &mut StableHashingContext<'a>,

@@ -331,22 +346,8 @@ fn hash_token<'a, 'gcx, W: StableHasherResult>(
         token::Token::CloseDelim(delim_token) => {
             std_hash::Hash::hash(&delim_token, hasher);
         }
-        token::Token::Literal(ref lit, ref opt_name) => {
-            mem::discriminant(lit).hash_stable(hcx, hasher);
-            match *lit {
-                token::Lit::Byte(val) |
-                token::Lit::Char(val) |
-                token::Lit::Err(val) |
-                token::Lit::Integer(val) |
-                token::Lit::Float(val) |
-                token::Lit::Str_(val) |
-                token::Lit::ByteStr(val) => val.hash_stable(hcx, hasher),
-                token::Lit::StrRaw(val, n) |
-                token::Lit::ByteStrRaw(val, n) => {
-                    val.hash_stable(hcx, hasher);
-                    n.hash_stable(hcx, hasher);
-                }
-            };
+        token::Token::Literal(lit, opt_name) => {
+            lit.hash_stable(hcx, hasher);
             opt_name.hash_stable(hcx, hasher);
         }
 

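With `token::Lit` registered through `impl_stable_hash_for!` in the previous hunk, the hand-written discriminant-plus-payload hashing above collapses into a single `lit.hash_stable(hcx, hasher)` call. The same idea in plain std Rust, as a loose stand-in for rustc's `StableHasher` machinery (invented enum, not the rustc type):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Deriving `Hash` hashes the discriminant and then each payload field,
// which is what the hand-written `mem::discriminant` + match used to do.
#[allow(dead_code)]
#[derive(Hash)]
enum Lit {
    Byte(String),
    Integer(String),
    StrRaw(String, u16),
}

fn main() {
    let mut hasher = DefaultHasher::new();
    Lit::StrRaw("abc".to_string(), 3).hash(&mut hasher);
    println!("{:x}", hasher.finish());
}
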
@@ -318,6 +318,8 @@ impl<'a> Classifier<'a> {
 
             // Number literals.
             token::Integer(..) | token::Float(..) => Class::Number,
+
+            token::Bool(..) => panic!("literal token contains `Lit::Bool`"),
         }
     }
 

@@ -6,6 +6,7 @@ pub use crate::symbol::{Ident, Symbol as Name};
 pub use crate::util::parser::ExprPrecedence;
 
 use crate::ext::hygiene::{Mark, SyntaxContext};
+use crate::parse::token;
 use crate::print::pprust;
 use crate::ptr::P;
 use crate::source_map::{dummy_spanned, respan, Spanned};

@@ -1354,6 +1355,8 @@ pub enum StrStyle {
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Hash, PartialEq)]
 pub struct Lit {
     pub node: LitKind,
+    pub token: token::Lit,
+    pub suffix: Option<Symbol>,
     pub span: Span,
 }
 

@@ -350,7 +350,9 @@ impl Attribute {
 /* Constructors */
 
 pub fn mk_name_value_item_str(ident: Ident, value: Spanned<Symbol>) -> MetaItem {
-    let value = Lit { node: LitKind::Str(value.node, ast::StrStyle::Cooked), span: value.span };
+    let node = LitKind::Str(value.node, ast::StrStyle::Cooked);
+    let (token, suffix) = node.lit_token();
+    let value = Lit { node, token, suffix, span: value.span };
     mk_name_value_item(ident.span.to(value.span), ident, value)
 }
 

@@ -417,7 +419,9 @@ pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute
 
 pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, span: Span) -> Attribute {
     let style = doc_comment_style(&text.as_str());
-    let lit = Lit { node: LitKind::Str(text, ast::StrStyle::Cooked), span };
+    let node = LitKind::Str(text, ast::StrStyle::Cooked);
+    let (token, suffix) = node.lit_token();
+    let lit = Lit { node, token, suffix, span };
     Attribute {
         id,
         style,

@@ -562,7 +566,7 @@ impl MetaItemKind {
         tokens.next();
         return if let Some(TokenTree::Token(span, token)) = tokens.next() {
             LitKind::from_token(token)
-                .map(|node| MetaItemKind::NameValue(Lit { node, span }))
+                .map(|(node, token, suffix)| MetaItemKind::NameValue(Lit { node, token, suffix, span }))
         } else {
             None
         };

@@ -607,9 +611,9 @@ impl NestedMetaItem {
         where I: Iterator<Item = TokenTree>,
     {
         if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
-            if let Some(node) = LitKind::from_token(token) {
+            if let Some((node, token, suffix)) = LitKind::from_token(token) {
                 tokens.next();
-                return Some(NestedMetaItem::Literal(Lit { node, span }));
+                return Some(NestedMetaItem::Literal(Lit { node, token, suffix, span }));
             }
         }
 

@@ -625,28 +629,35 @@ impl Lit {
 
 impl LitKind {
     fn token(&self) -> Token {
+        match self.lit_token() {
+            (token::Bool(symbol), _) => Token::Ident(Ident::with_empty_ctxt(symbol), false),
+            (lit, suffix) => Token::Literal(lit, suffix),
+        }
+    }
+
+    pub(crate) fn lit_token(&self) -> (token::Lit, Option<Symbol>) {
         use std::ascii;
 
         match *self {
             LitKind::Str(string, ast::StrStyle::Cooked) => {
                 let escaped = string.as_str().escape_default().to_string();
-                Token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None)
+                (token::Lit::Str_(Symbol::intern(&escaped)), None)
             }
             LitKind::Str(string, ast::StrStyle::Raw(n)) => {
-                Token::Literal(token::Lit::StrRaw(string, n), None)
+                (token::Lit::StrRaw(string, n), None)
             }
             LitKind::ByteStr(ref bytes) => {
                 let string = bytes.iter().cloned().flat_map(ascii::escape_default)
                     .map(Into::<char>::into).collect::<String>();
-                Token::Literal(token::Lit::ByteStr(Symbol::intern(&string)), None)
+                (token::Lit::ByteStr(Symbol::intern(&string)), None)
             }
             LitKind::Byte(byte) => {
                 let string: String = ascii::escape_default(byte).map(Into::<char>::into).collect();
-                Token::Literal(token::Lit::Byte(Symbol::intern(&string)), None)
+                (token::Lit::Byte(Symbol::intern(&string)), None)
             }
             LitKind::Char(ch) => {
                 let string: String = ch.escape_default().map(Into::<char>::into).collect();
-                Token::Literal(token::Lit::Char(Symbol::intern(&string)), None)
+                (token::Lit::Char(Symbol::intern(&string)), None)
             }
             LitKind::Int(n, ty) => {
                 let suffix = match ty {

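The new `lit_token` helper in the hunk above returns the literal's token form together with its optional suffix, and the old `token()` method becomes a thin wrapper over it; the other hunks in this commit then use that pair to fill the two new `Lit` fields (`let (token, suffix) = node.lit_token();`). A minimal stand-alone sketch of that call pattern, with invented types rather than the rustc API:

// Invented stand-in types, not the rustc API.
#[allow(dead_code)]
#[derive(Clone, Copy, Debug)]
enum LitKind {
    Bool(bool),
    Int(u128),
}

#[allow(dead_code)]
#[derive(Clone, Debug)]
enum TokenLit {
    Bool(String),
    Integer(String),
}

#[derive(Debug)]
struct Lit {
    node: LitKind,
    token: TokenLit,
    suffix: Option<String>,
    span: (usize, usize),
}

impl LitKind {
    // Mirrors the role of `LitKind::lit_token`: rebuild the token form
    // of the literal plus its optional suffix.
    fn lit_token(&self) -> (TokenLit, Option<String>) {
        match *self {
            LitKind::Bool(value) => (TokenLit::Bool(value.to_string()), None),
            LitKind::Int(n) => (TokenLit::Integer(n.to_string()), None),
        }
    }
}

// The construction pattern used at the call sites touched by this commit.
fn mk_lit(node: LitKind, span: (usize, usize)) -> Lit {
    let (token, suffix) = node.lit_token();
    Lit { node, token, suffix, span }
}

fn main() {
    println!("{:?}", mk_lit(LitKind::Int(7), (0, 1)));
}
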
@@ -654,38 +665,39 @@ impl LitKind {
                     ast::LitIntType::Signed(ty) => Some(Symbol::intern(ty.ty_to_string())),
                     ast::LitIntType::Unsuffixed => None,
                 };
-                Token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), suffix)
+                (token::Lit::Integer(Symbol::intern(&n.to_string())), suffix)
             }
             LitKind::Float(symbol, ty) => {
-                Token::Literal(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
+                (token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
             }
-            LitKind::FloatUnsuffixed(symbol) => Token::Literal(token::Lit::Float(symbol), None),
-            LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(if value {
-                "true"
-            } else {
-                "false"
-            })), false),
-            LitKind::Err(val) => Token::Literal(token::Lit::Err(val), None),
+            LitKind::FloatUnsuffixed(symbol) => (token::Lit::Float(symbol), None),
+            LitKind::Bool(value) => {
+                let kw = if value { keywords::True } else { keywords::False };
+                (token::Lit::Bool(kw.name()), None)
+            }
+            LitKind::Err(val) => (token::Lit::Err(val), None),
         }
     }
 
-    fn from_token(token: Token) -> Option<LitKind> {
+    fn from_token(token: Token) -> Option<(LitKind, token::Lit, Option<Symbol>)> {
         match token {
-            Token::Ident(ident, false) if ident.name == "true" => Some(LitKind::Bool(true)),
-            Token::Ident(ident, false) if ident.name == "false" => Some(LitKind::Bool(false)),
+            Token::Ident(ident, false) if ident.name == keywords::True.name() =>
+                Some((LitKind::Bool(true), token::Bool(ident.name), None)),
+            Token::Ident(ident, false) if ident.name == keywords::False.name() =>
+                Some((LitKind::Bool(false), token::Bool(ident.name), None)),
             Token::Interpolated(nt) => match *nt {
                 token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
-                    ExprKind::Lit(ref lit) => Some(lit.node.clone()),
+                    ExprKind::Lit(ref lit) => Some((lit.node.clone(), lit.token, lit.suffix)),
                     _ => None,
                 },
                 _ => None,
             },
             Token::Literal(lit, suf) => {
                 let (suffix_illegal, result) = parse::lit_token(lit, suf, None);
-                if suffix_illegal && suf.is_some() {
+                if result.is_none() || suffix_illegal && suf.is_some() {
                     return None;
                 }
-                result
+                Some((result.unwrap(), lit, suf))
             }
             _ => None,
         }

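`from_token` now hands back the parsed kind together with the original token and suffix. Note the guard: `result.is_none() || suffix_illegal && suf.is_some()` parses as `result.is_none() || (suffix_illegal && suf.is_some())` because `&&` binds tighter than `||`, so both a failed parse and an illegal suffix map to `None`. A tiny self-contained sketch of that guard shape, with invented names rather than the real parser:

// Invented names; a stand-in for the `from_token` / `parse_lit_token`
// guard, not the real parser. `&&` binds tighter than `||`, so this reads
// as `parse_failed || (suffix_illegal && suffix_present)`.
fn classify(parsed: Option<i64>, suffix: Option<&str>, suffix_illegal: bool)
            -> Option<(i64, Option<String>)> {
    if parsed.is_none() || suffix_illegal && suffix.is_some() {
        return None;
    }
    Some((parsed.unwrap(), suffix.map(|s| s.to_string())))
}

fn main() {
    assert_eq!(classify(Some(1), Some("u8"), false), Some((1, Some("u8".to_string()))));
    assert_eq!(classify(Some(1), Some("bad"), true), None);
    assert_eq!(classify(None, None, false), None);
    println!("ok");
}
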
@@ -698,7 +698,8 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
     }
 
     fn expr_lit(&self, span: Span, node: ast::LitKind) -> P<ast::Expr> {
-        self.expr(span, ast::ExprKind::Lit(ast::Lit { node, span }))
+        let (token, suffix) = node.lit_token();
+        self.expr(span, ast::ExprKind::Lit(ast::Lit { node, token, suffix, span }))
     }
     fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
         self.expr_lit(span, ast::LitKind::Int(i as u128,

@@ -1166,8 +1167,9 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
 
     fn meta_name_value(&self, span: Span, name: ast::Name, node: ast::LitKind)
                        -> ast::MetaItem {
+        let (token, suffix) = node.lit_token();
         attr::mk_name_value_item(span, Ident::with_empty_ctxt(name).with_span_pos(span),
-                                 ast::Lit { node, span })
+                                 ast::Lit { node, token, suffix, span })
     }
 
     fn item_use(&self, sp: Span,

@@ -376,6 +376,7 @@ crate fn lit_token(lit: token::Lit, suf: Option<Symbol>, diag: Option<(Span, &Ha
     use ast::LitKind;
 
     match lit {
+        token::Bool(_) => panic!("literal token contains `Lit::Bool`"),
        token::Byte(i) => {
            let lit_kind = match unescape_byte(&i.as_str()) {
                Ok(c) => LitKind::Byte(c),

@@ -2070,11 +2070,11 @@ impl<'a> Parser<'a> {
     }
 
     /// Matches `token_lit = LIT_INTEGER | ...`.
-    fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
+    fn parse_lit_token(&mut self) -> PResult<'a, (LitKind, token::Lit, Option<Symbol>)> {
         let out = match self.token {
             token::Interpolated(ref nt) => match **nt {
                 token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
-                    ExprKind::Lit(ref lit) => { lit.node.clone() }
+                    ExprKind::Lit(ref lit) => { (lit.node.clone(), lit.token, lit.suffix) }
                     _ => { return self.unexpected_last(&self.token); }
                 },
                 _ => { return self.unexpected_last(&self.token); }

@@ -2088,19 +2088,19 @@ impl<'a> Parser<'a> {
                     self.expect_no_suffix(sp, &format!("a {}", lit.literal_name()), suf)
                 }
 
-                result.unwrap()
+                (result.unwrap(), lit, suf)
             }
             token::Dot if self.look_ahead(1, |t| match t {
-                token::Literal(parse::token::Lit::Integer(_) , _) => true,
+                token::Literal(token::Lit::Integer(_) , _) => true,
                 _ => false,
             }) => { // recover from `let x = .4;`
                 let lo = self.span;
                 self.bump();
                 if let token::Literal(
-                    parse::token::Lit::Integer(val),
+                    token::Lit::Integer(val),
                     suffix,
                 ) = self.token {
-                    let suffix = suffix.and_then(|s| {
+                    let float_suffix = suffix.and_then(|s| {
                         let s = s.as_str();
                         if s == "f32" {
                             Some("f32")

@@ -2117,14 +2117,14 @@ impl<'a> Parser<'a> {
                         err.span_suggestion(
                             sp,
                             "must have an integer part",
-                            format!("0.{}{}", val, suffix),
+                            format!("0.{}{}", val, float_suffix),
                             Applicability::MachineApplicable,
                         );
                         err.emit();
-                        return Ok(match suffix {
-                            "f32" => ast::LitKind::Float(val, ast::FloatTy::F32),
-                            "f64" => ast::LitKind::Float(val, ast::FloatTy::F64),
-                            _ => ast::LitKind::FloatUnsuffixed(val),
+                        return Ok(match float_suffix {
+                            "f32" => (ast::LitKind::Float(val, ast::FloatTy::F32), token::Float(val), suffix),
+                            "f64" => (ast::LitKind::Float(val, ast::FloatTy::F64), token::Float(val), suffix),
+                            _ => (ast::LitKind::FloatUnsuffixed(val), token::Float(val), suffix),
                         });
                     } else {
                         unreachable!();

@@ -2140,14 +2140,14 @@ impl<'a> Parser<'a> {
     /// Matches `lit = true | false | token_lit`.
     crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
         let lo = self.span;
-        let node = if self.eat_keyword(keywords::True) {
-            LitKind::Bool(true)
+        let (node, token, suffix) = if self.eat_keyword(keywords::True) {
+            (LitKind::Bool(true), token::Bool(keywords::True.name()), None)
         } else if self.eat_keyword(keywords::False) {
-            LitKind::Bool(false)
+            (LitKind::Bool(false), token::Bool(keywords::False.name()), None)
         } else {
             self.parse_lit_token()?
         };
-        Ok(Lit { node, span: lo.to(self.prev_span) })
+        Ok(Lit { node, token, suffix, span: lo.to(self.prev_span) })
     }
 
     /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).

@@ -61,6 +61,7 @@ impl DelimToken {
 
 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
 pub enum Lit {
+    Bool(ast::Name), // AST only, must never appear in a `Token`
     Byte(ast::Name),
     Char(ast::Name),
     Err(ast::Name),

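The comment on the new variant states the invariant: `Lit::Bool` exists only so the AST-level literal can record the original token of `true`/`false`; the lexer never produces it, which is why the other hunks in this commit match it with `panic!("literal token contains `Lit::Bool`")` in token-only code paths. A small sketch of that pattern, with invented types:

#[allow(dead_code)]
enum Lit {
    // Recorded on AST literals for `true`/`false`; the lexer never emits it.
    Bool(bool),
    Integer(i64),
}

// Code paths that only ever see lexer-produced literal tokens treat `Bool`
// as unreachable, mirroring the `panic!` arms added elsewhere in this commit.
fn literal_name(lit: Lit) -> &'static str {
    match lit {
        Lit::Bool(_) => panic!("literal token contains `Lit::Bool`"),
        Lit::Integer(_) => "integer literal",
    }
}

fn main() {
    println!("{}", literal_name(Lit::Integer(42)));
}
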
@@ -72,9 +73,13 @@ pub enum Lit {
     ByteStrRaw(ast::Name, u16), /* raw byte str delimited by n hash symbols */
 }
 
+#[cfg(target_arch = "x86_64")]
+static_assert!(MEM_SIZE_OF_LIT: mem::size_of::<Lit>() == 8);
+
 impl Lit {
     crate fn literal_name(&self) -> &'static str {
         match *self {
+            Bool(_) => panic!("literal token contains `Lit::Bool`"),
             Byte(_) => "byte literal",
             Char(_) => "char literal",
             Err(_) => "invalid literal",

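The hunk above also asserts that adding the `Bool` variant keeps `token::Lit` at 8 bytes on x86_64, since every variant carries an interned `ast::Name` (a small index) plus at most a `u16`. A stand-alone way to eyeball that kind of layout check with `std::mem::size_of`, using stand-in types rather than the real ones:

use std::mem;

// Stand-in for rustc's interned `ast::Name` (a small u32 index).
#[allow(dead_code)]
#[derive(Clone, Copy)]
struct Name(u32);

// Same payload shapes as the variants shown in the hunk above: a name,
// plus a u16 hash count for the raw-string variants.
#[allow(dead_code)]
#[derive(Clone, Copy)]
enum Lit {
    Bool(Name),
    Byte(Name),
    Integer(Name),
    StrRaw(Name, u16),
}

fn main() {
    // This should print 8 on x86_64, matching the `static_assert!` in the diff.
    println!("size_of::<Lit>() = {}", mem::size_of::<Lit>());
}
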
@@ -225,6 +225,7 @@ pub fn token_to_string(tok: &Token) -> String {
         /* Literals */
         token::Literal(lit, suf) => {
             let mut out = match lit {
+                token::Bool(_) => panic!("literal token contains `Lit::Bool`"),
                 token::Byte(b) => format!("b'{}'", b),
                 token::Char(c) => format!("'{}'", c),
                 token::Err(c) => format!("'{}'", c),