Simplify conversions between tokens and semantic literals
commit 8739668438 (parent a5b3f33cb9)
10 changed files with 260 additions and 325 deletions
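For orientation before the hunks: this commit replaces the old helpers (`LitKind::lit_token`, `LitKind::from_token` and the free function `parse::lit_token`) with four conversion methods. A summary sketch with signatures collected from the hunks below — all of it is internal to libsyntax, so this is a reading aid, not a standalone example:

    Lit::from_token(&Token, Span, Option<(Span, &Handler)>) -> Option<Lit>    // token -> AST literal (speculative)
    Lit::from_lit_kind(LitKind, Span) -> Lit                                  // semantic literal -> AST literal (infallible)
    LitKind::from_lit_token(token::Lit, Option<Symbol>, Option<(Span, &Handler)>) -> Option<LitKind>
    LitKind::to_lit_token(&self) -> (token::Lit, Option<Symbol>)              // semantic literal -> token + suffix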
@@ -5,7 +5,7 @@ use syntax::parse::ParseSess;
 use syntax::parse::lexer::comments;
 use syntax::print::pp::{self, Breaks};
 use syntax::print::pp::Breaks::{Consistent, Inconsistent};
-use syntax::print::pprust::PrintState;
+use syntax::print::pprust::{self, PrintState};
 use syntax::ptr::P;
 use syntax::symbol::keywords;
 use syntax::util::parser::{self, AssocOp, Fixity};
@@ -15,7 +15,6 @@ use crate::hir;
 use crate::hir::{PatKind, GenericBound, TraitBoundModifier, RangeEnd};
 use crate::hir::{GenericParam, GenericParamKind, GenericArg};

-use std::ascii;
 use std::borrow::Cow;
 use std::cell::Cell;
 use std::io::{self, Write, Read};
@@ -1251,57 +1250,8 @@ impl<'a> State<'a> {

     fn print_literal(&mut self, lit: &hir::Lit) -> io::Result<()> {
         self.maybe_print_comment(lit.span.lo())?;
-        match lit.node {
-            hir::LitKind::Str(st, style) => self.print_string(&st.as_str(), style),
-            hir::LitKind::Err(st) => {
-                let st = st.as_str().escape_debug().to_string();
-                let mut res = String::with_capacity(st.len() + 2);
-                res.push('\'');
-                res.push_str(&st);
-                res.push('\'');
-                self.writer().word(res)
-            }
-            hir::LitKind::Byte(byte) => {
-                let mut res = String::from("b'");
-                res.extend(ascii::escape_default(byte).map(|c| c as char));
-                res.push('\'');
-                self.writer().word(res)
-            }
-            hir::LitKind::Char(ch) => {
-                let mut res = String::from("'");
-                res.extend(ch.escape_default());
-                res.push('\'');
-                self.writer().word(res)
-            }
-            hir::LitKind::Int(i, t) => {
-                match t {
-                    ast::LitIntType::Signed(st) => {
-                        self.writer().word(st.val_to_string(i as i128))
-                    }
-                    ast::LitIntType::Unsigned(ut) => {
-                        self.writer().word(ut.val_to_string(i))
-                    }
-                    ast::LitIntType::Unsuffixed => {
-                        self.writer().word(i.to_string())
-                    }
-                }
-            }
-            hir::LitKind::Float(ref f, t) => {
-                self.writer().word(format!("{}{}", &f, t.ty_to_string()))
-            }
-            hir::LitKind::FloatUnsuffixed(ref f) => self.writer().word(f.as_str().to_string()),
-            hir::LitKind::Bool(val) => {
-                if val { self.writer().word("true") } else { self.writer().word("false") }
-            }
-            hir::LitKind::ByteStr(ref v) => {
-                let mut escaped: String = String::new();
-                for &ch in v.iter() {
-                    escaped.extend(ascii::escape_default(ch)
-                                        .map(|c| c as char));
-                }
-                self.writer().word(format!("b\"{}\"", escaped))
-            }
-        }
+        let (token, suffix) = lit.node.to_lit_token();
+        self.writer().word(pprust::literal_to_string(token, suffix))
     }

     pub fn print_expr(&mut self, expr: &hir::Expr) -> io::Result<()> {
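The HIR pretty-printer no longer re-implements escaping for each literal kind; it recovers a literal token and reuses the AST printer. The two added calls, annotated (a sketch of the same lines, not new behaviour):

    let (token, suffix) = lit.node.to_lit_token();                  // hir::LitKind -> token::Lit + optional suffix
    self.writer().word(pprust::literal_to_string(token, suffix))    // render the token exactly as source text

This is also why `use std::ascii;` disappears above and why `literal_to_string` becomes `pub` in the pprust hunk near the end of this diff.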
@@ -591,13 +591,10 @@ mod test {
             let mi = dummy_meta_item_word("all");
             assert_eq!(Cfg::parse(&mi), Ok(word_cfg("all")));

-            let node = LitKind::Str(Symbol::intern("done"), StrStyle::Cooked);
-            let (token, suffix) = node.lit_token();
-            let mi = MetaItem {
-                path: Path::from_ident(Ident::from_str("all")),
-                node: MetaItemKind::NameValue(Lit { node, token, suffix, span: DUMMY_SP }),
-                span: DUMMY_SP,
-            };
+            let mi = attr::mk_name_value_item_str(
+                Ident::from_str("all"),
+                dummy_spanned(Symbol::intern("done"))
+            );
             assert_eq!(Cfg::parse(&mi), Ok(name_value_cfg("all", "done")));

             let mi = dummy_meta_item_list!(all, [a, b]);
@@ -625,13 +622,12 @@ mod test {
     #[test]
     fn test_parse_err() {
         with_globals(|| {
-            let node = LitKind::Bool(false);
-            let (token, suffix) = node.lit_token();
-            let mi = MetaItem {
-                path: Path::from_ident(Ident::from_str("foo")),
-                node: MetaItemKind::NameValue(Lit { node, token, suffix, span: DUMMY_SP }),
-                span: DUMMY_SP,
-            };
+            let mi = attr::mk_name_value_item(
+                DUMMY_SP,
+                Ident::from_str("foo"),
+                LitKind::Bool(false),
+                DUMMY_SP,
+            );
             assert!(Cfg::parse(&mi).is_err());

             let mi = dummy_meta_item_list!(not, [a, b]);
@@ -14,7 +14,7 @@ pub use StabilityLevel::*;
 use crate::ast;
 use crate::ast::{AttrId, Attribute, AttrStyle, Name, Ident, Path, PathSegment};
 use crate::ast::{MetaItem, MetaItemKind, NestedMetaItem};
-use crate::ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind, GenericParam};
+use crate::ast::{Lit, LitKind, Expr, Item, Local, Stmt, StmtKind, GenericParam};
 use crate::mut_visit::visit_clobber;
 use crate::source_map::{BytePos, Spanned, dummy_spanned};
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
@@ -27,9 +27,11 @@ use crate::ThinVec;
 use crate::tokenstream::{TokenStream, TokenTree, DelimSpan};
 use crate::GLOBALS;

+use errors::Handler;
 use log::debug;
 use syntax_pos::{FileName, Span};

+use std::ascii;
 use std::iter;
 use std::ops::DerefMut;

@@ -350,14 +352,13 @@ impl Attribute {
 /* Constructors */

 pub fn mk_name_value_item_str(ident: Ident, value: Spanned<Symbol>) -> MetaItem {
-    let node = LitKind::Str(value.node, ast::StrStyle::Cooked);
-    let (token, suffix) = node.lit_token();
-    let value = Lit { node, token, suffix, span: value.span };
-    mk_name_value_item(ident.span.to(value.span), ident, value)
+    let lit_kind = LitKind::Str(value.node, ast::StrStyle::Cooked);
+    mk_name_value_item(ident.span.to(value.span), ident, lit_kind, value.span)
 }

-pub fn mk_name_value_item(span: Span, ident: Ident, value: Lit) -> MetaItem {
-    MetaItem { path: Path::from_ident(ident), span, node: MetaItemKind::NameValue(value) }
+pub fn mk_name_value_item(span: Span, ident: Ident, lit_kind: LitKind, lit_span: Span) -> MetaItem {
+    let lit = Lit::from_lit_kind(lit_kind, lit_span);
+    MetaItem { path: Path::from_ident(ident), span, node: MetaItemKind::NameValue(lit) }
 }

 pub fn mk_list_item(span: Span, ident: Ident, items: Vec<NestedMetaItem>) -> MetaItem {
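Callers of `mk_name_value_item` now hand over a `LitKind` plus the literal's span and let the constructor build the token pair via `Lit::from_lit_kind`. A usage sketch mirroring the rustdoc test hunks above (spans and identifiers taken from those tests):

    // all = "done"
    let mi = attr::mk_name_value_item_str(Ident::from_str("all"),
                                          dummy_spanned(Symbol::intern("done")));
    // foo = false, passing the literal kind and spans explicitly
    let mi = attr::mk_name_value_item(DUMMY_SP, Ident::from_str("foo"),
                                      LitKind::Bool(false), DUMMY_SP);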
@@ -419,9 +420,8 @@ pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute

 pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, span: Span) -> Attribute {
     let style = doc_comment_style(&text.as_str());
-    let node = LitKind::Str(text, ast::StrStyle::Cooked);
-    let (token, suffix) = node.lit_token();
-    let lit = Lit { node, token, suffix, span };
+    let lit_kind = LitKind::Str(text, ast::StrStyle::Cooked);
+    let lit = Lit::from_lit_kind(lit_kind, span);
     Attribute {
         id,
         style,
@@ -565,9 +565,7 @@ impl MetaItemKind {
             Some(TokenTree::Token(_, token::Eq)) => {
                 tokens.next();
                 return if let Some(TokenTree::Token(span, token)) = tokens.next() {
-                    LitKind::from_token(token).map(|(node, token, suffix)| {
-                        MetaItemKind::NameValue(Lit { node, token, suffix, span })
-                    })
+                    Lit::from_token(&token, span, None).map(MetaItemKind::NameValue)
                 } else {
                     None
                 };
@@ -612,9 +610,9 @@ impl NestedMetaItem {
         where I: Iterator<Item = TokenTree>,
     {
         if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
-            if let Some((node, token, suffix)) = LitKind::from_token(token) {
+            if let Some(lit) = Lit::from_token(&token, span, None) {
                 tokens.next();
-                return Some(NestedMetaItem::Literal(Lit { node, token, suffix, span }));
+                return Some(NestedMetaItem::Literal(lit));
             }
         }

@@ -624,21 +622,19 @@ impl NestedMetaItem {

 impl Lit {
     crate fn tokens(&self) -> TokenStream {
-        TokenTree::Token(self.span, self.node.token()).into()
+        let token = match self.token {
+            token::Bool(symbol) => Token::Ident(Ident::with_empty_ctxt(symbol), false),
+            token => Token::Literal(token, self.suffix),
+        };
+        TokenTree::Token(self.span, token).into()
     }
 }

 impl LitKind {
-    fn token(&self) -> Token {
-        match self.lit_token() {
-            (token::Bool(symbol), _) => Token::Ident(Ident::with_empty_ctxt(symbol), false),
-            (lit, suffix) => Token::Literal(lit, suffix),
-        }
-    }
-
-    pub fn lit_token(&self) -> (token::Lit, Option<Symbol>) {
-        use std::ascii;
-
+    /// Attempts to recover a token from semantic literal.
+    /// This function is used when the original token doesn't exist (e.g. the literal is created
+    /// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
+    pub fn to_lit_token(&self) -> (token::Lit, Option<Symbol>) {
         match *self {
             LitKind::Str(string, ast::StrStyle::Cooked) => {
                 let escaped = string.as_str().escape_default().to_string();
@@ -679,29 +675,45 @@
             LitKind::Err(val) => (token::Lit::Err(val), None),
         }
     }
+}

-    fn from_token(token: Token) -> Option<(LitKind, token::Lit, Option<Symbol>)> {
-        match token {
-            Token::Ident(ident, false) if ident.name == keywords::True.name() =>
-                Some((LitKind::Bool(true), token::Bool(ident.name), None)),
-            Token::Ident(ident, false) if ident.name == keywords::False.name() =>
-                Some((LitKind::Bool(false), token::Bool(ident.name), None)),
-            Token::Interpolated(nt) => match *nt {
-                token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
-                    ExprKind::Lit(ref lit) => Some((lit.node.clone(), lit.token, lit.suffix)),
-                    _ => None,
-                },
-                _ => None,
-            },
-            Token::Literal(lit, suf) => {
-                let (suffix_illegal, result) = parse::lit_token(lit, suf, None);
-                if result.is_none() || suffix_illegal && suf.is_some() {
-                    return None;
+impl Lit {
+    /// Converts literal token with a suffix into an AST literal.
+    /// Works speculatively and may return `None` is diagnostic handler is not passed.
+    /// If diagnostic handler is passed, may return `Some`,
+    /// possibly after reporting non-fatal errors and recovery, or `None` for irrecoverable errors.
+    crate fn from_token(
+        token: &token::Token,
+        span: Span,
+        diag: Option<(Span, &Handler)>,
+    ) -> Option<Lit> {
+        let (token, suffix) = match *token {
+            token::Ident(ident, false) if ident.name == keywords::True.name() ||
+                                          ident.name == keywords::False.name() =>
+                (token::Bool(ident.name), None),
+            token::Literal(token, suffix) =>
+                (token, suffix),
+            token::Interpolated(ref nt) => {
+                if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt {
+                    if let ast::ExprKind::Lit(lit) = &expr.node {
+                        return Some(lit.clone());
+                    }
                 }
-                Some((result.unwrap(), lit, suf))
+                return None;
             }
-            _ => None,
-        }
+            _ => return None,
+        };
+
+        let node = LitKind::from_lit_token(token, suffix, diag)?;
+        Some(Lit { node, token, suffix, span })
+    }
+
+    /// Attempts to recover an AST literal from semantic literal.
+    /// This function is used when the original token doesn't exist (e.g. the literal is created
+    /// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
+    pub fn from_lit_kind(node: LitKind, span: Span) -> Lit {
+        let (token, suffix) = node.to_lit_token();
+        Lit { node, token, suffix, span }
     }
 }

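The two constructors split the conversion by direction: `from_token` goes token -> AST literal and can fail (hence `Option`, with errors reported only when a handler is passed), while `from_lit_kind` goes semantic literal -> AST literal and always succeeds because it can synthesize the token. A sketch of how the other hunks in this commit call them (`token` and `span` are whatever the caller has at hand):

    if let Some(lit) = Lit::from_token(&token, span, None) {      // attr/meta-item parsing, no diagnostics
        // lit.node is the LitKind; lit.token/lit.suffix keep the token form
    }
    let lit = Lit::from_lit_kind(LitKind::Bool(false), span);     // AST builders, doc-comment attributes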
@@ -697,9 +697,9 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         self.expr_struct(span, self.path_ident(span, id), fields)
     }

-    fn expr_lit(&self, span: Span, node: ast::LitKind) -> P<ast::Expr> {
-        let (token, suffix) = node.lit_token();
-        self.expr(span, ast::ExprKind::Lit(ast::Lit { node, token, suffix, span }))
+    fn expr_lit(&self, span: Span, lit_kind: ast::LitKind) -> P<ast::Expr> {
+        let lit = ast::Lit::from_lit_kind(lit_kind, span);
+        self.expr(span, ast::ExprKind::Lit(lit))
     }
     fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
         self.expr_lit(span, ast::LitKind::Int(i as u128,
@@ -1165,11 +1165,10 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         attr::mk_list_item(sp, Ident::with_empty_ctxt(name).with_span_pos(sp), mis)
     }

-    fn meta_name_value(&self, span: Span, name: ast::Name, node: ast::LitKind)
+    fn meta_name_value(&self, span: Span, name: ast::Name, lit_kind: ast::LitKind)
                        -> ast::MetaItem {
-        let (token, suffix) = node.lit_token();
         attr::mk_name_value_item(span, Ident::with_empty_ctxt(name).with_span_pos(span),
-                                 ast::Lit { node, token, suffix, span })
+                                 lit_kind, span)
     }

     fn item_use(&self, sp: Span,
@@ -1,11 +1,11 @@
 //! The main parser interface.

-use crate::ast::{self, CrateConfig, NodeId};
+use crate::ast::{self, CrateConfig, LitKind, NodeId};
 use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
 use crate::source_map::{SourceMap, FilePathMapping};
 use crate::feature_gate::UnstableFeatures;
 use crate::parse::parser::Parser;
-use crate::symbol::Symbol;
+use crate::symbol::{keywords, Symbol};
 use crate::syntax::parse::parser::emit_unclosed_delims;
 use crate::tokenstream::{TokenStream, TokenTree};
 use crate::diagnostics::plugin::ErrorMap;
@@ -371,97 +371,151 @@ macro_rules! err {
     }
 }

-crate fn lit_token(lit: token::Lit, suf: Option<Symbol>, diag: Option<(Span, &Handler)>)
-                 -> (bool /* suffix illegal? */, Option<ast::LitKind>) {
-    use ast::LitKind;
-
-    match lit {
-        token::Bool(_) => panic!("literal token contains `Lit::Bool`"),
-        token::Byte(i) => {
-            let lit_kind = match unescape_byte(&i.as_str()) {
-                Ok(c) => LitKind::Byte(c),
-                Err(_) => LitKind::Err(i),
-            };
-            (true, Some(lit_kind))
-        },
-        token::Char(i) => {
-            let lit_kind = match unescape_char(&i.as_str()) {
-                Ok(c) => LitKind::Char(c),
-                Err(_) => LitKind::Err(i),
-            };
-            (true, Some(lit_kind))
-        },
-        token::Err(i) => (true, Some(LitKind::Err(i))),
-
-        // There are some valid suffixes for integer and float literals,
-        // so all the handling is done internally.
-        token::Integer(s) => (false, integer_lit(&s.as_str(), suf, diag)),
-        token::Float(s) => (false, float_lit(&s.as_str(), suf, diag)),
-
-        token::Str_(mut sym) => {
-            // If there are no characters requiring special treatment we can
-            // reuse the symbol from the Token. Otherwise, we must generate a
-            // new symbol because the string in the LitKind is different to the
-            // string in the Token.
-            let mut has_error = false;
-            let s = &sym.as_str();
-            if s.as_bytes().iter().any(|&c| c == b'\\' || c == b'\r') {
-                let mut buf = String::with_capacity(s.len());
-                unescape_str(s, &mut |_, unescaped_char| {
-                    match unescaped_char {
-                        Ok(c) => buf.push(c),
-                        Err(_) => has_error = true,
-                    }
-                });
-                if has_error {
-                    return (true, Some(LitKind::Err(sym)));
-                }
-                sym = Symbol::intern(&buf)
-            }
-
-            (true, Some(LitKind::Str(sym, ast::StrStyle::Cooked)))
-        }
-        token::StrRaw(mut sym, n) => {
-            // Ditto.
-            let s = &sym.as_str();
-            if s.contains('\r') {
-                sym = Symbol::intern(&raw_str_lit(s));
-            }
-            (true, Some(LitKind::Str(sym, ast::StrStyle::Raw(n))))
-        }
-        token::ByteStr(i) => {
-            let s = &i.as_str();
-            let mut buf = Vec::with_capacity(s.len());
-            let mut has_error = false;
-            unescape_byte_str(s, &mut |_, unescaped_byte| {
-                match unescaped_byte {
-                    Ok(c) => buf.push(c),
-                    Err(_) => has_error = true,
-                }
-            });
-            if has_error {
-                return (true, Some(LitKind::Err(i)));
-            }
-            buf.shrink_to_fit();
-            (true, Some(LitKind::ByteStr(Lrc::new(buf))))
-        }
-        token::ByteStrRaw(i, _) => {
-            (true, Some(LitKind::ByteStr(Lrc::new(i.to_string().into_bytes()))))
-        }
+crate fn expect_no_suffix(sp: Span, diag: &Handler, kind: &str, suffix: Option<ast::Name>) {
+    match suffix {
+        None => {/* everything ok */}
+        Some(suf) => {
+            let text = suf.as_str();
+            if text.is_empty() {
+                diag.span_bug(sp, "found empty literal suffix in Some")
+            }
+            let mut err = if kind == "a tuple index" &&
+                ["i32", "u32", "isize", "usize"].contains(&text.to_string().as_str())
+            {
+                // #59553: warn instead of reject out of hand to allow the fix to percolate
+                // through the ecosystem when people fix their macros
+                let mut err = diag.struct_span_warn(
+                    sp,
+                    &format!("suffixes on {} are invalid", kind),
+                );
+                err.note(&format!(
+                    "`{}` is *temporarily* accepted on tuple index fields as it was \
+                        incorrectly accepted on stable for a few releases",
+                    text,
+                ));
+                err.help(
+                    "on proc macros, you'll want to use `syn::Index::from` or \
+                        `proc_macro::Literal::*_unsuffixed` for code that will desugar \
+                        to tuple field access",
+                );
+                err.note(
+                    "for more context, see https://github.com/rust-lang/rust/issues/60210",
+                );
+                err
+            } else {
+                diag.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
+            };
+            err.span_label(sp, format!("invalid suffix `{}`", text));
+            err.emit();
+        }
+    }
+}
+
+impl LitKind {
+    /// Converts literal token with a suffix into a semantic literal.
+    /// Works speculatively and may return `None` is diagnostic handler is not passed.
+    /// If diagnostic handler is passed, always returns `Some`,
+    /// possibly after reporting non-fatal errors and recovery.
+    crate fn from_lit_token(
+        lit: token::Lit,
+        suf: Option<Symbol>,
+        diag: Option<(Span, &Handler)>
+    ) -> Option<LitKind> {
+        if suf.is_some() && !lit.may_have_suffix() {
+            err!(diag, |span, diag| {
+                expect_no_suffix(span, diag, &format!("a {}", lit.literal_name()), suf)
+            });
+        }
+
+        Some(match lit {
+            token::Bool(i) => {
+                assert!(i == keywords::True.name() || i == keywords::False.name());
+                LitKind::Bool(i == keywords::True.name())
+            }
+            token::Byte(i) => {
+                match unescape_byte(&i.as_str()) {
+                    Ok(c) => LitKind::Byte(c),
+                    Err(_) => LitKind::Err(i),
+                }
+            },
+            token::Char(i) => {
+                match unescape_char(&i.as_str()) {
+                    Ok(c) => LitKind::Char(c),
+                    Err(_) => LitKind::Err(i),
+                }
+            },
+            token::Err(i) => LitKind::Err(i),
+
+            // There are some valid suffixes for integer and float literals,
+            // so all the handling is done internally.
+            token::Integer(s) => return integer_lit(&s.as_str(), suf, diag),
+            token::Float(s) => return float_lit(&s.as_str(), suf, diag),
+
+            token::Str_(mut sym) => {
+                // If there are no characters requiring special treatment we can
+                // reuse the symbol from the Token. Otherwise, we must generate a
+                // new symbol because the string in the LitKind is different to the
+                // string in the Token.
+                let mut has_error = false;
+                let s = &sym.as_str();
+                if s.as_bytes().iter().any(|&c| c == b'\\' || c == b'\r') {
+                    let mut buf = String::with_capacity(s.len());
+                    unescape_str(s, &mut |_, unescaped_char| {
+                        match unescaped_char {
+                            Ok(c) => buf.push(c),
+                            Err(_) => has_error = true,
+                        }
+                    });
+                    if has_error {
+                        return Some(LitKind::Err(sym));
+                    }
+                    sym = Symbol::intern(&buf)
+                }
+
+                LitKind::Str(sym, ast::StrStyle::Cooked)
+            }
+            token::StrRaw(mut sym, n) => {
+                // Ditto.
+                let s = &sym.as_str();
+                if s.contains('\r') {
+                    sym = Symbol::intern(&raw_str_lit(s));
+                }
+                LitKind::Str(sym, ast::StrStyle::Raw(n))
+            }
+            token::ByteStr(i) => {
+                let s = &i.as_str();
+                let mut buf = Vec::with_capacity(s.len());
+                let mut has_error = false;
+                unescape_byte_str(s, &mut |_, unescaped_byte| {
+                    match unescaped_byte {
+                        Ok(c) => buf.push(c),
+                        Err(_) => has_error = true,
+                    }
+                });
+                if has_error {
+                    return Some(LitKind::Err(i));
+                }
+                buf.shrink_to_fit();
+                LitKind::ByteStr(Lrc::new(buf))
+            }
+            token::ByteStrRaw(i, _) => {
+                LitKind::ByteStr(Lrc::new(i.to_string().into_bytes()))
+            }
+        })
     }
 }

 fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
-                      -> Option<ast::LitKind> {
+                      -> Option<LitKind> {
     debug!("filtered_float_lit: {}, {:?}", data, suffix);
     let suffix = match suffix {
         Some(suffix) => suffix,
-        None => return Some(ast::LitKind::FloatUnsuffixed(data)),
+        None => return Some(LitKind::FloatUnsuffixed(data)),
     };

     Some(match &*suffix.as_str() {
-        "f32" => ast::LitKind::Float(data, ast::FloatTy::F32),
-        "f64" => ast::LitKind::Float(data, ast::FloatTy::F64),
+        "f32" => LitKind::Float(data, ast::FloatTy::F32),
+        "f64" => LitKind::Float(data, ast::FloatTy::F64),
         suf => {
             err!(diag, |span, diag| {
                 if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
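`expect_no_suffix` moves out of `Parser` so that `LitKind::from_lit_token` can report bogus suffixes too, but only when a diagnostic handler is supplied; without one the `err!` macro keeps the conversion speculative, matching the doc comment above. The relevant lines from the hunk, annotated:

    if suf.is_some() && !lit.may_have_suffix() {
        // With `diag = Some(..)`: emits "suffixes on a <literal kind> are invalid" and recovers.
        // With `diag = None`: nothing is emitted and, per the doc comment, the conversion yields `None`.
        err!(diag, |span, diag| {
            expect_no_suffix(span, diag, &format!("a {}", lit.literal_name()), suf)
        });
    }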
@@ -477,12 +531,12 @@ fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, diag: Option<(Span,
                 }
             });

-            ast::LitKind::FloatUnsuffixed(data)
+            LitKind::FloatUnsuffixed(data)
         }
     })
 }
 fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
-             -> Option<ast::LitKind> {
+             -> Option<LitKind> {
     debug!("float_lit: {:?}, {:?}", s, suffix);
     // FIXME #2252: bounds checking float literals is deferred until trans

@@ -499,7 +553,7 @@ fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
 }

 fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
-               -> Option<ast::LitKind> {
+               -> Option<LitKind> {
     // s can only be ascii, byte indexing is fine

     // Strip underscores without allocating a new String unless necessary.
@@ -595,7 +649,7 @@ fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
               string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix);

     Some(match u128::from_str_radix(s, base) {
-        Ok(r) => ast::LitKind::Int(r, ty),
+        Ok(r) => LitKind::Int(r, ty),
         Err(_) => {
             // small bases are lexed as if they were base 10, e.g, the string
             // might be `0b10201`. This will cause the conversion above to fail,
@@ -608,7 +662,7 @@ fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
             if !already_errored {
                 err!(diag, |span, diag| diag.span_err(span, "int literal is too large"));
             }
-            ast::LitKind::Int(0, ty)
+            LitKind::Int(0, ty)
         }
     })
 }
@@ -15,7 +15,7 @@ use crate::ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
 use crate::ast::{GenericParam, GenericParamKind};
 use crate::ast::GenericArg;
 use crate::ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
-use crate::ast::{Label, Lifetime, Lit, LitKind};
+use crate::ast::{Label, Lifetime, Lit};
 use crate::ast::{Local, LocalSource};
 use crate::ast::MacStmtStyle;
 use crate::ast::{Mac, Mac_, MacDelimiter};
@@ -46,7 +46,7 @@ use crate::ptr::P;
 use crate::parse::PResult;
 use crate::ThinVec;
 use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
-use crate::symbol::{Symbol, keywords};
+use crate::symbol::{keywords, Symbol};

 use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
 use rustc_target::spec::abi::{self, Abi};
@@ -1109,43 +1109,7 @@ impl<'a> Parser<'a> {
     }

     fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
-        match suffix {
-            None => {/* everything ok */}
-            Some(suf) => {
-                let text = suf.as_str();
-                if text.is_empty() {
-                    self.span_bug(sp, "found empty literal suffix in Some")
-                }
-                let mut err = if kind == "a tuple index" &&
-                    ["i32", "u32", "isize", "usize"].contains(&text.to_string().as_str())
-                {
-                    // #59553: warn instead of reject out of hand to allow the fix to percolate
-                    // through the ecosystem when people fix their macros
-                    let mut err = self.struct_span_warn(
-                        sp,
-                        &format!("suffixes on {} are invalid", kind),
-                    );
-                    err.note(&format!(
-                        "`{}` is *temporarily* accepted on tuple index fields as it was \
-                            incorrectly accepted on stable for a few releases",
-                        text,
-                    ));
-                    err.help(
-                        "on proc macros, you'll want to use `syn::Index::from` or \
-                            `proc_macro::Literal::*_unsuffixed` for code that will desugar \
-                            to tuple field access",
-                    );
-                    err.note(
-                        "for more context, see https://github.com/rust-lang/rust/issues/60210",
-                    );
-                    err
-                } else {
-                    self.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
-                };
-                err.span_label(sp, format!("invalid suffix `{}`", text));
-                err.emit();
-            }
-        }
+        parse::expect_no_suffix(sp, &self.sess.span_diagnostic, kind, suffix)
     }

     /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
@@ -1452,9 +1416,6 @@ impl<'a> Parser<'a> {
     crate fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
         self.sess.span_diagnostic.struct_span_err(sp, m)
     }
-    fn struct_span_warn<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
-        self.sess.span_diagnostic.struct_span_warn(sp, m)
-    }
     crate fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
         self.sess.span_diagnostic.span_bug(sp, m)
     }
@@ -2069,85 +2030,45 @@ impl<'a> Parser<'a> {
         }
     }

-    /// Matches `token_lit = LIT_INTEGER | ...`.
-    fn parse_lit_token(&mut self) -> PResult<'a, (LitKind, token::Lit, Option<Symbol>)> {
-        let out = match self.token {
-            token::Interpolated(ref nt) => match **nt {
-                token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
-                    ExprKind::Lit(ref lit) => { (lit.node.clone(), lit.token, lit.suffix) }
-                    _ => { return self.unexpected_last(&self.token); }
-                },
-                _ => { return self.unexpected_last(&self.token); }
-            },
-            token::Literal(lit, suf) => {
-                let diag = Some((self.span, &self.sess.span_diagnostic));
-                let (suffix_illegal, result) = parse::lit_token(lit, suf, diag);
-
-                if suffix_illegal {
-                    let sp = self.span;
-                    self.expect_no_suffix(sp, &format!("a {}", lit.literal_name()), suf)
-                }
-
-                (result.unwrap(), lit, suf)
-            }
-            token::Dot if self.look_ahead(1, |t| match t {
-                token::Literal(token::Lit::Integer(_) , _) => true,
-                _ => false,
-            }) => { // recover from `let x = .4;`
-                let lo = self.span;
-                self.bump();
-                if let token::Literal(
-                    token::Lit::Integer(val),
-                    suffix,
-                ) = self.token {
-                    let float_suffix = suffix.and_then(|s| {
-                        let s = s.as_str();
-                        if s == "f32" {
-                            Some("f32")
-                        } else if s == "f64" {
-                            Some("f64")
-                        } else {
-                            None
-                        }
-                    }).unwrap_or("");
-                    self.bump();
-                    let sp = lo.to(self.prev_span);
-                    let mut err = self.diagnostic()
-                        .struct_span_err(sp, "float literals must have an integer part");
-                    err.span_suggestion(
-                        sp,
-                        "must have an integer part",
-                        format!("0.{}{}", val, float_suffix),
-                        Applicability::MachineApplicable,
-                    );
-                    err.emit();
-                    return Ok((match float_suffix {
-                        "f32" => ast::LitKind::Float(val, ast::FloatTy::F32),
-                        "f64" => ast::LitKind::Float(val, ast::FloatTy::F64),
-                        _ => ast::LitKind::FloatUnsuffixed(val),
-                    }, token::Float(val), suffix));
-                } else {
-                    unreachable!();
-                };
-            }
-            _ => { return self.unexpected_last(&self.token); }
-        };
-
-        self.bump();
-        Ok(out)
-    }
-
     /// Matches `lit = true | false | token_lit`.
     crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
-        let lo = self.span;
-        let (node, token, suffix) = if self.eat_keyword(keywords::True) {
-            (LitKind::Bool(true), token::Bool(keywords::True.name()), None)
-        } else if self.eat_keyword(keywords::False) {
-            (LitKind::Bool(false), token::Bool(keywords::False.name()), None)
-        } else {
-            self.parse_lit_token()?
-        };
-        Ok(Lit { node, token, suffix, span: lo.to(self.prev_span) })
+        let diag = Some((self.span, &self.sess.span_diagnostic));
+        if let Some(lit) = Lit::from_token(&self.token, self.span, diag) {
+            self.bump();
+            return Ok(lit);
+        } else if self.token == token::Dot {
+            // Recover `.4` as `0.4`.
+            let recovered = self.look_ahead(1, |t| {
+                if let token::Literal(token::Integer(val), suf) = *t {
+                    let next_span = self.look_ahead_span(1);
+                    if self.span.hi() == next_span.lo() {
+                        let sym = String::from("0.") + &val.as_str();
+                        let token = token::Literal(token::Float(Symbol::intern(&sym)), suf);
+                        return Some((token, self.span.to(next_span)));
+                    }
+                }
+                None
+            });
+            if let Some((token, span)) = recovered {
+                self.diagnostic()
+                    .struct_span_err(span, "float literals must have an integer part")
+                    .span_suggestion(
+                        span,
+                        "must have an integer part",
+                        pprust::token_to_string(&token),
+                        Applicability::MachineApplicable,
+                    )
+                    .emit();
+                let diag = Some((span, &self.sess.span_diagnostic));
+                if let Some(lit) = Lit::from_token(&token, span, diag) {
+                    self.bump();
+                    self.bump();
+                    return Ok(lit);
+                }
+            }
+        }
+
+        self.unexpected_last(&self.token)
     }

     /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
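`parse_lit` now leads with `Lit::from_token` on the current token and keeps only one recovery path, for a float literal written without an integer part. What that recovery does, as a source-level sketch (the error and suggestion text are the ones in the hunk):

    // rejected input:            let x = .4;
    // The parser sees `.` immediately followed by the integer token `4`
    // (`self.span.hi() == next_span.lo()`), builds the float token `0.4`,
    // emits "float literals must have an integer part", and suggests the
    // machine-applicable fix:    let x = 0.4;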
@@ -90,6 +90,13 @@ impl Lit {
         }
     }

+    crate fn may_have_suffix(&self) -> bool {
+        match *self {
+            Integer(..) | Float(..) => true,
+            _ => false,
+        }
+    }
+
     // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
     fn probably_equal_for_proc_macro(&self, other: &Lit) -> bool {
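`may_have_suffix` feeds the suffix check in `LitKind::from_lit_token` above: only integer and float tokens may legitimately carry one. Illustrative literals (hypothetical inputs, not taken from this diff):

    // may_have_suffix() == true  -> suffix handled by integer_lit / float_lit:
    //     10u8      0x1Fi64      1.5f32      2e3f64
    // may_have_suffix() == false -> "suffixes on a <kind> are invalid":
    //     "text"u8      'a'f32      b"bytes"i32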
@@ -162,7 +162,7 @@ fn binop_to_string(op: BinOpToken) -> &'static str {
     }
 }

-fn literal_to_string(lit: token::Lit, suffix: Option<ast::Name>) -> String {
+pub fn literal_to_string(lit: token::Lit, suffix: Option<ast::Name>) -> String {
     let mut out = match lit {
         token::Byte(b) => format!("b'{}'", b),
         token::Char(c) => format!("'{}'", c),
@@ -2,8 +2,7 @@

 macro_rules! check {
     ($expr: expr) => (
-        #[my_attr = $expr] //~ ERROR suffixed literals are not allowed in attributes
-                           //~| ERROR unexpected token: `-0`
+        #[my_attr = $expr] //~ ERROR unexpected token: `-0`
                            //~| ERROR unexpected token: `0 + 0`
         use main as _;
     );
|
||||||
|
|
||||||
check!("0"); // OK
|
check!("0"); // OK
|
||||||
check!(0); // OK
|
check!(0); // OK
|
||||||
check!(0u8); // ERROR, see above
|
check!(0u8); //~ ERROR suffixed literals are not allowed in attributes
|
||||||
check!(-0); // ERROR, see above
|
check!(-0); // ERROR, see above
|
||||||
check!(0 + 0); // ERROR, see above
|
check!(0 + 0); // ERROR, see above
|
||||||
|
|
||||||
|
|
|
@@ -1,11 +1,8 @@
 error: suffixed literals are not allowed in attributes
-  --> $DIR/malformed-interpolated.rs:5:21
+  --> $DIR/malformed-interpolated.rs:13:8
    |
-LL | #[my_attr = $expr]
-   |             ^^^^^
-...
-LL | check!(0u8); // ERROR, see above
-   | ------------ in this macro invocation
+LL | check!(0u8);
+   |        ^^^
    |
    = help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
