Change `Token::interpolated_to_tokenstream()`.
It is currently a method of `Token`, but it is only valid to call if `self` is a `Token::Interpolated`. This commit eliminates the possibility of misuse by changing it to an associated function that takes a `Nonterminal`, which also simplifies the call sites. This requires splitting out a new function, `nonterminal_to_string`.
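To make the shape of the refactor concrete, here is a minimal, self-contained sketch using made-up types (`Tok`, `Nt`) rather than rustc's real `Token` and `Nonterminal`: a method that panics unless `self` is the expected variant becomes an associated function taking the variant's payload, so the precondition is enforced by the caller's `match` rather than by a runtime check.

```rust
// Minimal sketch with made-up types (`Tok`, `Nt`); not the real rustc API.
#[derive(Debug)]
struct Nt(String);

#[derive(Debug)]
enum Tok {
    Plain(char),
    Interpolated(Nt),
}

impl Tok {
    // Before: a method that is only valid when `self` is `Tok::Interpolated`
    // and panics otherwise -- the invariant is a runtime check.
    fn to_text_old(&self) -> String {
        match self {
            Tok::Interpolated(nt) => nt.0.clone(),
            _ => panic!("only works on interpolated tokens"),
        }
    }

    // After: an associated function that takes the payload directly, so it
    // can only be called once the caller has already matched out an `Nt`.
    fn to_text(nt: &Nt) -> String {
        nt.0.clone()
    }
}

fn main() {
    let interp = Tok::Interpolated(Nt("an expression".to_string()));
    let plain = Tok::Plain('#');

    // Old style: fine here, but `plain.to_text_old()` would panic.
    println!("{}", interp.to_text_old());

    // New style: the match at each call site makes misuse unrepresentable.
    for tok in &[interp, plain] {
        match tok {
            Tok::Interpolated(nt) => println!("{}", Tok::to_text(nt)),
            other => println!("not interpolated: {:?}", other),
        }
    }
}
```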
parent f47ec2ad5b
commit d26bf742db
4 changed files with 38 additions and 40 deletions
@@ -1131,13 +1131,13 @@ impl<'a> LoweringContext<'a> {
     fn lower_token(&mut self, token: Token, span: Span) -> TokenStream {
         match token {
-            Token::Interpolated(_) => {}
-            other => return TokenTree::Token(span, other).into(),
-        }
-
-        let tts = token.interpolated_to_tokenstream(&self.sess.parse_sess, span);
-        self.lower_token_stream(tts)
+            Token::Interpolated(nt) => {
+                let tts = Token::interpolated_to_tokenstream(&self.sess.parse_sess, nt, span);
+                self.lower_token_stream(tts)
+            }
+            other => TokenTree::Token(span, other).into(),
+        }
     }
 
     fn lower_arm(&mut self, arm: &Arm) -> hir::Arm {
         hir::Arm {
@@ -508,14 +508,8 @@ impl Token {
         }
     }
 
-    pub fn interpolated_to_tokenstream(&self, sess: &ParseSess, span: Span)
-        -> TokenStream
-    {
-        let nt = match *self {
-            Token::Interpolated(ref nt) => nt,
-            _ => panic!("only works on interpolated tokens"),
-        };
-
+    pub fn interpolated_to_tokenstream(sess: &ParseSess, nt: Lrc<(Nonterminal, LazyTokenStream)>,
+                                       span: Span) -> TokenStream {
         // An `Interpolated` token means that we have a `Nonterminal`
         // which is often a parsed AST item. At this point we now need
         // to convert the parsed AST to an actual token stream, e.g.
@@ -558,7 +552,7 @@ impl Token {
 
         let tokens_for_real = nt.1.force(|| {
             // FIXME(#43081): Avoid this pretty-print + reparse hack
-            let source = pprust::token_to_string(self);
+            let source = pprust::nonterminal_to_string(&nt.0);
             let filename = FileName::macro_expansion_source_code(&source);
             let (tokens, errors) = parse_stream_from_source_str(
                 filename, source, sess, Some(span));
@@ -4,7 +4,7 @@ use crate::ast::{Attribute, MacDelimiter, GenericArg};
 use crate::util::parser::{self, AssocOp, Fixity};
 use crate::attr;
 use crate::source_map::{self, SourceMap, Spanned};
-use crate::parse::token::{self, BinOpToken, Token};
+use crate::parse::token::{self, BinOpToken, Nonterminal, Token};
 use crate::parse::lexer::comments;
 use crate::parse::{self, ParseSess};
 use crate::print::pp::{self, Breaks};
@@ -257,7 +257,12 @@ pub fn token_to_string(tok: &Token) -> String {
         token::Comment => "/* */".to_string(),
         token::Shebang(s) => format!("/* shebang: {}*/", s),
 
-        token::Interpolated(ref nt) => match nt.0 {
+        token::Interpolated(ref nt) => nonterminal_to_string(&nt.0),
+    }
+}
+
+pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
+    match *nt {
         token::NtExpr(ref e) => expr_to_string(e),
         token::NtMeta(ref e) => meta_item_to_string(e),
         token::NtTy(ref e) => ty_to_string(e),
@@ -280,7 +285,6 @@ pub fn token_to_string(tok: &Token) -> String {
         token::NtVis(ref e) => vis_to_string(e),
         token::NtForeignItem(ref e) => foreign_item_to_string(e),
-        }
     }
 }
 
 pub fn ty_to_string(ty: &ast::Ty) -> String {
@@ -178,8 +178,8 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                 tt!(Punct::new('#', false))
             }
 
-            Interpolated(_) => {
-                let stream = token.interpolated_to_tokenstream(sess, span);
+            Interpolated(nt) => {
+                let stream = Token::interpolated_to_tokenstream(sess, nt, span);
                 TokenTree::Group(Group {
                     delimiter: Delimiter::None,
                     stream,