
Some code cleanup and tidy/test fixes

Vadim Petrochenkov 2019-06-05 14:17:56 +03:00
parent 738e14565d
commit ff40e37b98
17 changed files with 109 additions and 101 deletions

View file

@@ -56,7 +56,7 @@ extern crate syntax_pos;
extern crate rustc;
extern crate rustc_plugin;
use syntax::parse::token;
use syntax::parse::token::{self, Token};
use syntax::tokenstream::TokenTree;
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
use syntax::ext::build::AstBuilder; // A trait for expr_usize.
@@ -64,7 +64,7 @@ use syntax_pos::Span;
use rustc_plugin::Registry;
fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
-> Box<MacResult + 'static> {
-> Box<dyn MacResult + 'static> {
static NUMERALS: &'static [(&'static str, usize)] = &[
("M", 1000), ("CM", 900), ("D", 500), ("CD", 400),
@@ -80,7 +80,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
}
let text = match args[0] {
TokenTree::Token(_, token::Ident(s)) => s.to_string(),
TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
_ => {
cx.span_err(sp, "argument should be a single identifier");
return DummyResult::any(sp);
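
Note on the pattern above: throughout this commit, `TokenTree::Token` goes from carrying a span plus a bare token to wrapping a `Token` struct with named `kind` and `span` fields, so call sites now destructure `Token { kind: ..., .. }`; the `Box<MacResult + 'static>` to `Box<dyn MacResult + 'static>` change is just the explicit `dyn Trait` syntax for trait objects. A minimal free-standing sketch of the new destructuring style, using simplified stand-in types rather than the real rustc definitions:

#![allow(dead_code)]

struct Span;
enum TokenKind { Ident(String), Comma }
struct Token { kind: TokenKind, span: Span }
enum TokenTree { Token(Token), Delimited }

// Mirrors the updated `TokenTree::Token(Token { kind: token::Ident(..), .. })`
// patterns in the diff: match on the named `kind` field and ignore the rest.
fn ident_name(tt: &TokenTree) -> Option<&str> {
    match tt {
        TokenTree::Token(Token { kind: TokenKind::Ident(name), .. }) => Some(name.as_str()),
        _ => None,
    }
}

fn main() {
    let tt = TokenTree::Token(Token { kind: TokenKind::Ident("foo".into()), span: Span });
    assert_eq!(ident_name(&tt), Some("foo"));
}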

View file

@@ -1414,15 +1414,9 @@ impl KeywordIdents {
fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: TokenStream) {
for tt in tokens.into_trees() {
match tt {
TokenTree::Token(token) => match token.ident() {
// only report non-raw idents
Some((ident, false)) => {
self.check_ident_token(cx, UnderMacro(true), ast::Ident {
span: token.span.substitute_dummy(ident.span),
..ident
});
}
_ => {},
// Only report non-raw idents.
TokenTree::Token(token) => if let Some((ident, false)) = token.ident() {
self.check_ident_token(cx, UnderMacro(true), ident);
}
TokenTree::Delimited(_, _, tts) => {
self.check_tokens(cx, tts)
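
The hunk above replaces a `match` whose only interesting arm is one pattern (plus an empty `_ => {}` catch-all) with the equivalent `if let`, and passes `ident` through unchanged instead of rebuilding it with a substituted span. A small generic sketch of that match-to-if-let cleanup (not the rustc lint itself):

fn report(value: Option<(String, bool)>) {
    // Before: match value { Some((name, false)) => ..., _ => {} }
    // After: the flatter `if let` form used in the commit.
    if let Some((name, false)) = value {
        println!("non-raw ident: {}", name);
    }
}

fn main() {
    report(Some(("foo".to_string(), false)));  // prints
    report(Some(("r#fn".to_string(), true)));  // raw ident, ignored
}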

View file

@@ -483,7 +483,8 @@ impl MetaItem {
Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) |
Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: {
let mut segments = if let token::Ident(name, _) = kind {
if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }))
= tokens.peek() {
tokens.next();
vec![PathSegment::from_ident(Ident::new(name, span))]
} else {
@@ -493,12 +494,14 @@ impl MetaItem {
vec![PathSegment::path_root(span)]
};
loop {
if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span })) = tokens.next() {
if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span }))
= tokens.next() {
segments.push(PathSegment::from_ident(Ident::new(name, span)));
} else {
return None;
}
if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }))
= tokens.peek() {
tokens.next();
} else {
break;

View file

@@ -77,7 +77,9 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
},
(3, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })),
Some(&TokenTree::Token(Token { kind: token::Comma, .. })),
Some(&TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), .. }))) => {
Some(&TokenTree::Token(Token {
kind: token::Literal(token::Lit { symbol, .. }), ..
}))) => {
(code, Some(symbol))
}
_ => unreachable!()

View file

@@ -3,7 +3,7 @@
//! Since we cannot have a dependency on `librustc`, we implement some types here that are somewhat
//! redundant. Later, these types can be converted to types for use by the rest of the compiler.
use crate::syntax::ast::NodeId;
use crate::ast::NodeId;
use syntax_pos::MultiSpan;
/// Since we cannot import `LintId`s from `rustc::lint`, we define some Ids here which can later be

View file

@@ -24,12 +24,12 @@ pub struct Delimited {
impl Delimited {
/// Returns the opening delimiter (possibly `NoDelim`).
pub fn open_token(&self) -> token::TokenKind {
pub fn open_token(&self) -> TokenKind {
token::OpenDelim(self.delim)
}
/// Returns the closing delimiter (possibly `NoDelim`).
pub fn close_token(&self) -> token::TokenKind {
pub fn close_token(&self) -> TokenKind {
token::CloseDelim(self.delim)
}
@@ -59,7 +59,7 @@ pub struct SequenceRepetition {
/// The sequence of token trees
pub tts: Vec<TokenTree>,
/// The optional separator
pub separator: Option<token::TokenKind>,
pub separator: Option<TokenKind>,
/// Whether the sequence can be repeated zero (*), or one or more times (+)
pub op: KleeneOp,
/// The number of `Match`s that appear in the sequence (and subsequences)
@@ -210,7 +210,8 @@ pub fn parse(
match tree {
TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
let span = match trees.next() {
Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => match trees.next() {
Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) =>
match trees.next() {
Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
Some((kind, _)) => {
let span = token.span.with_lo(start_sp.lo());
@@ -370,7 +371,7 @@ where
/// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return
/// `None`.
fn kleene_op(token: &token::TokenKind) -> Option<KleeneOp> {
fn kleene_op(token: &TokenKind) -> Option<KleeneOp> {
match *token {
token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
@@ -423,7 +424,7 @@ fn parse_sep_and_kleene_op<I>(
attrs: &[ast::Attribute],
edition: Edition,
macro_node_id: NodeId,
) -> (Option<token::TokenKind>, KleeneOp)
) -> (Option<TokenKind>, KleeneOp)
where
I: Iterator<Item = tokenstream::TokenTree>,
{
@@ -448,7 +449,7 @@ fn parse_sep_and_kleene_op_2015<I>(
_features: &Features,
_attrs: &[ast::Attribute],
macro_node_id: NodeId,
) -> (Option<token::TokenKind>, KleeneOp)
) -> (Option<TokenKind>, KleeneOp)
where
I: Iterator<Item = tokenstream::TokenTree>,
{
@@ -566,7 +567,7 @@ fn parse_sep_and_kleene_op_2018<I>(
sess: &ParseSess,
_features: &Features,
_attrs: &[ast::Attribute],
) -> (Option<token::TokenKind>, KleeneOp)
) -> (Option<TokenKind>, KleeneOp)
where
I: Iterator<Item = tokenstream::TokenTree>,
{

View file

@@ -242,7 +242,7 @@ pub fn transcribe(
Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
sp = sp.apply_mark(cx.current_expansion.mark);
result.push(TokenTree::token(token::Dollar, sp).into());
result.push(TokenTree::token(token::TokenKind::from_ast_ident(ident), sp).into());
result.push(TokenTree::token(TokenKind::from_ast_ident(ident), sp).into());
}
}

View file

@@ -137,12 +137,6 @@ pub mod util {
pub mod json;
pub mod syntax {
pub use crate::ext;
pub use crate::parse;
pub use crate::ast;
}
pub mod ast;
pub mod attr;
pub mod source_map;

View file

@@ -2,8 +2,9 @@ use crate::ast::{
self, Arg, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind,
Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, VariantData,
};
use crate::parse::{SeqSep, token, PResult, Parser};
use crate::parse::{SeqSep, PResult, Parser};
use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType};
use crate::parse::token::{self, TokenKind};
use crate::print::pprust;
use crate::ptr::P;
use crate::source_map::Spanned;
@@ -229,8 +230,8 @@ impl<'a> Parser<'a> {
pub fn expected_one_of_not_found(
&mut self,
edible: &[token::TokenKind],
inedible: &[token::TokenKind],
edible: &[TokenKind],
inedible: &[TokenKind],
) -> PResult<'a, bool /* recovered */> {
fn tokens_to_string(tokens: &[TokenType]) -> String {
let mut i = tokens.iter();
@@ -368,7 +369,7 @@ impl<'a> Parser<'a> {
/// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
/// passes through any errors encountered. Used for error recovery.
crate fn eat_to_tokens(&mut self, kets: &[&token::TokenKind]) {
crate fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
let handler = self.diagnostic();
if let Err(ref mut err) = self.parse_seq_to_before_tokens(
@@ -388,7 +389,7 @@ impl<'a> Parser<'a> {
/// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
/// ^^ help: remove extra angle brackets
/// ```
crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::TokenKind) {
crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) {
// This function is intended to be invoked after parsing a path segment where there are two
// cases:
//
@@ -726,7 +727,7 @@ impl<'a> Parser<'a> {
/// closing delimiter.
pub fn unexpected_try_recover(
&mut self,
t: &token::TokenKind,
t: &TokenKind,
) -> PResult<'a, bool /* recovered */> {
let token_str = pprust::token_to_string(t);
let this_token_str = self.this_token_descr();
@@ -903,7 +904,7 @@ impl<'a> Parser<'a> {
crate fn recover_closing_delimiter(
&mut self,
tokens: &[token::TokenKind],
tokens: &[TokenKind],
mut err: DiagnosticBuilder<'a>,
) -> PResult<'a, bool> {
let mut pos = None;

View file

@@ -272,7 +272,8 @@ impl<'a> Parser<'a> {
if self.token == token::Dot {
// Attempt to recover `.4` as `0.4`.
recovered = self.look_ahead(1, |t| {
if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = t.kind {
if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix })
= t.kind {
let next_span = self.look_ahead_span(1);
if self.span.hi() == next_span.lo() {
let s = String::from("0.") + &symbol.as_str();

View file

@@ -5,7 +5,8 @@ use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
use crate::source_map::{SourceMap, FilePathMapping};
use crate::feature_gate::UnstableFeatures;
use crate::parse::parser::Parser;
use crate::syntax::parse::parser::emit_unclosed_delims;
use crate::parse::parser::emit_unclosed_delims;
use crate::parse::token::TokenKind;
use crate::tokenstream::{TokenStream, TokenTree};
use crate::diagnostics::plugin::ErrorMap;
use crate::print::pprust::token_to_string;
@@ -358,13 +359,13 @@ pub fn stream_to_parser_with_base_dir<'a>(
/// A sequence separator.
pub struct SeqSep {
/// The separator token.
pub sep: Option<token::TokenKind>,
pub sep: Option<TokenKind>,
/// `true` if a trailing separator is allowed.
pub trailing_sep_allowed: bool,
}
impl SeqSep {
pub fn trailing_allowed(t: token::TokenKind) -> SeqSep {
pub fn trailing_allowed(t: TokenKind) -> SeqSep {
SeqSep {
sep: Some(t),
trailing_sep_allowed: true,
@@ -426,7 +427,9 @@ mod tests {
match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
(
4,
Some(&TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. })),
Some(&TokenTree::Token(Token {
kind: token::Ident(name_macro_rules, false), ..
})),
Some(&TokenTree::Token(Token { kind: token::Not, .. })),
Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })),
Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
@@ -446,7 +449,9 @@
(
2,
Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })),
Some(&TokenTree::Token(Token {
kind: token::Ident(name, false), ..
})),
)
if first_delim == token::Paren && name.as_str() == "a" => {},
_ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
@@ -456,7 +461,9 @@
(
2,
Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })),
Some(&TokenTree::Token(Token {
kind: token::Ident(name, false), ..
})),
)
if second_delim == token::Paren && name.as_str() == "a" => {},
_ => panic!("value 4: {:?} {:?}", second_delim, second_tts),

View file

@@ -38,7 +38,7 @@ use crate::source_map::{self, SourceMap, Spanned, respan};
use crate::parse::{SeqSep, classify, literal, token};
use crate::parse::lexer::UnmatchedBrace;
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use crate::parse::token::{Token, DelimToken};
use crate::parse::token::{Token, TokenKind, DelimToken};
use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
use crate::util::parser::{AssocOp, Fixity};
use crate::print::pprust;
@@ -337,8 +337,8 @@ impl TokenCursor {
}
fn next_desugared(&mut self) -> Token {
let (sp, name) = match self.next() {
Token { span, kind: token::DocComment(name) } => (span, name),
let (name, sp) = match self.next() {
Token { kind: token::DocComment(name), span } => (name, span),
tok => return tok,
};
@@ -364,7 +364,7 @@ impl TokenCursor {
[
TokenTree::token(token::Ident(sym::doc, false), sp),
TokenTree::token(token::Eq, sp),
TokenTree::token(token::TokenKind::lit(
TokenTree::token(TokenKind::lit(
token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
), sp),
]
@@ -389,7 +389,7 @@ impl TokenCursor {
#[derive(Clone, PartialEq)]
crate enum TokenType {
Token(token::TokenKind),
Token(TokenKind),
Keyword(Symbol),
Operator,
Lifetime,
@@ -419,7 +419,7 @@ impl TokenType {
///
/// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
/// that `IDENT` is not the ident of a fn trait.
fn can_continue_type_after_non_fn_ident(t: &token::TokenKind) -> bool {
fn can_continue_type_after_non_fn_ident(t: &TokenKind) -> bool {
t == &token::ModSep || t == &token::Lt ||
t == &token::BinOp(token::Shl)
}
@@ -565,7 +565,7 @@ impl<'a> Parser<'a> {
}
/// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
pub fn expect(&mut self, t: &token::TokenKind) -> PResult<'a, bool /* recovered */> {
pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
if self.expected_tokens.is_empty() {
if self.token == *t {
self.bump();
@@ -583,8 +583,8 @@ impl<'a> Parser<'a> {
/// anything. Signal a fatal error if next token is unexpected.
pub fn expect_one_of(
&mut self,
edible: &[token::TokenKind],
inedible: &[token::TokenKind],
edible: &[TokenKind],
inedible: &[TokenKind],
) -> PResult<'a, bool /* recovered */> {
if edible.contains(&self.token) {
self.bump();
@@ -646,14 +646,14 @@ impl<'a> Parser<'a> {
///
/// This method will automatically add `tok` to `expected_tokens` if `tok` is not
/// encountered.
crate fn check(&mut self, tok: &token::TokenKind) -> bool {
crate fn check(&mut self, tok: &TokenKind) -> bool {
let is_present = self.token == *tok;
if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); }
is_present
}
/// Consumes a token 'tok' if it exists. Returns whether the given token was present.
pub fn eat(&mut self, tok: &token::TokenKind) -> bool {
pub fn eat(&mut self, tok: &TokenKind) -> bool {
let is_present = self.check(tok);
if is_present { self.bump() }
is_present
@@ -889,7 +889,7 @@ impl<'a> Parser<'a> {
/// `f` must consume tokens until reaching the next separator or
/// closing bracket.
pub fn parse_seq_to_end<T, F>(&mut self,
ket: &token::TokenKind,
ket: &TokenKind,
sep: SeqSep,
f: F)
-> PResult<'a, Vec<T>> where
@@ -907,7 +907,7 @@ impl<'a> Parser<'a> {
/// closing bracket.
pub fn parse_seq_to_before_end<T, F>(
&mut self,
ket: &token::TokenKind,
ket: &TokenKind,
sep: SeqSep,
f: F,
) -> PResult<'a, (Vec<T>, bool)>
@@ -918,7 +918,7 @@ impl<'a> Parser<'a> {
crate fn parse_seq_to_before_tokens<T, F>(
&mut self,
kets: &[&token::TokenKind],
kets: &[&TokenKind],
sep: SeqSep,
expect: TokenExpectType,
mut f: F,
@@ -992,8 +992,8 @@ impl<'a> Parser<'a> {
/// closing bracket.
fn parse_unspanned_seq<T, F>(
&mut self,
bra: &token::TokenKind,
ket: &token::TokenKind,
bra: &TokenKind,
ket: &TokenKind,
sep: SeqSep,
f: F,
) -> PResult<'a, Vec<T>> where
@@ -1036,7 +1036,7 @@ impl<'a> Parser<'a> {
/// Advance the parser using provided token as a next one. Use this when
/// consuming a part of a token. For example a single `<` from `<<`.
fn bump_with(&mut self, next: token::TokenKind, span: Span) {
fn bump_with(&mut self, next: TokenKind, span: Span) {
self.prev_span = self.span.with_hi(span.lo());
// It would be incorrect to record the kind of the current token, but
// fortunately for tokens currently using `bump_with`, the
@@ -1050,7 +1050,6 @@ impl<'a> Parser<'a> {
F: FnOnce(&Token) -> R,
{
if dist == 0 {
// FIXME: Avoid cloning here.
return f(&self.token);
}
@@ -1058,7 +1057,8 @@ impl<'a> Parser<'a> {
f(&match frame.tree_cursor.look_ahead(dist - 1) {
Some(tree) => match tree {
TokenTree::Token(token) => token,
TokenTree::Delimited(dspan, delim, _) => Token::new(token::OpenDelim(delim), dspan.open),
TokenTree::Delimited(dspan, delim, _) =>
Token::new(token::OpenDelim(delim), dspan.open),
}
None => Token::new(token::CloseDelim(frame.delim), frame.span.close)
})
@@ -1768,7 +1768,7 @@ impl<'a> Parser<'a> {
fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
let ident = self.parse_path_segment_ident()?;
let is_args_start = |token: &token::TokenKind| match *token {
let is_args_start = |token: &TokenKind| match *token {
token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren)
| token::LArrow => true,
_ => false,
@@ -1864,7 +1864,8 @@ impl<'a> Parser<'a> {
}
fn parse_field_name(&mut self) -> PResult<'a, Ident> {
if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind {
if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) =
self.token.kind {
self.expect_no_suffix(self.span, "a tuple index", suffix);
self.bump();
Ok(Ident::new(symbol, self.prev_span))
@@ -2649,8 +2650,10 @@ impl<'a> Parser<'a> {
// Interpolated identifier and lifetime tokens are replaced with usual identifier
// and lifetime tokens, so the former are never encountered during normal parsing.
match **nt {
token::NtIdent(ident, is_raw) => Token::new(token::Ident(ident.name, is_raw), ident.span),
token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span),
token::NtIdent(ident, is_raw) =>
Token::new(token::Ident(ident.name, is_raw), ident.span),
token::NtLifetime(ident) =>
Token::new(token::Lifetime(ident.name), ident.span),
_ => return,
}
}
@@ -4481,7 +4484,9 @@ impl<'a> Parser<'a> {
// We used to incorrectly stop parsing macro-expanded statements here.
// If the next token will be an error anyway but could have parsed with the
// earlier behavior, stop parsing here and emit a warning to avoid breakage.
else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token.kind {
else if macro_legacy_warnings &&
self.token.can_begin_expr() &&
match self.token.kind {
// These can continue an expression, so we can't stop parsing and warn.
token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
token::BinOp(token::Minus) | token::BinOp(token::Star) |
@@ -6409,7 +6414,7 @@ impl<'a> Parser<'a> {
}
/// Given a termination token, parses all of the items in a module.
fn parse_mod_items(&mut self, term: &token::TokenKind, inner_lo: Span) -> PResult<'a, Mod> {
fn parse_mod_items(&mut self, term: &TokenKind, inner_lo: Span) -> PResult<'a, Mod> {
let mut items = vec![];
while let Some(item) = self.parse_item()? {
items.push(item);

View file

@@ -5,11 +5,10 @@ pub use LitKind::*;
pub use TokenKind::*;
use crate::ast::{self};
use crate::parse::ParseSess;
use crate::parse::{parse_stream_from_source_str, ParseSess};
use crate::print::pprust;
use crate::ptr::P;
use crate::symbol::kw;
use crate::syntax::parse::parse_stream_from_source_str;
use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
use syntax_pos::symbol::Symbol;

View file

@@ -203,7 +203,8 @@ impl TokenStream {
if let Some((_, next)) = iter.peek() {
let sp = match (&ts, &next) {
(_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
((TokenTree::Token(token_left), NonJoint), (TokenTree::Token(token_right), _))
((TokenTree::Token(token_left), NonJoint),
(TokenTree::Token(token_right), _))
if ((token_left.is_ident() && !token_left.is_reserved_ident())
|| token_left.is_lit()) &&
((token_right.is_ident() && !token_right.is_reserved_ident())
@@ -575,7 +576,7 @@ impl DelimSpan {
#[cfg(test)]
mod tests {
use super::*;
use crate::syntax::ast::Name;
use crate::ast::Name;
use crate::with_default_globals;
use crate::util::parser_testing::string_to_stream;
use syntax_pos::{Span, BytePos, NO_EXPANSION};

View file

@@ -103,7 +103,8 @@ fn parse_assert<'a>(
//
// Parse this as an actual message, and suggest inserting a comma. Eventually, this should be
// turned into an error.
let custom_message = if let token::Literal(token::Lit { kind: token::Str, .. }) = parser.token.kind {
let custom_message = if let token::Literal(token::Lit { kind: token::Str, .. })
= parser.token.kind {
let mut err = cx.struct_span_warn(parser.span, "unexpected string literal");
let comma_span = cx.source_map().next_point(parser.prev_span);
err.span_suggestion_short(

View file

@@ -921,10 +921,9 @@ pub struct Interner {
impl Interner {
fn prefill(init: &[&'static str]) -> Self {
let symbols = (0 .. init.len() as u32).map(Symbol::new);
Interner {
strings: init.to_vec(),
names: init.iter().copied().zip(symbols).collect(),
strings: init.into(),
names: init.iter().copied().zip((0..).map(Symbol::new)).collect(),
..Default::default()
}
}
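
The `prefill` cleanup above relies on `Iterator::zip` stopping at the shorter of its two iterators, so the unbounded counter `(0..).map(Symbol::new)` is implicitly truncated to `init.len()` and the old explicit `0 .. init.len() as u32` range becomes unnecessary. A standalone sketch of the same trick, with plain `u32` indices standing in for `Symbol`:

use std::collections::HashMap;

fn main() {
    let init: &[&'static str] = &["", "as", "break"];
    // `zip` ends when `init` runs out, so the counter needs no upper bound.
    let names: HashMap<&'static str, u32> = init.iter().copied().zip(0u32..).collect();
    assert_eq!(names["break"], 2);
    assert_eq!(names.len(), init.len());
}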

View file

@@ -1,3 +1,9 @@
// WARNING WARNING WARNING WARNING WARNING
// =======================================
//
// This code also appears in src/doc/unstable-book/src/language-features/plugin.md.
// Please keep the two copies in sync! FIXME: have rustdoc read this file
// force-host
#![crate_type="dylib"]
@@ -8,21 +14,15 @@ extern crate syntax_pos;
extern crate rustc;
extern crate rustc_plugin;
use syntax::parse::token;
use syntax::parse::token::{self, Token};
use syntax::tokenstream::TokenTree;
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
use syntax::ext::build::AstBuilder; // trait for expr_usize
use syntax::ext::build::AstBuilder; // A trait for expr_usize.
use syntax_pos::Span;
use rustc_plugin::Registry;
// WARNING WARNING WARNING WARNING WARNING
// =======================================
//
// This code also appears in src/doc/unstable-book/src/language-features/plugin.md.
// Please keep the two copies in sync! FIXME: have rustdoc read this file
fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
-> Box<MacResult + 'static> {
-> Box<dyn MacResult + 'static> {
static NUMERALS: &'static [(&'static str, usize)] = &[
("M", 1000), ("CM", 900), ("D", 500), ("CD", 400),
@@ -38,7 +38,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
}
let text = match args[0] {
TokenTree::Token(_, token::Ident(s, _)) => s.to_string(),
TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
_ => {
cx.span_err(sp, "argument should be a single identifier");
return DummyResult::any(sp);