Auto merge of #40346 - jseyfried:path_and_tokenstream_attr, r=nrc
`TokenStream`-based attributes, paths in attribute and derive macro invocations

This PR
- refactors `Attribute` to use `Path` and `TokenStream` instead of `MetaItem`.
- supports macro invocation paths for attribute procedural macros.
  - e.g. `#[::foo::attr_macro] struct S;`, `#[cfg_attr(all(), foo::attr_macro)] struct S;`
- supports macro invocation paths for derive procedural macros.
  - e.g. `#[derive(foo::Bar, super::Baz)] struct S;`
- supports arbitrary tokens as arguments to attribute procedural macros.
  - e.g. `#[foo::attr_macro arbitrary + tokens] struct S;`
- supports using arbitrary tokens in "inert attributes" with derive procedural macros.
  - e.g. `#[derive(Foo)] struct S(#[inert arbitrary + tokens] i32);` where `#[proc_macro_derive(Foo, attributes(inert))]`

r? @nrc
Commit 9c15de4fd5: 56 changed files with 892 additions and 548 deletions.
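For context on the `attributes(inert)` form cited in the description: a derive procedural macro registers its inert helper attributes inside the `proc_macro_derive` attribute. A minimal sketch, in a crate compiled as a proc-macro (the names `Foo` and `inert` come from the example above; the empty expansion body is illustrative, not the PR's code):

```rust
extern crate proc_macro;
use proc_macro::TokenStream;

// `attributes(inert)` registers `inert` as an inert helper attribute, so
// `#[inert arbitrary + tokens]` inside the annotated item is left alone
// rather than resolved as an attribute macro.
#[proc_macro_derive(Foo, attributes(inert))]
pub fn derive_foo(input: TokenStream) -> TokenStream {
    // A real derive would inspect `input`; this sketch expands to nothing.
    "".parse().unwrap()
}
```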
@@ -120,11 +120,12 @@ impl<'a> CheckAttrVisitor<'a> {
     }
 
     fn check_attribute(&self, attr: &ast::Attribute, target: Target) {
-        let name: &str = &attr.name().as_str();
-        match name {
+        if let Some(name) = attr.name() {
+            match &*name.as_str() {
                 "inline" => self.check_inline(attr, target),
                 "repr" => self.check_repr(attr, target),
                 _ => (),
+            }
         }
     }
 }
@@ -1296,7 +1296,7 @@ impl<'a> LoweringContext<'a> {
         let attrs = self.lower_attrs(&i.attrs);
         let mut vis = self.lower_visibility(&i.vis);
         if let ItemKind::MacroDef(ref tts) = i.node {
-            if i.attrs.iter().any(|attr| attr.name() == "macro_export") {
+            if i.attrs.iter().any(|attr| attr.path == "macro_export") {
                 self.exported_macros.push(hir::MacroDef {
                     name: name, attrs: attrs, id: i.id, span: i.span, body: tts.clone().into(),
                 });

@@ -408,14 +408,14 @@ pub fn gather_attrs(attrs: &[ast::Attribute]) -> Vec<Result<(ast::Name, Level, Span), Span>>
 pub fn gather_attr(attr: &ast::Attribute) -> Vec<Result<(ast::Name, Level, Span), Span>> {
     let mut out = vec![];
 
-    let level = match Level::from_str(&attr.name().as_str()) {
+    let level = match attr.name().and_then(|name| Level::from_str(&name.as_str())) {
         None => return out,
         Some(lvl) => lvl,
     };
 
+    let meta = unwrap_or!(attr.meta(), return out);
     attr::mark_used(attr);
 
-    let meta = &attr.value;
     let metas = if let Some(metas) = meta.meta_item_list() {
         metas
     } else {
@@ -197,7 +197,7 @@ impl<'a, 'tcx: 'a> Annotator<'a, 'tcx> {
         } else {
             // Emit errors for non-staged-api crates.
             for attr in attrs {
-                let tag = attr.name();
+                let tag = unwrap_or!(attr.name(), continue);
                 if tag == "unstable" || tag == "stable" || tag == "rustc_deprecated" {
                     attr::mark_used(attr);
                     self.tcx.sess.span_err(attr.span(), "stability attributes may not be used \

@@ -402,7 +402,7 @@ impl<'a, 'tcx> Index<'tcx> {
 
         let mut is_staged_api = false;
         for attr in &krate.attrs {
-            if attr.name() == "stable" || attr.name() == "unstable" {
+            if attr.path == "stable" || attr.path == "unstable" {
                 is_staged_api = true;
                 break
             }

@@ -274,7 +274,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
             .filter(|a| a.check_name("rustc_on_unimplemented"))
             .next()
         {
-            let err_sp = item.meta().span.substitute_dummy(span);
+            let err_sp = item.span.substitute_dummy(span);
             let trait_str = self.tcx.item_path_str(trait_ref.def_id);
             if let Some(istring) = item.value_str() {
                 let istring = &*istring.as_str();
@@ -18,16 +18,15 @@ use syntax::abi::Abi;
 use syntax::ast::{self, Name, NodeId};
 use syntax::attr;
 use syntax::parse::token;
-use syntax::symbol::{Symbol, InternedString};
+use syntax::symbol::InternedString;
 use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos};
 use syntax::tokenstream;
 use rustc::hir;
 use rustc::hir::*;
 use rustc::hir::def::Def;
 use rustc::hir::def_id::DefId;
-use rustc::hir::intravisit as visit;
+use rustc::hir::intravisit::{self as visit, Visitor};
 use rustc::ty::TyCtxt;
-use rustc_data_structures::fnv;
 use std::hash::{Hash, Hasher};
 
 use super::def_path_hash::DefPathHashes;

@@ -559,7 +558,7 @@ macro_rules! hash_span {
     });
 }
 
-impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'hash, 'tcx> {
+impl<'a, 'hash, 'tcx> Visitor<'tcx> for StrictVersionHashVisitor<'a, 'hash, 'tcx> {
     fn nested_visit_map<'this>(&'this mut self) -> visit::NestedVisitorMap<'this, 'tcx> {
         if self.hash_bodies {
             visit::NestedVisitorMap::OnlyBodies(&self.tcx.hir)
@@ -960,50 +959,24 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
         }
     }
 
-    fn hash_meta_item(&mut self, meta_item: &ast::MetaItem) {
-        debug!("hash_meta_item: st={:?}", self.st);
-
-        // ignoring span information, it doesn't matter here
-        self.hash_discriminant(&meta_item.node);
-        meta_item.name.as_str().len().hash(self.st);
-        meta_item.name.as_str().hash(self.st);
-
-        match meta_item.node {
-            ast::MetaItemKind::Word => {}
-            ast::MetaItemKind::NameValue(ref lit) => saw_lit(lit).hash(self.st),
-            ast::MetaItemKind::List(ref items) => {
-                // Sort subitems so the hash does not depend on their order
-                let indices = self.indices_sorted_by(&items, |p| {
-                    (p.name().map(Symbol::as_str), fnv::hash(&p.literal().map(saw_lit)))
-                });
-                items.len().hash(self.st);
-                for (index, &item_index) in indices.iter().enumerate() {
-                    index.hash(self.st);
-                    let nested_meta_item: &ast::NestedMetaItemKind = &items[item_index].node;
-                    self.hash_discriminant(nested_meta_item);
-                    match *nested_meta_item {
-                        ast::NestedMetaItemKind::MetaItem(ref meta_item) => {
-                            self.hash_meta_item(meta_item);
-                        }
-                        ast::NestedMetaItemKind::Literal(ref lit) => {
-                            saw_lit(lit).hash(self.st);
-                        }
-                    }
-                }
-            }
-        }
-    }
-
     pub fn hash_attributes(&mut self, attributes: &[ast::Attribute]) {
         debug!("hash_attributes: st={:?}", self.st);
         let indices = self.indices_sorted_by(attributes, |attr| attr.name());
 
         for i in indices {
             let attr = &attributes[i];
-            if !attr.is_sugared_doc &&
-               !IGNORED_ATTRIBUTES.contains(&&*attr.value.name().as_str()) {
+            match attr.name() {
+                Some(name) if IGNORED_ATTRIBUTES.contains(&&*name.as_str()) => continue,
+                _ => {}
+            };
+            if !attr.is_sugared_doc {
                 SawAttribute(attr.style).hash(self.st);
-                self.hash_meta_item(&attr.value);
+                for segment in &attr.path.segments {
+                    SawIdent(segment.identifier.name.as_str()).hash(self.st);
+                }
+                for tt in attr.tokens.trees() {
+                    self.hash_token_tree(&tt);
+                }
             }
         }
     }
@@ -104,9 +104,9 @@ pub struct DirtyCleanVisitor<'a, 'tcx:'a> {
 
 impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
     fn dep_node(&self, attr: &Attribute, def_id: DefId) -> DepNode<DefId> {
-        for item in attr.meta_item_list().unwrap_or(&[]) {
+        for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
             if item.check_name(LABEL) {
-                let value = expect_associated_value(self.tcx, item);
+                let value = expect_associated_value(self.tcx, &item);
                 match DepNode::from_label_string(&value.as_str(), def_id) {
                     Ok(def_id) => return def_id,
                     Err(()) => {

@@ -331,9 +331,9 @@ fn check_config(tcx: TyCtxt, attr: &Attribute) -> bool {
     debug!("check_config(attr={:?})", attr);
     let config = &tcx.sess.parse_sess.config;
     debug!("check_config: config={:?}", config);
-    for item in attr.meta_item_list().unwrap_or(&[]) {
+    for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
         if item.check_name(CFG) {
-            let value = expect_associated_value(tcx, item);
+            let value = expect_associated_value(tcx, &item);
             debug!("check_config: searching for cfg {:?}", value);
             return config.contains(&(value, None));
         }
@@ -312,7 +312,7 @@ impl MissingDoc {
             }
         }
 
-        let has_doc = attrs.iter().any(|a| a.is_value_str() && a.name() == "doc");
+        let has_doc = attrs.iter().any(|a| a.is_value_str() && a.check_name("doc"));
         if !has_doc {
             cx.span_lint(MISSING_DOCS,
                          sp,

@@ -635,7 +635,7 @@ impl LintPass for DeprecatedAttr {
 
 impl EarlyLintPass for DeprecatedAttr {
     fn check_attribute(&mut self, cx: &EarlyContext, attr: &ast::Attribute) {
-        let name = attr.name();
+        let name = unwrap_or!(attr.name(), return);
         for &&(n, _, ref g) in &self.depr_attrs {
             if name == n {
                 if let &AttributeGate::Gated(Stability::Deprecated(link),

@@ -1121,8 +1121,8 @@ impl LintPass for UnstableFeatures {
 
 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnstableFeatures {
     fn check_attribute(&mut self, ctx: &LateContext, attr: &ast::Attribute) {
-        if attr.meta().check_name("feature") {
-            if let Some(items) = attr.meta().meta_item_list() {
+        if attr.check_name("feature") {
+            if let Some(items) = attr.meta_item_list() {
                 for item in items {
                     ctx.span_lint(UNSTABLE_FEATURES, item.span(), "unstable feature");
                 }
@@ -38,6 +38,7 @@
 #![feature(slice_patterns)]
 #![feature(staged_api)]
 
+#[macro_use]
 extern crate syntax;
 #[macro_use]
 extern crate rustc;

@@ -269,6 +269,7 @@ impl LintPass for UnusedAttributes {
 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedAttributes {
     fn check_attribute(&mut self, cx: &LateContext, attr: &ast::Attribute) {
         debug!("checking attribute: {:?}", attr);
+        let name = unwrap_or!(attr.name(), return);
 
         // Note that check_name() marks the attribute as used if it matches.
         for &(ref name, ty, _) in BUILTIN_ATTRIBUTES {

@@ -294,13 +295,13 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedAttributes {
         cx.span_lint(UNUSED_ATTRIBUTES, attr.span, "unused attribute");
         // Is it a builtin attribute that must be used at the crate level?
         let known_crate = BUILTIN_ATTRIBUTES.iter()
-            .find(|&&(name, ty, _)| attr.name() == name && ty == AttributeType::CrateLevel)
+            .find(|&&(builtin, ty, _)| name == builtin && ty == AttributeType::CrateLevel)
             .is_some();
 
         // Has a plugin registered this attribute as one which must be used at
         // the crate level?
         let plugin_crate = plugin_attributes.iter()
-            .find(|&&(ref x, t)| attr.name() == &**x && AttributeType::CrateLevel == t)
+            .find(|&&(ref x, t)| name == &**x && AttributeType::CrateLevel == t)
             .is_some();
         if known_crate || plugin_crate {
             let msg = match attr.style {
@@ -973,9 +973,11 @@ impl<'a> CrateLoader<'a> {
 
 impl<'a> CrateLoader<'a> {
     pub fn preprocess(&mut self, krate: &ast::Crate) {
-        for attr in krate.attrs.iter().filter(|m| m.name() == "link_args") {
-            if let Some(linkarg) = attr.value_str() {
-                self.cstore.add_used_link_args(&linkarg.as_str());
+        for attr in &krate.attrs {
+            if attr.path == "link_args" {
+                if let Some(linkarg) = attr.value_str() {
+                    self.cstore.add_used_link_args(&linkarg.as_str());
+                }
             }
         }
     }

@@ -269,9 +269,12 @@ impl CrateMetadata {
     }
 
     pub fn is_staged_api(&self) -> bool {
-        self.get_item_attrs(CRATE_DEF_INDEX)
-            .iter()
-            .any(|attr| attr.name() == "stable" || attr.name() == "unstable")
+        for attr in self.get_item_attrs(CRATE_DEF_INDEX) {
+            if attr.path == "stable" || attr.path == "unstable" {
+                return true;
+            }
+        }
+        false
     }
 
     pub fn is_allocator(&self) -> bool {
@@ -241,12 +241,10 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
             ItemKind::Mod(_) => {
                 // Ensure that `path` attributes on modules are recorded as used (c.f. #35584).
                 attr::first_attr_value_str_by_name(&item.attrs, "path");
-                if let Some(attr) =
-                        item.attrs.iter().find(|attr| attr.name() == "warn_directory_ownership") {
+                if item.attrs.iter().any(|attr| attr.check_name("warn_directory_ownership")) {
                     let lint = lint::builtin::LEGACY_DIRECTORY_OWNERSHIP;
                     let msg = "cannot declare a new module at this location";
                     self.session.add_lint(lint, item.id, item.span, msg.to_string());
-                    attr::mark_used(attr);
                 }
             }
             ItemKind::Union(ref vdata, _) => {

@@ -1165,6 +1165,7 @@ pub struct Resolver<'a> {
 
     privacy_errors: Vec<PrivacyError<'a>>,
     ambiguity_errors: Vec<AmbiguityError<'a>>,
+    gated_errors: FxHashSet<Span>,
     disallowed_shadowing: Vec<&'a LegacyBinding<'a>>,
 
     arenas: &'a ResolverArenas<'a>,

@@ -1355,6 +1356,7 @@ impl<'a> Resolver<'a> {
 
             privacy_errors: Vec::new(),
             ambiguity_errors: Vec::new(),
+            gated_errors: FxHashSet(),
             disallowed_shadowing: Vec::new(),
 
             arenas: arenas,
@@ -3359,8 +3361,9 @@ impl<'a> Resolver<'a> {
         if self.proc_macro_enabled { return; }
 
         for attr in attrs {
-            let maybe_binding = self.builtin_macros.get(&attr.name()).cloned().or_else(|| {
-                let ident = Ident::with_empty_ctxt(attr.name());
+            let name = unwrap_or!(attr.name(), continue);
+            let maybe_binding = self.builtin_macros.get(&name).cloned().or_else(|| {
+                let ident = Ident::with_empty_ctxt(name);
                 self.resolve_lexical_macro_path_segment(ident, MacroNS, None).ok()
             });
 

@@ -28,8 +28,11 @@ use syntax::ext::placeholders::placeholder;
 use syntax::ext::tt::macro_rules;
 use syntax::feature_gate::{self, emit_feature_err, GateIssue};
 use syntax::fold::{self, Folder};
+use syntax::parse::parser::PathStyle;
+use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax::symbol::{Symbol, keywords};
+use syntax::tokenstream::{TokenStream, TokenTree, Delimited};
 use syntax::util::lev_distance::find_best_match_for_name;
 use syntax_pos::{Span, DUMMY_SP};
 
@@ -179,12 +182,14 @@ impl<'a> base::Resolver for Resolver<'a> {
     fn find_legacy_attr_invoc(&mut self, attrs: &mut Vec<ast::Attribute>)
                               -> Option<ast::Attribute> {
         for i in 0..attrs.len() {
+            let name = unwrap_or!(attrs[i].name(), continue);
+
             if self.session.plugin_attributes.borrow().iter()
-                    .any(|&(ref attr_nm, _)| attrs[i].name() == &**attr_nm) {
+                    .any(|&(ref attr_nm, _)| name == &**attr_nm) {
                 attr::mark_known(&attrs[i]);
             }
 
-            match self.builtin_macros.get(&attrs[i].name()).cloned() {
+            match self.builtin_macros.get(&name).cloned() {
                 Some(binding) => match *binding.get_macro(self) {
                     MultiModifier(..) | MultiDecorator(..) | SyntaxExtension::AttrProcMacro(..) => {
                         return Some(attrs.remove(i))

@@ -197,17 +202,25 @@ impl<'a> base::Resolver for Resolver<'a> {
 
         // Check for legacy derives
         for i in 0..attrs.len() {
-            if attrs[i].name() == "derive" {
-                let mut traits = match attrs[i].meta_item_list() {
-                    Some(traits) if !traits.is_empty() => traits.to_owned(),
-                    _ => continue,
+            let name = unwrap_or!(attrs[i].name(), continue);
+
+            if name == "derive" {
+                let result = attrs[i].parse_list(&self.session.parse_sess,
+                                                 |parser| parser.parse_path(PathStyle::Mod));
+                let mut traits = match result {
+                    Ok(traits) => traits,
+                    Err(mut e) => {
+                        e.cancel();
+                        continue
+                    }
                 };
 
                 for j in 0..traits.len() {
-                    let legacy_name = Symbol::intern(&match traits[j].word() {
-                        Some(..) => format!("derive_{}", traits[j].name().unwrap()),
-                        None => continue,
-                    });
+                    if traits[j].segments.len() > 1 {
+                        continue
+                    }
+                    let trait_name = traits[j].segments[0].identifier.name;
+                    let legacy_name = Symbol::intern(&format!("derive_{}", trait_name));
                     if !self.builtin_macros.contains_key(&legacy_name) {
                         continue
                     }

@@ -216,18 +229,27 @@ impl<'a> base::Resolver for Resolver<'a> {
                     if traits.is_empty() {
                         attrs.remove(i);
                     } else {
-                        attrs[i].value = ast::MetaItem {
-                            name: attrs[i].name(),
-                            span: attrs[i].span,
-                            node: ast::MetaItemKind::List(traits),
-                        };
+                        let mut tokens = Vec::new();
+                        for (i, path) in traits.iter().enumerate() {
+                            if i > 0 {
+                                tokens.push(TokenTree::Token(attrs[i].span, Token::Comma).into());
+                            }
+                            for (j, segment) in path.segments.iter().enumerate() {
+                                if j > 0 {
+                                    tokens.push(TokenTree::Token(path.span, Token::ModSep).into());
+                                }
+                                let tok = Token::Ident(segment.identifier);
+                                tokens.push(TokenTree::Token(path.span, tok).into());
+                            }
+                        }
+                        attrs[i].tokens = TokenTree::Delimited(attrs[i].span, Delimited {
+                            delim: token::Paren,
+                            tts: TokenStream::concat(tokens).into(),
+                        }).into();
                     }
                     return Some(ast::Attribute {
-                        value: ast::MetaItem {
-                            name: legacy_name,
-                            span: span,
-                            node: ast::MetaItemKind::Word,
-                        },
+                        path: ast::Path::from_ident(span, Ident::with_empty_ctxt(legacy_name)),
+                        tokens: TokenStream::empty(),
                         id: attr::mk_attr_id(),
                         style: ast::AttrStyle::Outer,
                         is_sugared_doc: false,
|
@ -267,28 +289,27 @@ impl<'a> Resolver<'a> {
|
||||||
InvocationKind::Bang { ref mac, .. } => {
|
InvocationKind::Bang { ref mac, .. } => {
|
||||||
return self.resolve_macro_to_def(scope, &mac.node.path, MacroKind::Bang, force);
|
return self.resolve_macro_to_def(scope, &mac.node.path, MacroKind::Bang, force);
|
||||||
}
|
}
|
||||||
InvocationKind::Derive { name, span, .. } => {
|
InvocationKind::Derive { ref path, .. } => {
|
||||||
let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
|
return self.resolve_macro_to_def(scope, path, MacroKind::Derive, force);
|
||||||
return self.resolve_macro_to_def(scope, &path, MacroKind::Derive, force);
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let (attr_name, path) = {
|
|
||||||
let attr = attr.as_ref().unwrap();
|
|
||||||
(attr.name(), ast::Path::from_ident(attr.span, Ident::with_empty_ctxt(attr.name())))
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut determined = true;
|
let path = attr.as_ref().unwrap().path.clone();
|
||||||
|
let mut determinacy = Determinacy::Determined;
|
||||||
match self.resolve_macro_to_def(scope, &path, MacroKind::Attr, force) {
|
match self.resolve_macro_to_def(scope, &path, MacroKind::Attr, force) {
|
||||||
Ok(def) => return Ok(def),
|
Ok(def) => return Ok(def),
|
||||||
Err(Determinacy::Undetermined) => determined = false,
|
Err(Determinacy::Undetermined) => determinacy = Determinacy::Undetermined,
|
||||||
Err(Determinacy::Determined) if force => return Err(Determinacy::Determined),
|
Err(Determinacy::Determined) if force => return Err(Determinacy::Determined),
|
||||||
Err(Determinacy::Determined) => {}
|
Err(Determinacy::Determined) => {}
|
||||||
}
|
}
|
||||||
|
|
||||||
for &(name, span) in traits {
|
let attr_name = match path.segments.len() {
|
||||||
let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
|
1 => path.segments[0].identifier.name,
|
||||||
match self.resolve_macro(scope, &path, MacroKind::Derive, force) {
|
_ => return Err(determinacy),
|
||||||
|
};
|
||||||
|
for path in traits {
|
||||||
|
match self.resolve_macro(scope, path, MacroKind::Derive, force) {
|
||||||
Ok(ext) => if let SyntaxExtension::ProcMacroDerive(_, ref inert_attrs) = *ext {
|
Ok(ext) => if let SyntaxExtension::ProcMacroDerive(_, ref inert_attrs) = *ext {
|
||||||
if inert_attrs.contains(&attr_name) {
|
if inert_attrs.contains(&attr_name) {
|
||||||
// FIXME(jseyfried) Avoid `mem::replace` here.
|
// FIXME(jseyfried) Avoid `mem::replace` here.
|
||||||
|
@ -307,12 +328,12 @@ impl<'a> Resolver<'a> {
|
||||||
}
|
}
|
||||||
return Err(Determinacy::Undetermined);
|
return Err(Determinacy::Undetermined);
|
||||||
},
|
},
|
||||||
Err(Determinacy::Undetermined) => determined = false,
|
Err(Determinacy::Undetermined) => determinacy = Determinacy::Undetermined,
|
||||||
Err(Determinacy::Determined) => {}
|
Err(Determinacy::Determined) => {}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Err(if determined { Determinacy::Determined } else { Determinacy::Undetermined })
|
Err(determinacy)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn resolve_macro_to_def(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
|
fn resolve_macro_to_def(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
|
||||||
|
@ -331,7 +352,7 @@ impl<'a> Resolver<'a> {
|
||||||
self.current_module = invocation.module.get();
|
self.current_module = invocation.module.get();
|
||||||
|
|
||||||
if path.len() > 1 {
|
if path.len() > 1 {
|
||||||
if !self.use_extern_macros {
|
if !self.use_extern_macros && self.gated_errors.insert(span) {
|
||||||
let msg = "non-ident macro paths are experimental";
|
let msg = "non-ident macro paths are experimental";
|
||||||
let feature = "use_extern_macros";
|
let feature = "use_extern_macros";
|
||||||
emit_feature_err(&self.session.parse_sess, feature, span, GateIssue::Language, msg);
|
emit_feature_err(&self.session.parse_sess, feature, span, GateIssue::Language, msg);
|
||||||
|
|
|
@@ -14,7 +14,6 @@ use rustc::ty::TyCtxt;
 use syntax::ast::{self, NodeId};
 use syntax::codemap::CodeMap;
 use syntax::print::pprust;
-use syntax::symbol::Symbol;
 use syntax_pos::Span;
 
 use data::{self, Visibility, SigElement};

@@ -77,10 +76,9 @@ impl Lower for Vec<ast::Attribute> {
     type Target = Vec<Attribute>;
 
     fn lower(self, tcx: TyCtxt) -> Vec<Attribute> {
-        let doc = Symbol::intern("doc");
         self.into_iter()
         // Only retain real attributes. Doc comments are lowered separately.
-        .filter(|attr| attr.name() != doc)
+        .filter(|attr| attr.path != "doc")
         .map(|mut attr| {
             // Remove the surrounding '#[..]' or '#![..]' of the pretty printed
             // attribute. First normalize all inner attribute (#![..]) to outer

@@ -54,7 +54,7 @@ use std::path::{Path, PathBuf};
 use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID};
 use syntax::parse::lexer::comments::strip_doc_comment_decoration;
 use syntax::parse::token;
-use syntax::symbol::{Symbol, keywords};
+use syntax::symbol::keywords;
 use syntax::visit::{self, Visitor};
 use syntax::print::pprust::{ty_to_string, arg_to_string};
 use syntax::codemap::MacroAttribute;
@@ -829,11 +829,10 @@ impl<'a> Visitor<'a> for PathCollector {
 }
 
 fn docs_for_attrs(attrs: &[Attribute]) -> String {
-    let doc = Symbol::intern("doc");
     let mut result = String::new();
 
     for attr in attrs {
-        if attr.name() == doc {
+        if attr.check_name("doc") {
             if let Some(val) = attr.value_str() {
                 if attr.is_sugared_doc {
                     result.push_str(&strip_doc_comment_decoration(&val.as_str()));

@@ -113,7 +113,7 @@ impl<'a, 'tcx> AssertModuleSource<'a, 'tcx> {
     }
 
     fn field(&self, attr: &ast::Attribute, name: &str) -> ast::Name {
-        for item in attr.meta_item_list().unwrap_or(&[]) {
+        for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
            if item.check_name(name) {
                if let Some(value) = item.value_str() {
                    return value;
@@ -39,12 +39,11 @@ use rustc::util::nodemap::{FxHashMap, FxHashSet};
 
 use rustc::hir;
 
+use std::{mem, slice, vec};
 use std::path::PathBuf;
 use std::rc::Rc;
-use std::slice;
 use std::sync::Arc;
 use std::u32;
-use std::mem;
 
 use core::DocContext;
 use doctree;

@@ -472,12 +471,12 @@ impl Clean<Item> for doctree::Module {
 
 pub struct ListAttributesIter<'a> {
     attrs: slice::Iter<'a, ast::Attribute>,
-    current_list: slice::Iter<'a, ast::NestedMetaItem>,
+    current_list: vec::IntoIter<ast::NestedMetaItem>,
     name: &'a str
 }
 
 impl<'a> Iterator for ListAttributesIter<'a> {
-    type Item = &'a ast::NestedMetaItem;
+    type Item = ast::NestedMetaItem;
 
     fn next(&mut self) -> Option<Self::Item> {
         if let Some(nested) = self.current_list.next() {

@@ -485,9 +484,9 @@ impl<'a> Iterator for ListAttributesIter<'a> {
         }
 
         for attr in &mut self.attrs {
-            if let Some(ref list) = attr.meta_item_list() {
+            if let Some(list) = attr.meta_item_list() {
                 if attr.check_name(self.name) {
-                    self.current_list = list.iter();
+                    self.current_list = list.into_iter();
                     if let Some(nested) = self.current_list.next() {
                         return Some(nested);
                     }

@@ -508,7 +507,7 @@ impl AttributesExt for [ast::Attribute] {
     fn lists<'a>(&'a self, name: &'a str) -> ListAttributesIter<'a> {
         ListAttributesIter {
             attrs: self.iter(),
-            current_list: [].iter(),
+            current_list: Vec::new().into_iter(),
             name: name
         }
     }
 }

@@ -519,7 +518,7 @@ pub trait NestedAttributesExt {
     fn has_word(self, &str) -> bool;
 }
 
-impl<'a, I: IntoIterator<Item=&'a ast::NestedMetaItem>> NestedAttributesExt for I {
+impl<I: IntoIterator<Item=ast::NestedMetaItem>> NestedAttributesExt for I {
     fn has_word(self, word: &str) -> bool {
         self.into_iter().any(|attr| attr.is_word() && attr.check_name(word))
     }
@@ -2596,9 +2595,9 @@ impl Clean<Vec<Item>> for doctree::Import {
         // #[doc(no_inline)] attribute is present.
         // Don't inline doc(hidden) imports so they can be stripped at a later stage.
         let denied = self.vis != hir::Public || self.attrs.iter().any(|a| {
-            a.name() == "doc" && match a.meta_item_list() {
-                Some(l) => attr::list_contains_name(l, "no_inline") ||
-                           attr::list_contains_name(l, "hidden"),
+            a.name().unwrap() == "doc" && match a.meta_item_list() {
+                Some(l) => attr::list_contains_name(&l, "no_inline") ||
+                           attr::list_contains_name(&l, "hidden"),
                 None => false,
             }
         });

@@ -2620,11 +2620,11 @@ fn render_attributes(w: &mut fmt::Formatter, it: &clean::Item) -> fmt::Result {
     let mut attrs = String::new();
 
     for attr in &it.attrs.other_attrs {
-        let name = attr.name();
+        let name = attr.name().unwrap();
         if !ATTRIBUTE_WHITELIST.contains(&&name.as_str()[..]) {
             continue;
         }
-        if let Some(s) = render_attribute(attr.meta()) {
+        if let Some(s) = render_attribute(&attr.meta().unwrap()) {
             attrs.push_str(&format!("#[{}]\n", s));
         }
     }

@@ -137,13 +137,13 @@ fn scrape_test_config(krate: &::rustc::hir::Crate) -> TestOptions {
         attrs: Vec::new(),
     };
 
-    let attrs = krate.attrs.iter()
+    let test_attrs: Vec<_> = krate.attrs.iter()
         .filter(|a| a.check_name("doc"))
-        .filter_map(|a| a.meta_item_list())
-        .flat_map(|l| l)
-        .filter(|a| a.check_name("test"))
-        .filter_map(|a| a.meta_item_list())
-        .flat_map(|l| l);
+        .flat_map(|a| a.meta_item_list().unwrap_or_else(Vec::new))
+        .filter(|a| a.check_name("test"))
+        .collect();
+    let attrs = test_attrs.iter().flat_map(|a| a.meta_item_list().unwrap_or(&[]));
 
     for attr in attrs {
         if attr.check_name("no_crate_inject") {
             opts.no_crate_inject = true;

@@ -376,7 +376,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
         if item.vis == hir::Public && self.inside_public_path {
             let please_inline = item.attrs.iter().any(|item| {
                 match item.meta_item_list() {
-                    Some(list) if item.check_name("doc") => {
+                    Some(ref list) if item.check_name("doc") => {
                         list.iter().any(|i| i.check_name("inline"))
                     }
                     _ => false,
@@ -116,6 +116,12 @@ pub struct Path {
     pub segments: Vec<PathSegment>,
 }
 
+impl<'a> PartialEq<&'a str> for Path {
+    fn eq(&self, string: &&'a str) -> bool {
+        self.segments.len() == 1 && self.segments[0].identifier.name == *string
+    }
+}
+
 impl fmt::Debug for Path {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "path({})", pprust::path_to_string(self))
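This new `PartialEq<&str>` impl is what lets the earlier hunks write comparisons like `attr.path == "macro_export"`: a path equals a string only when it is a single segment with that name. A self-contained sketch of the same semantics (with simplified stand-ins for `ast::Path` and `ast::PathSegment`):

```rust
struct Seg { name: String }
struct Path { segments: Vec<Seg> }

impl<'a> PartialEq<&'a str> for Path {
    fn eq(&self, string: &&'a str) -> bool {
        // Multi-segment paths like `foo::bar` never equal a bare name.
        self.segments.len() == 1 && self.segments[0].name == *string
    }
}

fn main() {
    let doc = Path { segments: vec![Seg { name: "doc".into() }] };
    let qualified = Path {
        segments: vec![Seg { name: "foo".into() }, Seg { name: "doc".into() }],
    };
    assert!(doc == "doc");
    assert!(!(qualified == "doc")); // e.g. `#[foo::doc]` is not `#[doc]`
}
```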
@@ -1681,7 +1687,8 @@ pub struct AttrId(pub usize);
 pub struct Attribute {
     pub id: AttrId,
     pub style: AttrStyle,
-    pub value: MetaItem,
+    pub path: Path,
+    pub tokens: TokenStream,
     pub is_sugared_doc: bool,
     pub span: Span,
 }
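Under the new layout an attribute keeps its invocation path and raw argument tokens separately: for the PR's example `#[foo::attr_macro arbitrary + tokens]`, `path` holds `foo::attr_macro` and `tokens` holds `arbitrary + tokens`. A rough stand-in for the shape, using strings in place of `ast::Path` and `TokenStream` purely for illustration:

```rust
// Simplified stand-in for the new ast::Attribute layout.
struct Attribute {
    path: String,   // really an ast::Path, e.g. `foo::attr_macro`
    tokens: String, // really a TokenStream, e.g. `arbitrary + tokens`
    is_sugared_doc: bool,
}

fn main() {
    // Decomposition of `#[foo::attr_macro arbitrary + tokens]`:
    let attr = Attribute {
        path: "foo::attr_macro".to_string(),
        tokens: "arbitrary + tokens".to_string(),
        is_sugared_doc: false,
    };
    println!("#[{} {}]", attr.path, attr.tokens);
    assert!(!attr.is_sugared_doc);
}
```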
@@ -15,20 +15,24 @@ pub use self::ReprAttr::*;
 pub use self::IntType::*;
 
 use ast;
-use ast::{AttrId, Attribute, Name};
+use ast::{AttrId, Attribute, Name, Ident};
 use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind};
-use ast::{Lit, Expr, Item, Local, Stmt, StmtKind};
+use ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind};
 use codemap::{Spanned, spanned, dummy_spanned, mk_sp};
 use syntax_pos::{Span, BytePos, DUMMY_SP};
 use errors::Handler;
 use feature_gate::{Features, GatedCfg};
 use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
-use parse::ParseSess;
+use parse::parser::Parser;
+use parse::{self, ParseSess, PResult};
+use parse::token::{self, Token};
 use ptr::P;
 use symbol::Symbol;
+use tokenstream::{TokenStream, TokenTree, Delimited};
 use util::ThinVec;
 
 use std::cell::{RefCell, Cell};
+use std::iter;
 
 thread_local! {
     static USED_ATTRS: RefCell<Vec<u64>> = RefCell::new(Vec::new());
@@ -185,26 +189,38 @@ impl NestedMetaItem {
 
 impl Attribute {
     pub fn check_name(&self, name: &str) -> bool {
-        let matches = self.name() == name;
+        let matches = self.path == name;
         if matches {
             mark_used(self);
         }
         matches
     }
 
-    pub fn name(&self) -> Name { self.meta().name() }
+    pub fn name(&self) -> Option<Name> {
+        match self.path.segments.len() {
+            1 => Some(self.path.segments[0].identifier.name),
+            _ => None,
+        }
+    }
 
     pub fn value_str(&self) -> Option<Symbol> {
-        self.meta().value_str()
+        self.meta().and_then(|meta| meta.value_str())
     }
 
-    pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> {
-        self.meta().meta_item_list()
+    pub fn meta_item_list(&self) -> Option<Vec<NestedMetaItem>> {
+        match self.meta() {
+            Some(MetaItem { node: MetaItemKind::List(list), .. }) => Some(list),
+            _ => None
+        }
     }
 
-    pub fn is_word(&self) -> bool { self.meta().is_word() }
+    pub fn is_word(&self) -> bool {
+        self.path.segments.len() == 1 && self.tokens.is_empty()
+    }
 
-    pub fn span(&self) -> Span { self.meta().span }
+    pub fn span(&self) -> Span {
+        self.span
+    }
 
     pub fn is_meta_item_list(&self) -> bool {
         self.meta_item_list().is_some()
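Since a multi-segment path has no single name, `name()` now returns `Option<Name>`, and callers throughout the patch bail out with `unwrap_or!(attr.name(), continue)` or match on the `Option`. The core rule, as a standalone sketch:

```rust
// A path has a "name" only when it is a single segment.
fn name(segments: &[&str]) -> Option<&str> {
    match segments.len() {
        1 => Some(segments[0]),
        _ => None, // e.g. `foo::attr_macro` has no single name
    }
}

fn main() {
    assert_eq!(name(&["inline"]), Some("inline"));
    assert_eq!(name(&["foo", "attr_macro"]), None);
}
```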
@@ -225,7 +241,7 @@ impl MetaItem {
         match self.node {
             MetaItemKind::NameValue(ref v) => {
                 match v.node {
-                    ast::LitKind::Str(ref s, _) => Some((*s).clone()),
+                    LitKind::Str(ref s, _) => Some((*s).clone()),
                     _ => None,
                 }
             },
@@ -264,8 +280,66 @@ impl MetaItem {
 
 impl Attribute {
     /// Extract the MetaItem from inside this Attribute.
-    pub fn meta(&self) -> &MetaItem {
-        &self.value
+    pub fn meta(&self) -> Option<MetaItem> {
+        let mut tokens = self.tokens.trees().peekable();
+        Some(MetaItem {
+            name: match self.path.segments.len() {
+                1 => self.path.segments[0].identifier.name,
+                _ => return None,
+            },
+            node: if let Some(node) = MetaItemKind::from_tokens(&mut tokens) {
+                if tokens.peek().is_some() {
+                    return None;
+                }
+                node
+            } else {
+                return None;
+            },
+            span: self.span,
+        })
+    }
+
+    pub fn parse<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, T>
+        where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    {
+        let mut parser = Parser::new(sess, self.tokens.clone(), None, false);
+        let result = f(&mut parser)?;
+        if parser.token != token::Eof {
+            parser.unexpected()?;
+        }
+        Ok(result)
+    }
+
+    pub fn parse_list<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, Vec<T>>
+        where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    {
+        if self.tokens.is_empty() {
+            return Ok(Vec::new());
+        }
+        self.parse(sess, |parser| {
+            parser.expect(&token::OpenDelim(token::Paren))?;
+            let mut list = Vec::new();
+            while !parser.eat(&token::CloseDelim(token::Paren)) {
+                list.push(f(parser)?);
+                if !parser.eat(&token::Comma) {
+                    parser.expect(&token::CloseDelim(token::Paren))?;
+                    break
+                }
+            }
+            Ok(list)
+        })
+    }
+
+    pub fn parse_meta<'a>(&self, sess: &'a ParseSess) -> PResult<'a, MetaItem> {
+        if self.path.segments.len() > 1 {
+            sess.span_diagnostic.span_err(self.path.span, "expected ident, found path");
+        }
+
+        Ok(MetaItem {
+            name: self.path.segments.last().unwrap().identifier.name,
+            node: self.parse(sess, |parser| parser.parse_meta_item_kind())?,
+            span: self.span,
+        })
     }
 }
 
 /// Convert self to a normal #[doc="foo"] comment, if it is a
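`parse_list` is the new entry point the resolver uses to read paths out of `#[derive(...)]` (see the `parse_path(PathStyle::Mod)` call in the resolver hunk above). The surface grammar it accepts, modeled on strings rather than real token streams (a simplification; the actual implementation drives a `Parser` over `self.tokens`):

```rust
// Toy model of the list grammar: empty tokens mean an empty list; otherwise
// `( item , item , ... )` with an optional trailing comma, mirroring the
// eat/expect loop in `parse_list` above.
fn parse_list(tokens: &str) -> Result<Vec<String>, String> {
    if tokens.is_empty() {
        return Ok(Vec::new());
    }
    let inner = tokens
        .strip_prefix('(')
        .and_then(|t| t.strip_suffix(')'))
        .ok_or("expected parentheses")?;
    Ok(inner
        .split(',')
        .map(str::trim)
        .filter(|s| !s.is_empty()) // tolerate a trailing comma
        .map(String::from)
        .collect())
}

fn main() {
    assert_eq!(parse_list("").unwrap(), Vec::<String>::new());
    assert_eq!(parse_list("(foo::Bar, Baz,)").unwrap(), vec!["foo::Bar", "Baz"]);
}
```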
@@ -293,7 +367,7 @@ impl Attribute {
 /* Constructors */
 
 pub fn mk_name_value_item_str(name: Name, value: Symbol) -> MetaItem {
-    let value_lit = dummy_spanned(ast::LitKind::Str(value, ast::StrStyle::Cooked));
+    let value_lit = dummy_spanned(LitKind::Str(value, ast::StrStyle::Cooked));
     mk_spanned_name_value_item(DUMMY_SP, name, value_lit)
 }
 

@@ -348,7 +422,8 @@ pub fn mk_spanned_attr_inner(sp: Span, id: AttrId, item: MetaItem) -> Attribute
     Attribute {
         id: id,
         style: ast::AttrStyle::Inner,
-        value: item,
+        path: ast::Path::from_ident(item.span, ast::Ident::with_empty_ctxt(item.name)),
+        tokens: item.node.tokens(item.span),
         is_sugared_doc: false,
         span: sp,
     }

@@ -365,7 +440,8 @@ pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute
     Attribute {
         id: id,
         style: ast::AttrStyle::Outer,
-        value: item,
+        path: ast::Path::from_ident(item.span, ast::Ident::with_empty_ctxt(item.name)),
+        tokens: item.node.tokens(item.span),
         is_sugared_doc: false,
         span: sp,
     }

@@ -374,32 +450,25 @@ pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute
 pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, lo: BytePos, hi: BytePos)
                            -> Attribute {
     let style = doc_comment_style(&text.as_str());
-    let lit = spanned(lo, hi, ast::LitKind::Str(text, ast::StrStyle::Cooked));
+    let lit = spanned(lo, hi, LitKind::Str(text, ast::StrStyle::Cooked));
     Attribute {
         id: id,
         style: style,
-        value: MetaItem {
-            span: mk_sp(lo, hi),
-            name: Symbol::intern("doc"),
-            node: MetaItemKind::NameValue(lit),
-        },
+        path: ast::Path::from_ident(mk_sp(lo, hi), ast::Ident::from_str("doc")),
+        tokens: MetaItemKind::NameValue(lit).tokens(mk_sp(lo, hi)),
         is_sugared_doc: true,
         span: mk_sp(lo, hi),
     }
 }
 
 pub fn list_contains_name(items: &[NestedMetaItem], name: &str) -> bool {
-    debug!("attr::list_contains_name (name={})", name);
     items.iter().any(|item| {
-        debug!("  testing: {:?}", item.name());
         item.check_name(name)
     })
 }
 
 pub fn contains_name(attrs: &[Attribute], name: &str) -> bool {
-    debug!("attr::contains_name (name={})", name);
     attrs.iter().any(|item| {
-        debug!("  testing: {}", item.name());
         item.check_name(name)
     })
 }
@@ -452,8 +521,14 @@ pub enum InlineAttr {
 /// Determine what `#[inline]` attribute is present in `attrs`, if any.
 pub fn find_inline_attr(diagnostic: Option<&Handler>, attrs: &[Attribute]) -> InlineAttr {
     attrs.iter().fold(InlineAttr::None, |ia, attr| {
-        match attr.value.node {
-            _ if attr.value.name != "inline" => ia,
+        if attr.path != "inline" {
+            return ia;
+        }
+        let meta = match attr.meta() {
+            Some(meta) => meta.node,
+            None => return ia,
+        };
+        match meta {
             MetaItemKind::Word => {
                 mark_used(attr);
                 InlineAttr::Hint

@@ -574,14 +649,15 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
     let mut rustc_depr: Option<RustcDeprecation> = None;
 
     'outer: for attr in attrs_iter {
-        let tag = attr.name();
-        if tag != "rustc_deprecated" && tag != "unstable" && tag != "stable" {
+        if attr.path != "rustc_deprecated" && attr.path != "unstable" && attr.path != "stable" {
             continue // not a stability level
         }
 
         mark_used(attr);
 
-        if let Some(metas) = attr.meta_item_list() {
+        let meta = attr.meta();
+        if let Some(MetaItem { node: MetaItemKind::List(ref metas), .. }) = meta {
+            let meta = meta.as_ref().unwrap();
             let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
                 if item.is_some() {
                     handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name()));

@@ -596,7 +672,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
             }
         };
 
-        match &*tag.as_str() {
+        match &*meta.name.as_str() {
             "rustc_deprecated" => {
                 if rustc_depr.is_some() {
                     span_err!(diagnostic, item_sp, E0540,

@@ -772,7 +848,7 @@ fn find_deprecation_generic<'a, I>(diagnostic: &Handler,
     let mut depr: Option<Deprecation> = None;
 
     'outer: for attr in attrs_iter {
-        if attr.name() != "deprecated" {
+        if attr.path != "deprecated" {
             continue
         }
 

@@ -847,8 +923,8 @@ pub fn find_deprecation(diagnostic: &Handler, attrs: &[Attribute],
 /// structure layout, and `packed` to remove padding.
 pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec<ReprAttr> {
     let mut acc = Vec::new();
-    match attr.value.node {
-        ast::MetaItemKind::List(ref items) if attr.value.name == "repr" => {
+    if attr.path == "repr" {
+        if let Some(items) = attr.meta_item_list() {
             mark_used(attr);
             for item in items {
                 if !item.is_meta_item() {

@@ -883,8 +959,6 @@ pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec<ReprAttr>
             }
         }
     }
-        // Not a "repr" hint: ignore.
-        _ => { }
-    }
     acc
 }
@@ -931,6 +1005,206 @@ impl IntType {
     }
 }
 
+impl MetaItem {
+    fn tokens(&self) -> TokenStream {
+        let ident = TokenTree::Token(self.span, Token::Ident(Ident::with_empty_ctxt(self.name)));
+        TokenStream::concat(vec![ident.into(), self.node.tokens(self.span)])
+    }
+
+    fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
+        where I: Iterator<Item = TokenTree>,
+    {
+        let (mut span, name) = match tokens.next() {
+            Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name),
+            Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => return match **nt {
+                token::Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
+                _ => None,
+            },
+            _ => return None,
+        };
+        let node = match MetaItemKind::from_tokens(tokens) {
+            Some(node) => node,
+            _ => return None,
+        };
+        if let Some(last_span) = node.last_span() {
+            span.hi = last_span.hi;
+        }
+        Some(MetaItem { name: name, span: span, node: node })
+    }
+}
+
+impl MetaItemKind {
+    fn last_span(&self) -> Option<Span> {
+        match *self {
+            MetaItemKind::Word => None,
+            MetaItemKind::List(ref list) => list.last().map(NestedMetaItem::span),
+            MetaItemKind::NameValue(ref lit) => Some(lit.span),
+        }
+    }
+
+    pub fn tokens(&self, span: Span) -> TokenStream {
+        match *self {
+            MetaItemKind::Word => TokenStream::empty(),
+            MetaItemKind::NameValue(ref lit) => {
+                TokenStream::concat(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()])
+            }
+            MetaItemKind::List(ref list) => {
+                let mut tokens = Vec::new();
+                for (i, item) in list.iter().enumerate() {
+                    if i > 0 {
+                        tokens.push(TokenTree::Token(span, Token::Comma).into());
+                    }
+                    tokens.push(item.node.tokens());
+                }
+                TokenTree::Delimited(span, Delimited {
+                    delim: token::Paren,
+                    tts: TokenStream::concat(tokens).into(),
+                }).into()
+            }
+        }
+    }
+
+    fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemKind>
+        where I: Iterator<Item = TokenTree>,
+    {
+        let delimited = match tokens.peek().cloned() {
+            Some(TokenTree::Token(_, token::Eq)) => {
+                tokens.next();
+                return if let Some(TokenTree::Token(span, token)) = tokens.next() {
+                    LitKind::from_token(token)
+                        .map(|lit| MetaItemKind::NameValue(Spanned { node: lit, span: span }))
+                } else {
+                    None
+                };
+            }
+            Some(TokenTree::Delimited(_, ref delimited)) if delimited.delim == token::Paren => {
+                tokens.next();
+                delimited.stream()
+            }
+            _ => return Some(MetaItemKind::Word),
+        };
+
+        let mut tokens = delimited.into_trees().peekable();
+        let mut result = Vec::new();
+        while let Some(..) = tokens.peek() {
+            match NestedMetaItemKind::from_tokens(&mut tokens) {
+                Some(item) => result.push(Spanned { span: item.span(), node: item }),
+                None => return None,
+            }
+            match tokens.next() {
+                None | Some(TokenTree::Token(_, Token::Comma)) => {}
+                _ => return None,
+            }
+        }
+        Some(MetaItemKind::List(result))
+    }
+}
+
+impl NestedMetaItemKind {
+    fn span(&self) -> Span {
+        match *self {
+            NestedMetaItemKind::MetaItem(ref item) => item.span,
+            NestedMetaItemKind::Literal(ref lit) => lit.span,
+        }
+    }
+
+    fn tokens(&self) -> TokenStream {
+        match *self {
+            NestedMetaItemKind::MetaItem(ref item) => item.tokens(),
+            NestedMetaItemKind::Literal(ref lit) => lit.tokens(),
+        }
+    }
+
+    fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<NestedMetaItemKind>
+        where I: Iterator<Item = TokenTree>,
+    {
+        if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
+            if let Some(node) = LitKind::from_token(token) {
+                tokens.next();
+                return Some(NestedMetaItemKind::Literal(Spanned { node: node, span: span }));
+            }
+        }
+
+        MetaItem::from_tokens(tokens).map(NestedMetaItemKind::MetaItem)
+    }
+}
+
+impl Lit {
+    fn tokens(&self) -> TokenStream {
+        TokenTree::Token(self.span, self.node.token()).into()
+    }
+}
+
+impl LitKind {
+    fn token(&self) -> Token {
+        use std::ascii;
+
+        match *self {
+            LitKind::Str(string, ast::StrStyle::Cooked) => {
+                let mut escaped = String::new();
+                for ch in string.as_str().chars() {
+                    escaped.extend(ch.escape_unicode());
+                }
+                Token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None)
+            }
+            LitKind::Str(string, ast::StrStyle::Raw(n)) => {
+                Token::Literal(token::Lit::StrRaw(string, n), None)
+            }
+            LitKind::ByteStr(ref bytes) => {
+                let string = bytes.iter().cloned().flat_map(ascii::escape_default)
+                    .map(Into::<char>::into).collect::<String>();
+                Token::Literal(token::Lit::ByteStr(Symbol::intern(&string)), None)
+            }
+            LitKind::Byte(byte) => {
+                let string: String = ascii::escape_default(byte).map(Into::<char>::into).collect();
+                Token::Literal(token::Lit::Byte(Symbol::intern(&string)), None)
+            }
+            LitKind::Char(ch) => {
+                let string: String = ch.escape_default().map(Into::<char>::into).collect();
+                Token::Literal(token::Lit::Char(Symbol::intern(&string)), None)
+            }
+            LitKind::Int(n, ty) => {
+                let suffix = match ty {
+                    ast::LitIntType::Unsigned(ty) => Some(Symbol::intern(ty.ty_to_string())),
+                    ast::LitIntType::Signed(ty) => Some(Symbol::intern(ty.ty_to_string())),
+                    ast::LitIntType::Unsuffixed => None,
+                };
+                Token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), suffix)
+            }
+            LitKind::Float(symbol, ty) => {
+                Token::Literal(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
+            }
+            LitKind::FloatUnsuffixed(symbol) => Token::Literal(token::Lit::Float(symbol), None),
|
LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(match value {
|
||||||
|
true => "true",
|
||||||
|
false => "false",
|
||||||
|
}))),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_token(token: Token) -> Option<LitKind> {
|
||||||
|
match token {
|
||||||
|
Token::Ident(ident) if ident.name == "true" => Some(LitKind::Bool(true)),
|
||||||
|
Token::Ident(ident) if ident.name == "false" => Some(LitKind::Bool(false)),
|
||||||
|
Token::Interpolated(ref nt) => match **nt {
|
||||||
|
token::NtExpr(ref v) => match v.node {
|
||||||
|
ExprKind::Lit(ref lit) => Some(lit.node.clone()),
|
||||||
|
_ => None,
|
||||||
|
},
|
||||||
|
_ => None,
|
||||||
|
},
|
||||||
|
Token::Literal(lit, suf) => {
|
||||||
|
let (suffix_illegal, result) = parse::lit_token(lit, suf, None);
|
||||||
|
if suffix_illegal && suf.is_some() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
result
|
||||||
|
}
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub trait HasAttrs: Sized {
|
pub trait HasAttrs: Sized {
|
||||||
fn attrs(&self) -> &[ast::Attribute];
|
fn attrs(&self) -> &[ast::Attribute];
|
||||||
fn map_attrs<F: FnOnce(Vec<ast::Attribute>) -> Vec<ast::Attribute>>(self, f: F) -> Self;
|
fn map_attrs<F: FnOnce(Vec<ast::Attribute>) -> Vec<ast::Attribute>>(self, f: F) -> Self;
|
||||||
|
|
|
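The `tokens`/`from_tokens` pair added above is what lets an attribute live as a raw token stream and still be reparsed into a `MetaItem` on demand. A minimal, self-contained sketch of the lowering direction, using plain strings in place of rustc's `TokenStream` (all types here are illustrative, not rustc API):

```rust
// Toy mirror of MetaItemKind::tokens: each meta-item shape lowers to a
// predictable token surface that from_tokens can later re-read.
#[derive(Debug, PartialEq)]
enum MetaKind {
    Word,               // #[test]
    NameValue(String),  // #[path = "lib.rs"]
    List(Vec<String>),  // #[derive(A, B)]
}

fn to_tokens(name: &str, kind: &MetaKind) -> String {
    match *kind {
        MetaKind::Word => name.to_string(),
        MetaKind::NameValue(ref v) => format!("{} = \"{}\"", name, v),
        MetaKind::List(ref items) => format!("{}({})", name, items.join(", ")),
    }
}

fn main() {
    assert_eq!(to_tokens("test", &MetaKind::Word), "test");
    assert_eq!(to_tokens("path", &MetaKind::NameValue("lib.rs".into())), "path = \"lib.rs\"");
    assert_eq!(to_tokens("derive", &MetaKind::List(vec!["A".into(), "B".into()])), "derive(A, B)");
}
```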
@@ -13,9 +13,10 @@ use feature_gate::{feature_err, EXPLAIN_STMT_ATTR_SYNTAX, Features, get_features
 use {fold, attr};
 use ast;
 use codemap::Spanned;
-use parse::ParseSess;
-use ptr::P;
+use parse::{token, ParseSess};
+use syntax_pos::Span;

+use ptr::P;
 use util::small_vector::SmallVector;

 /// A folder that strips out items that do not belong in the current configuration.
@@ -84,43 +85,33 @@ impl<'a> StripUnconfigured<'a> {
             return Some(attr);
         }

-        let attr_list = match attr.meta_item_list() {
-            Some(attr_list) => attr_list,
-            None => {
-                let msg = "expected `#[cfg_attr(<cfg pattern>, <attr>)]`";
-                self.sess.span_diagnostic.span_err(attr.span, msg);
+        let (cfg, path, tokens, span) = match attr.parse(self.sess, |parser| {
+            parser.expect(&token::OpenDelim(token::Paren))?;
+            let cfg = parser.parse_meta_item()?;
+            parser.expect(&token::Comma)?;
+            let lo = parser.span.lo;
+            let (path, tokens) = parser.parse_path_and_tokens()?;
+            parser.expect(&token::CloseDelim(token::Paren))?;
+            Ok((cfg, path, tokens, Span { lo: lo, ..parser.prev_span }))
+        }) {
+            Ok(result) => result,
+            Err(mut e) => {
+                e.emit();
                 return None;
             }
         };

-        let (cfg, mi) = match (attr_list.len(), attr_list.get(0), attr_list.get(1)) {
-            (2, Some(cfg), Some(mi)) => (cfg, mi),
-            _ => {
-                let msg = "expected `#[cfg_attr(<cfg pattern>, <attr>)]`";
-                self.sess.span_diagnostic.span_err(attr.span, msg);
-                return None;
-            }
-        };
-        use attr::cfg_matches;
-        match (cfg.meta_item(), mi.meta_item()) {
-            (Some(cfg), Some(mi)) =>
-                if cfg_matches(&cfg, self.sess, self.features) {
-                    self.process_cfg_attr(ast::Attribute {
-                        id: attr::mk_attr_id(),
-                        style: attr.style,
-                        value: mi.clone(),
-                        is_sugared_doc: false,
-                        span: mi.span,
-                    })
-                } else {
-                    None
-                },
-            _ => {
-                let msg = "unexpected literal(s) in `#[cfg_attr(<cfg pattern>, <attr>)]`";
-                self.sess.span_diagnostic.span_err(attr.span, msg);
-                None
-            }
+        if attr::cfg_matches(&cfg, self.sess, self.features) {
+            self.process_cfg_attr(ast::Attribute {
+                id: attr::mk_attr_id(),
+                style: attr.style,
+                path: path,
+                tokens: tokens,
+                is_sugared_doc: false,
+                span: span,
+            })
+        } else {
+            None
         }
     }

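The `cfg_attr` arm above now drives a real parser over the attribute's tokens: an opening paren, one meta item, a comma, then a path plus arbitrary trailing tokens. A toy model of that grammar, operating on a string instead of rustc's token cursor (everything here is a hedged sketch, not the compiler's implementation):

```rust
// Split `(<cfg>, <attr tokens>)` on the first top-level comma, mirroring
// the shape the new cfg_attr parser expects.
fn split_cfg_attr(args: &str) -> Option<(&str, &str)> {
    let inner = args.strip_prefix('(')?.strip_suffix(')')?;
    let mut depth = 0;
    for (i, c) in inner.char_indices() {
        match c {
            '(' => depth += 1,
            ')' => depth -= 1,
            ',' if depth == 0 => return Some((&inner[..i], inner[i + 1..].trim_start())),
            _ => {}
        }
    }
    None
}

fn main() {
    let (cfg, attr) = split_cfg_attr("(all(), some::attr extra tokens)").unwrap();
    assert_eq!(cfg, "all()");                       // the <cfg pattern> meta item
    assert_eq!(attr, "some::attr extra tokens");    // path plus arbitrary tokens
}
```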
@@ -132,9 +123,12 @@ impl<'a> StripUnconfigured<'a> {
             return false;
         }

-        let mis = match attr.value.node {
-            ast::MetaItemKind::List(ref mis) if is_cfg(&attr) => mis,
-            _ => return true
+        let mis = if !is_cfg(&attr) {
+            return true;
+        } else if let Some(mis) = attr.meta_item_list() {
+            mis
+        } else {
+            return true;
         };

         if mis.len() != 1 {
@@ -12,36 +12,31 @@ use attr::HasAttrs;
 use {ast, codemap};
 use ext::base::ExtCtxt;
 use ext::build::AstBuilder;
+use parse::parser::PathStyle;
 use symbol::Symbol;
 use syntax_pos::Span;

-pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec<ast::Attribute>) -> Vec<(Symbol, Span)> {
+pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> {
     let mut result = Vec::new();
     attrs.retain(|attr| {
-        if attr.name() != "derive" {
+        if attr.path != "derive" {
             return true;
         }

-        if attr.value_str().is_some() {
-            cx.span_err(attr.span, "unexpected value in `derive`");
-            return false;
-        }
-
-        let traits = attr.meta_item_list().unwrap_or(&[]).to_owned();
-        if traits.is_empty() {
-            cx.span_warn(attr.span, "empty trait list in `derive`");
-            return false;
-        }
-
-        for titem in traits {
-            if titem.word().is_none() {
-                cx.span_err(titem.span, "malformed `derive` entry");
-                return false;
+        match attr.parse_list(cx.parse_sess, |parser| parser.parse_path(PathStyle::Mod)) {
+            Ok(ref traits) if traits.is_empty() => {
+                cx.span_warn(attr.span, "empty trait list in `derive`");
+                false
+            }
+            Ok(traits) => {
+                result.extend(traits);
+                true
+            }
+            Err(mut e) => {
+                e.emit();
+                false
             }
-            result.push((titem.name().unwrap(), titem.span));
         }
-
-        true
     });
     result
 }
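`collect_derives` keeps its single `retain` pass: non-`derive` attributes survive, `derive` attributes are consumed and whatever they carried (now full paths parsed with `PathStyle::Mod`) is accumulated. A minimal sketch of the same retain-and-collect shape, with strings standing in for `ast::Path` (names here are illustrative only):

```rust
// Walk the attribute list once, strip out `derive` attributes, and
// accumulate their payload; everything else is kept in place.
fn collect(attrs: &mut Vec<(String, Vec<String>)>) -> Vec<String> {
    let mut result = Vec::new();
    attrs.retain(|&(ref name, ref items)| {
        if name != "derive" {
            return true; // keep non-derive attributes untouched
        }
        result.extend(items.iter().cloned());
        false // the derive attribute itself is consumed
    });
    result
}

fn main() {
    let mut attrs = vec![
        ("derive".to_string(), vec!["x::Bar".to_string()]),
        ("inline".to_string(), vec![]),
    ];
    assert_eq!(collect(&mut attrs), vec!["x::Bar".to_string()]);
    assert_eq!(attrs.len(), 1); // only #[inline] remains
}
```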
@@ -60,21 +55,21 @@ fn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> Span {
     }
 }

-pub fn add_derived_markers<T: HasAttrs>(cx: &mut ExtCtxt, traits: &[(Symbol, Span)], item: T) -> T {
+pub fn add_derived_markers<T: HasAttrs>(cx: &mut ExtCtxt, traits: &[ast::Path], item: T) -> T {
     let span = match traits.get(0) {
-        Some(&(_, span)) => span,
+        Some(path) => path.span,
         None => return item,
     };

     item.map_attrs(|mut attrs| {
-        if traits.iter().any(|&(name, _)| name == "PartialEq") &&
-           traits.iter().any(|&(name, _)| name == "Eq") {
+        if traits.iter().any(|path| *path == "PartialEq") &&
+           traits.iter().any(|path| *path == "Eq") {
             let span = allow_unstable(cx, span, "derive(PartialEq, Eq)");
             let meta = cx.meta_word(span, Symbol::intern("structural_match"));
             attrs.push(cx.attribute(span, meta));
         }
-        if traits.iter().any(|&(name, _)| name == "Copy") &&
-           traits.iter().any(|&(name, _)| name == "Clone") {
+        if traits.iter().any(|path| *path == "Copy") &&
+           traits.iter().any(|path| *path == "Clone") {
             let span = allow_unstable(cx, span, "derive(Copy, Clone)");
             let meta = cx.meta_word(span, Symbol::intern("rustc_copy_clone_marker"));
             attrs.push(cx.attribute(span, meta));
@@ -8,8 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use ast::{self, Block, Ident, PatKind};
-use ast::{Name, MacStmtStyle, StmtKind, ItemKind};
+use ast::{self, Block, Ident, PatKind, Path};
+use ast::{MacStmtStyle, StmtKind, ItemKind};
 use attr::{self, HasAttrs};
 use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute};
 use config::{is_test_or_bench, StripUnconfigured};
@@ -27,7 +27,7 @@ use ptr::P;
 use std_inject;
 use symbol::Symbol;
 use symbol::keywords;
-use syntax_pos::{self, Span, ExpnId};
+use syntax_pos::{Span, ExpnId, DUMMY_SP};
 use tokenstream::TokenStream;
 use util::small_vector::SmallVector;
 use visit::Visitor;
@@ -165,12 +165,11 @@ pub enum InvocationKind {
     },
     Attr {
         attr: Option<ast::Attribute>,
-        traits: Vec<(Symbol, Span)>,
+        traits: Vec<Path>,
         item: Annotatable,
     },
     Derive {
-        name: Symbol,
-        span: Span,
+        path: Path,
         item: Annotatable,
     },
 }
@@ -180,8 +179,8 @@ impl Invocation {
         match self.kind {
             InvocationKind::Bang { span, .. } => span,
             InvocationKind::Attr { attr: Some(ref attr), .. } => attr.span,
-            InvocationKind::Attr { attr: None, .. } => syntax_pos::DUMMY_SP,
-            InvocationKind::Derive { span, .. } => span,
+            InvocationKind::Attr { attr: None, .. } => DUMMY_SP,
+            InvocationKind::Derive { ref path, .. } => path.span,
         }
     }
 }
@@ -272,17 +271,16 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             self.collect_invocations(expansion, &[])
         } else if let InvocationKind::Attr { attr: None, traits, item } = invoc.kind {
             let item = item
-                .map_attrs(|mut attrs| { attrs.retain(|a| a.name() != "derive"); attrs });
+                .map_attrs(|mut attrs| { attrs.retain(|a| a.path != "derive"); attrs });
             let item_with_markers =
                 add_derived_markers(&mut self.cx, &traits, item.clone());
             let derives = derives.entry(invoc.expansion_data.mark).or_insert_with(Vec::new);

-            for &(name, span) in &traits {
+            for path in &traits {
                 let mark = Mark::fresh();
                 derives.push(mark);
-                let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
                 let item = match self.cx.resolver.resolve_macro(
-                        Mark::root(), &path, MacroKind::Derive, false) {
+                        Mark::root(), path, MacroKind::Derive, false) {
                     Ok(ext) => match *ext {
                         SyntaxExtension::BuiltinDerive(..) => item_with_markers.clone(),
                         _ => item.clone(),
@@ -290,7 +288,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     _ => item.clone(),
                 };
                 invocations.push(Invocation {
-                    kind: InvocationKind::Derive { name: name, span: span, item: item },
+                    kind: InvocationKind::Derive { path: path.clone(), item: item },
                     expansion_kind: invoc.expansion_kind,
                     expansion_data: ExpansionData {
                         mark: mark,
@@ -380,11 +378,10 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         };

         attr::mark_used(&attr);
-        let name = attr.name();
         self.cx.bt_push(ExpnInfo {
             call_site: attr.span,
             callee: NameAndSpan {
-                format: MacroAttribute(name),
+                format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))),
                 span: Some(attr.span),
                 allow_internal_unstable: false,
             }
@@ -392,25 +389,25 @@ impl<'a, 'b> MacroExpander<'a, 'b> {

         match *ext {
             MultiModifier(ref mac) => {
-                let item = mac.expand(self.cx, attr.span, &attr.value, item);
+                let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+                let item = mac.expand(self.cx, attr.span, &meta, item);
                 kind.expect_from_annotatables(item)
             }
             MultiDecorator(ref mac) => {
                 let mut items = Vec::new();
-                mac.expand(self.cx, attr.span, &attr.value, &item,
-                           &mut |item| items.push(item));
+                let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+                mac.expand(self.cx, attr.span, &meta, &item, &mut |item| items.push(item));
                 items.push(item);
                 kind.expect_from_annotatables(items)
             }
             SyntaxExtension::AttrProcMacro(ref mac) => {
-                let attr_toks = stream_for_attr_args(&attr, &self.cx.parse_sess);
                 let item_toks = stream_for_item(&item, &self.cx.parse_sess);

                 let span = Span {
                     expn_id: self.cx.codemap().record_expansion(ExpnInfo {
                         call_site: attr.span,
                         callee: NameAndSpan {
-                            format: MacroAttribute(name),
+                            format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))),
                             span: None,
                             allow_internal_unstable: false,
                         },
@@ -418,15 +415,15 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     ..attr.span
                 };

-                let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
-                self.parse_expansion(tok_result, kind, name, span)
+                let tok_result = mac.expand(self.cx, attr.span, attr.tokens.clone(), item_toks);
+                self.parse_expansion(tok_result, kind, &attr.path, span)
             }
             SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) => {
-                self.cx.span_err(attr.span, &format!("`{}` is a derive mode", name));
+                self.cx.span_err(attr.span, &format!("`{}` is a derive mode", attr.path));
                 kind.dummy(attr.span)
             }
             _ => {
-                let msg = &format!("macro `{}` may not be used in attributes", name);
+                let msg = &format!("macro `{}` may not be used in attributes", attr.path);
                 self.cx.span_err(attr.span, &msg);
                 kind.dummy(attr.span)
             }
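With `attr.tokens.clone()` handed straight to the `AttrProcMacro`, an attribute macro now receives its argument tokens exactly as written rather than a pretty-printed reconstruction. A hedged sketch of the receiving side in a hypothetical proc-macro crate (`attr_macro` is an assumed name, not part of this PR; the crate must be compiled as a proc-macro):

```rust
extern crate proc_macro;
use proc_macro::TokenStream;

// For `#[attr_macro arbitrary + tokens] struct S;`, `args` is the raw
// stream `arbitrary + tokens` -- it no longer has to parse as a meta item.
#[proc_macro_attribute]
pub fn attr_macro(args: TokenStream, input: TokenStream) -> TokenStream {
    println!("args = {}", args); // inspect the verbatim argument tokens
    input                        // pass the annotated item through unchanged
}
```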
@@ -442,7 +439,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         };
         let path = &mac.node.path;

-        let extname = path.segments.last().unwrap().identifier.name;
         let ident = ident.unwrap_or(keywords::Invalid.ident());
         let marked_tts =
             noop_fold_tts(mac.node.stream(), &mut Marker { mark: mark, expn_id: None });
@@ -450,7 +446,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
                 if ident.name != keywords::Invalid.name() {
                     let msg =
-                        format!("macro {}! expects no ident argument, given '{}'", extname, ident);
+                        format!("macro {}! expects no ident argument, given '{}'", path, ident);
                     self.cx.span_err(path.span, &msg);
                     return kind.dummy(span);
                 }
@@ -458,7 +454,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 self.cx.bt_push(ExpnInfo {
                     call_site: span,
                     callee: NameAndSpan {
-                        format: MacroBang(extname),
+                        format: MacroBang(Symbol::intern(&format!("{}", path))),
                         span: exp_span,
                         allow_internal_unstable: allow_internal_unstable,
                     },
@@ -470,14 +466,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             IdentTT(ref expander, tt_span, allow_internal_unstable) => {
                 if ident.name == keywords::Invalid.name() {
                     self.cx.span_err(path.span,
-                                     &format!("macro {}! expects an ident argument", extname));
+                                     &format!("macro {}! expects an ident argument", path));
                     return kind.dummy(span);
                 };

                 self.cx.bt_push(ExpnInfo {
                     call_site: span,
                     callee: NameAndSpan {
-                        format: MacroBang(extname),
+                        format: MacroBang(Symbol::intern(&format!("{}", path))),
                         span: tt_span,
                         allow_internal_unstable: allow_internal_unstable,
                     }
@@ -489,19 +485,19 @@ impl<'a, 'b> MacroExpander<'a, 'b> {

             MultiDecorator(..) | MultiModifier(..) | SyntaxExtension::AttrProcMacro(..) => {
                 self.cx.span_err(path.span,
-                                 &format!("`{}` can only be used in attributes", extname));
+                                 &format!("`{}` can only be used in attributes", path));
                 return kind.dummy(span);
             }

             SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) => {
-                self.cx.span_err(path.span, &format!("`{}` is a derive mode", extname));
+                self.cx.span_err(path.span, &format!("`{}` is a derive mode", path));
                 return kind.dummy(span);
             }

             SyntaxExtension::ProcMacro(ref expandfun) => {
                 if ident.name != keywords::Invalid.name() {
                     let msg =
-                        format!("macro {}! expects no ident argument, given '{}'", extname, ident);
+                        format!("macro {}! expects no ident argument, given '{}'", path, ident);
                     self.cx.span_err(path.span, &msg);
                     return kind.dummy(span);
                 }
@@ -509,7 +505,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 self.cx.bt_push(ExpnInfo {
                     call_site: span,
                     callee: NameAndSpan {
-                        format: MacroBang(extname),
+                        format: MacroBang(Symbol::intern(&format!("{}", path))),
                         // FIXME procedural macros do not have proper span info
                         // yet, when they do, we should use it here.
                         span: None,
@@ -519,7 +515,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 });

                 let tok_result = expandfun.expand(self.cx, span, marked_tts);
-                Some(self.parse_expansion(tok_result, kind, extname, span))
+                Some(self.parse_expansion(tok_result, kind, path, span))
             }
         };

@@ -541,19 +537,24 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
     /// Expand a derive invocation. Returns the result of expansion.
     fn expand_derive_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) -> Expansion {
         let Invocation { expansion_kind: kind, .. } = invoc;
-        let (name, span, item) = match invoc.kind {
-            InvocationKind::Derive { name, span, item } => (name, span, item),
+        let (path, item) = match invoc.kind {
+            InvocationKind::Derive { path, item } => (path, item),
             _ => unreachable!(),
         };

-        let mitem = ast::MetaItem { name: name, span: span, node: ast::MetaItemKind::Word };
-        let pretty_name = Symbol::intern(&format!("derive({})", name));
+        let pretty_name = Symbol::intern(&format!("derive({})", path));
+        let span = path.span;
+        let attr = ast::Attribute {
+            path: path, tokens: TokenStream::empty(), span: span,
+            // irrelevant:
+            id: ast::AttrId(0), style: ast::AttrStyle::Outer, is_sugared_doc: false,
+        };

         self.cx.bt_push(ExpnInfo {
             call_site: span,
             callee: NameAndSpan {
                 format: MacroAttribute(pretty_name),
-                span: Some(span),
+                span: None,
                 allow_internal_unstable: false,
             }
         });
@@ -571,7 +572,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     }),
                     ..span
                 };
-                return kind.expect_from_annotatables(ext.expand(self.cx, span, &mitem, item));
+                let dummy = ast::MetaItem { // FIXME(jseyfried) avoid this
+                    name: keywords::Invalid.name(),
+                    span: DUMMY_SP,
+                    node: ast::MetaItemKind::Word,
+                };
+                return kind.expect_from_annotatables(ext.expand(self.cx, span, &dummy, item));
             }
             SyntaxExtension::BuiltinDerive(func) => {
                 let span = Span {
@@ -586,20 +592,18 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     ..span
                 };
                 let mut items = Vec::new();
-                func(self.cx, span, &mitem, &item, &mut |a| {
-                    items.push(a)
-                });
+                func(self.cx, span, &attr.meta().unwrap(), &item, &mut |a| items.push(a));
                 return kind.expect_from_annotatables(items);
             }
             _ => {
-                let msg = &format!("macro `{}` may not be used for derive attributes", name);
+                let msg = &format!("macro `{}` may not be used for derive attributes", attr.path);
                 self.cx.span_err(span, &msg);
                 kind.dummy(span)
             }
         }
     }

-    fn parse_expansion(&mut self, toks: TokenStream, kind: ExpansionKind, name: Name, span: Span)
+    fn parse_expansion(&mut self, toks: TokenStream, kind: ExpansionKind, path: &Path, span: Span)
                        -> Expansion {
         let mut parser = self.cx.new_parser_from_tts(&toks.into_trees().collect::<Vec<_>>());
         let expansion = match parser.parse_expansion(kind, false) {
@@ -609,7 +613,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 return kind.dummy(span);
             }
         };
-        parser.ensure_complete_parse(name, kind.name(), span);
+        parser.ensure_complete_parse(path, kind.name(), span);
        // FIXME better span info
        expansion.fold_with(&mut ChangeSpan { span: span })
     }
@@ -658,14 +662,14 @@ impl<'a> Parser<'a> {
         })
     }

-    pub fn ensure_complete_parse(&mut self, macro_name: ast::Name, kind_name: &str, span: Span) {
+    pub fn ensure_complete_parse(&mut self, macro_path: &Path, kind_name: &str, span: Span) {
         if self.token != token::Eof {
             let msg = format!("macro expansion ignores token `{}` and any following",
                               self.this_token_to_string());
             let mut err = self.diagnostic().struct_span_err(self.span, &msg);
             let msg = format!("caused by the macro expansion here; the usage \
                                of `{}!` is likely invalid in {} context",
-                              macro_name, kind_name);
+                              macro_path, kind_name);
             err.span_note(span, &msg).emit();
         }
     }
@@ -708,20 +712,20 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {

     fn collect_attr(&mut self,
                     attr: Option<ast::Attribute>,
-                    traits: Vec<(Symbol, Span)>,
+                    traits: Vec<Path>,
                     item: Annotatable,
                     kind: ExpansionKind)
                     -> Expansion {
         if !traits.is_empty() &&
            (kind == ExpansionKind::TraitItems || kind == ExpansionKind::ImplItems) {
-            self.cx.span_err(traits[0].1, "`derive` can be only be applied to items");
+            self.cx.span_err(traits[0].span, "`derive` can be only be applied to items");
             return kind.expect_from_annotatables(::std::iter::once(item));
         }
         self.collect(kind, InvocationKind::Attr { attr: attr, traits: traits, item: item })
     }

     // If `item` is an attr invocation, remove and return the macro attribute.
-    fn classify_item<T>(&mut self, mut item: T) -> (Option<ast::Attribute>, Vec<(Symbol, Span)>, T)
+    fn classify_item<T>(&mut self, mut item: T) -> (Option<ast::Attribute>, Vec<Path>, T)
         where T: HasAttrs,
     {
         let (mut attr, mut traits) = (None, Vec::new());
@@ -784,32 +788,6 @@ fn stream_for_item(item: &Annotatable, parse_sess: &ParseSess) -> TokenStream {
     string_to_stream(text, parse_sess)
 }

-fn stream_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> TokenStream {
-    use ast::MetaItemKind::*;
-    use print::pp::Breaks;
-    use print::pprust::PrintState;
-
-    let token_string = match attr.value.node {
-        // For `#[foo]`, an empty token
-        Word => return TokenStream::empty(),
-        // For `#[foo(bar, baz)]`, returns `(bar, baz)`
-        List(ref items) => pprust::to_string(|s| {
-            s.popen()?;
-            s.commasep(Breaks::Consistent,
-                       &items[..],
-                       |s, i| s.print_meta_list_item(&i))?;
-            s.pclose()
-        }),
-        // For `#[foo = "bar"]`, returns `= "bar"`
-        NameValue(ref lit) => pprust::to_string(|s| {
-            s.word_space("=")?;
-            s.print_literal(lit)
-        }),
-    };
-
-    string_to_stream(token_string, parse_sess)
-}
-
 fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream {
     let filename = String::from("<macro expansion>");
     filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, None, text))
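The deleted helper pretty-printed the parsed meta item back to a string and re-lexed it; since attributes now store their argument tokens, that detour is gone and only item bodies still go through `stream_for_item`. A minimal sketch of the difference, with strings standing in for token streams (purely illustrative types):

```rust
struct Attr {
    tokens: String, // stored verbatim since this PR
}

// Old shape: render the parsed AST back to text, then lex that text.
// The renderer normalizes spacing and loses the original spans.
fn args_via_pretty_print(rendered_meta: &str) -> String {
    rendered_meta.to_string() // lexing elided in this sketch
}

// New shape: no round-trip at all.
fn args_via_tokens(attr: &Attr) -> &str {
    &attr.tokens
}

fn main() {
    let attr = Attr { tokens: "(bar , baz)".to_string() };
    assert_eq!(args_via_tokens(&attr), "(bar , baz)"); // surface form preserved
    assert_eq!(args_via_pretty_print("(bar, baz)"), "(bar, baz)"); // normalized
}
```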
@@ -926,7 +904,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
                 // Detect if this is an inline module (`mod m { ... }` as opposed to `mod m;`).
                 // In the non-inline case, `inner` is never the dummy span (c.f. `parse_item_mod`).
                 // Thus, if `inner` is the dummy span, we know the module is inline.
-                let inline_module = item.span.contains(inner) || inner == syntax_pos::DUMMY_SP;
+                let inline_module = item.span.contains(inner) || inner == DUMMY_SP;

                 if inline_module {
                     if let Some(path) = attr::first_attr_value_str_by_name(&item.attrs, "path") {
@@ -220,16 +220,24 @@ pub mod rt {
     }

     impl ToTokens for ast::Attribute {
-        fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
+        fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
             let mut r = vec![];
             // FIXME: The spans could be better
             r.push(TokenTree::Token(self.span, token::Pound));
             if self.style == ast::AttrStyle::Inner {
                 r.push(TokenTree::Token(self.span, token::Not));
             }
+            let mut inner = Vec::new();
+            for (i, segment) in self.path.segments.iter().enumerate() {
+                if i > 0 {
+                    inner.push(TokenTree::Token(self.span, token::Colon).into());
+                }
+                inner.push(TokenTree::Token(self.span, token::Ident(segment.identifier)).into());
+            }
+            inner.push(self.tokens.clone());
+
             r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
-                delim: token::Bracket,
-                tts: self.value.to_tokens(cx).into_iter().collect::<TokenStream>().into(),
+                delim: token::Bracket, tts: TokenStream::concat(inner).into()
             }));
             r
         }
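Quoting an attribute now re-emits `#`, an optional `!` for inner attributes, and a bracketed group built from the path segments followed by the stored argument tokens. A small sketch of the emitted surface shape, with plain strings instead of rustc `TokenTree`s (illustrative only; the real code builds token trees, not text):

```rust
// Rebuild an attribute's surface syntax from its stored pieces:
// style flag, path segments, and the raw argument tokens.
fn quote_attr(inner: bool, segments: &[&str], tokens: &str) -> String {
    let mut out = String::from("#");
    if inner {
        out.push('!'); // inner attributes are `#![...]`
    }
    out.push('[');
    out.push_str(&segments.join("::")); // the attribute's path
    out.push_str(tokens);               // the verbatim argument tokens
    out.push(']');
    out
}

fn main() {
    assert_eq!(quote_attr(false, &["foo", "bar"], "(baz)"), "#[foo::bar(baz)]");
    assert_eq!(quote_attr(true, &["doc"], " = \"hi\""), "#![doc = \"hi\"]");
}
```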
@@ -488,7 +488,7 @@ pub fn parse(sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], directory: Op
 fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
     match name {
         "tt" => {
-            return token::NtTT(panictry!(p.parse_token_tree()));
+            return token::NtTT(p.parse_token_tree());
         }
         _ => {}
     }
@@ -51,7 +51,8 @@ impl<'a> ParserAnyMacro<'a> {
         }

         // Make sure we don't have any tokens left to parse so we don't silently drop anything.
-        parser.ensure_complete_parse(macro_ident.name, kind.name(), site_span);
+        let path = ast::Path::from_ident(site_span, macro_ident);
+        parser.ensure_complete_parse(&path, kind.name(), site_span);
         expansion
     }
 }
@@ -862,35 +862,34 @@ macro_rules! gate_feature {
 impl<'a> Context<'a> {
     fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) {
         debug!("check_attribute(attr = {:?})", attr);
-        let name = &*attr.name().as_str();
+        let name = unwrap_or!(attr.name(), return);

         for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES {
-            if n == name {
+            if name == n {
                 if let &Gated(_, ref name, ref desc, ref has_feature) = gateage {
                     gate_feature_fn!(self, has_feature, attr.span, name, desc);
                 }
-                debug!("check_attribute: {:?} is builtin, {:?}, {:?}", name, ty, gateage);
+                debug!("check_attribute: {:?} is builtin, {:?}, {:?}", attr.path, ty, gateage);
                 return;
             }
         }
         for &(ref n, ref ty) in self.plugin_attributes {
-            if n == name {
+            if attr.path == &**n {
                 // Plugins can't gate attributes, so we don't check for it
                 // unlike the code above; we only use this loop to
                 // short-circuit to avoid the checks below
-                debug!("check_attribute: {:?} is registered by a plugin, {:?}", name, ty);
+                debug!("check_attribute: {:?} is registered by a plugin, {:?}", attr.path, ty);
                 return;
             }
         }
-        if name.starts_with("rustc_") {
+        if name.as_str().starts_with("rustc_") {
             gate_feature!(self, rustc_attrs, attr.span,
                           "unless otherwise specified, attributes \
                            with the prefix `rustc_` \
                            are reserved for internal compiler diagnostics");
-        } else if name.starts_with("derive_") {
+        } else if name.as_str().starts_with("derive_") {
             gate_feature!(self, custom_derive, attr.span, EXPLAIN_DERIVE_UNDERSCORE);
-        } else if attr::is_known(attr) {
-            debug!("check_attribute: {:?} is known", name);
-        } else {
+        } else if !attr::is_known(attr) {
             // Only run the custom attribute lint during regular
             // feature gate checking. Macro gating runs
             // before the plugin attributes are registered
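Since an attribute's path can now have several segments, `attr.name()` returns an `Option` and `check_attribute` bails out early for path attributes via `unwrap_or!`. A tiny sketch of why the name became optional (hypothetical helper, not rustc API):

```rust
// A "name" only exists when the attribute path is a single segment.
fn name_of<'a>(segments: &[&'a str]) -> Option<&'a str> {
    if segments.len() == 1 { Some(segments[0]) } else { None }
}

fn main() {
    assert_eq!(name_of(&["inline"]), Some("inline")); // #[inline]
    assert_eq!(name_of(&["x", "attr"]), None);        // a path attribute
}
```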
@@ -901,7 +900,7 @@ impl<'a> Context<'a> {
                                         unknown to the compiler and \
                                         may have meaning \
                                         added to it in the future",
-                                       name));
+                                       attr.path));
             }
         }
     }
@@ -1100,7 +1099,12 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
             self.context.check_attribute(attr, false);
         }

-        if contains_novel_literal(&attr.value) {
+        if self.context.features.proc_macro && attr::is_known(attr) {
+            return
+        }
+
+        let meta = panictry!(attr.parse_meta(&self.context.parse_sess));
+        if contains_novel_literal(&meta) {
             gate_feature_post!(&self, attr_literals, attr.span,
                                "non-string literals in attributes, or string \
                                 literals in top-level positions, are experimental");
@@ -1163,8 +1167,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
                                    `#[repr(simd)]` instead");
             }
             for attr in &i.attrs {
-                if attr.name() == "repr" {
-                    for item in attr.meta_item_list().unwrap_or(&[]) {
+                if attr.path == "repr" {
+                    for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
                         if item.check_name("simd") {
                             gate_feature_post!(&self, repr_simd, i.span,
                                                "SIMD types are experimental \
@@ -489,7 +489,8 @@ pub fn noop_fold_attribute<T: Folder>(attr: Attribute, fld: &mut T) -> Option<At
     Some(Attribute {
         id: attr.id,
         style: attr.style,
-        value: fld.fold_meta_item(attr.value),
+        path: fld.fold_path(attr.path),
+        tokens: fld.fold_tts(attr.tokens),
         is_sugared_doc: attr.is_sugared_doc,
         span: fld.new_span(attr.span),
     })
@@ -612,7 +613,7 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
         token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)),
         token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)),
         token::NtIdent(id) => token::NtIdent(Spanned::<Ident>{node: fld.fold_ident(id.node), ..id}),
-        token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
+        token::NtMeta(meta) => token::NtMeta(fld.fold_meta_item(meta)),
         token::NtPath(path) => token::NtPath(fld.fold_path(path)),
         token::NtTT(tt) => token::NtTT(fld.fold_tt(tt)),
         token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
@@ -1371,7 +1372,7 @@ mod tests {
                         matches_codepattern,
                         "matches_codepattern",
                         pprust::to_string(|s| fake_print_crate(s, &folded_crate)),
-                        "#[a]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string());
+                        "#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string());
     }

     // even inside macro defs....
@@ -65,6 +65,16 @@ macro_rules! panictry {
     })
 }

+#[macro_export]
+macro_rules! unwrap_or {
+    ($opt:expr, $default:expr) => {
+        match $opt {
+            Some(x) => x,
+            None => $default,
+        }
+    }
+}
+
 #[macro_use]
 pub mod diagnostics {
     #[macro_use]
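`unwrap_or!` exists because its second argument is pasted in as an expression, so control flow like `return` or `continue` works in the default arm, which `Option::unwrap_or` cannot express. A runnable usage sketch (the surrounding function is hypothetical):

```rust
macro_rules! unwrap_or {
    ($opt:expr, $default:expr) => {
        match $opt {
            Some(x) => x,
            None => $default,
        }
    }
}

fn first_char(s: &str) -> char {
    // `return` in the default arm exits `first_char` itself -- something
    // `s.chars().next().unwrap_or(..)` could not do.
    let c = unwrap_or!(s.chars().next(), return '?');
    c.to_ascii_uppercase()
}

fn main() {
    assert_eq!(first_char("rust"), 'R');
    assert_eq!(first_char(""), '?');
}
```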
@@ -14,8 +14,9 @@ use syntax_pos::{mk_sp, Span};
 use codemap::spanned;
 use parse::common::SeqSep;
 use parse::PResult;
-use parse::token;
-use parse::parser::{Parser, TokenType};
+use parse::token::{self, Nonterminal};
+use parse::parser::{Parser, TokenType, PathStyle};
+use tokenstream::TokenStream;

 #[derive(PartialEq, Eq, Debug)]
 enum InnerAttributeParsePolicy<'a> {
@@ -91,7 +92,7 @@ impl<'a> Parser<'a> {
         debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}",
                inner_parse_policy,
                self.token);
-        let (span, value, mut style) = match self.token {
+        let (span, path, tokens, mut style) = match self.token {
             token::Pound => {
                 let lo = self.span.lo;
                 self.bump();
@@ -119,11 +120,11 @@ impl<'a> Parser<'a> {
                 };

                 self.expect(&token::OpenDelim(token::Bracket))?;
-                let meta_item = self.parse_meta_item()?;
+                let (path, tokens) = self.parse_path_and_tokens()?;
                 self.expect(&token::CloseDelim(token::Bracket))?;
                 let hi = self.prev_span.hi;

-                (mk_sp(lo, hi), meta_item, style)
+                (mk_sp(lo, hi), path, tokens, style)
             }
             _ => {
                 let token_str = self.this_token_to_string();
@@ -143,12 +144,30 @@ impl<'a> Parser<'a> {
         Ok(ast::Attribute {
             id: attr::mk_attr_id(),
             style: style,
-            value: value,
+            path: path,
+            tokens: tokens,
             is_sugared_doc: false,
             span: span,
         })
     }

+    pub fn parse_path_and_tokens(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
+        let meta = match self.token {
+            token::Interpolated(ref nt) => match **nt {
+                Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
+                _ => None,
+            },
+            _ => None,
+        };
+        Ok(if let Some(meta) = meta {
+            self.bump();
+            (ast::Path::from_ident(meta.span, ast::Ident::with_empty_ctxt(meta.name)),
+             meta.node.tokens(meta.span))
+        } else {
+            (self.parse_path(PathStyle::Mod)?, self.parse_tokens())
+        })
+    }
+
     /// Parse attributes that appear after the opening of an item. These should
     /// be preceded by an exclamation mark, but we accept and warn about one
     /// terminated by a semicolon.
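`parse_path_and_tokens` splits every attribute into a mod-style path and whatever raw tokens follow, with a fast path when the parser is sitting on an interpolated `NtMeta`. For the common attribute forms the decomposition looks like this (runnable sketch; the comments show the stored pieces, and a path attribute such as one provided by a hypothetical proc-macro crate would decompose the same way):

```rust
#[inline]             // path `inline`, empty token stream
fn fast() {}

#[derive(Clone)]      // path `derive`, tokens `(Clone)`
struct S;

fn main() {
    fast();
    let _ = S.clone();
}
```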
@@ -221,15 +240,20 @@ impl<'a> Parser<'a> {

         let lo = self.span.lo;
         let ident = self.parse_ident()?;
-        let node = if self.eat(&token::Eq) {
+        let node = self.parse_meta_item_kind()?;
+        let hi = self.prev_span.hi;
+        Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) })
+    }
+
+    pub fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
+        Ok(if self.eat(&token::Eq) {
             ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
         } else if self.token == token::OpenDelim(token::Paren) {
             ast::MetaItemKind::List(self.parse_meta_seq()?)
         } else {
+            self.eat(&token::OpenDelim(token::Paren));
             ast::MetaItemKind::Word
-        };
-        let hi = self.prev_span.hi;
-        Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) })
+        })
     }

     /// matches meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;
@@ -374,38 +374,80 @@ fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
     s[1..].chars().all(|c| '0' <= c && c <= '9')
 }

-fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, sd: &Handler, sp: Span)
-                      -> ast::LitKind {
-    debug!("filtered_float_lit: {}, {:?}", data, suffix);
-    let suffix = match suffix {
-        Some(suffix) => suffix,
-        None => return ast::LitKind::FloatUnsuffixed(data),
-    };
-
-    match &*suffix.as_str() {
-        "f32" => ast::LitKind::Float(data, ast::FloatTy::F32),
-        "f64" => ast::LitKind::Float(data, ast::FloatTy::F64),
-        suf => {
-            if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
-                // if it looks like a width, lets try to be helpful.
-                sd.struct_span_err(sp, &format!("invalid width `{}` for float literal", &suf[1..]))
-                    .help("valid widths are 32 and 64")
-                    .emit();
-            } else {
-                sd.struct_span_err(sp, &format!("invalid suffix `{}` for float literal", suf))
-                    .help("valid suffixes are `f32` and `f64`")
-                    .emit();
-            }
-
-            ast::LitKind::FloatUnsuffixed(data)
+macro_rules! err {
+    ($opt_diag:expr, |$span:ident, $diag:ident| $($body:tt)*) => {
+        match $opt_diag {
+            Some(($span, $diag)) => { $($body)* }
+            None => return None,
         }
     }
 }
-pub fn float_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> ast::LitKind {
+
+pub fn lit_token(lit: token::Lit, suf: Option<Symbol>, diag: Option<(Span, &Handler)>)
+                 -> (bool /* suffix illegal? */, Option<ast::LitKind>) {
+    use ast::LitKind;
+
+    match lit {
+        token::Byte(i) => (true, Some(LitKind::Byte(byte_lit(&i.as_str()).0))),
+        token::Char(i) => (true, Some(LitKind::Char(char_lit(&i.as_str()).0))),
+
+        // There are some valid suffixes for integer and float literals,
+        // so all the handling is done internally.
+        token::Integer(s) => (false, integer_lit(&s.as_str(), suf, diag)),
+        token::Float(s) => (false, float_lit(&s.as_str(), suf, diag)),
+
+        token::Str_(s) => {
+            let s = Symbol::intern(&str_lit(&s.as_str()));
+            (true, Some(LitKind::Str(s, ast::StrStyle::Cooked)))
+        }
+        token::StrRaw(s, n) => {
+            let s = Symbol::intern(&raw_str_lit(&s.as_str()));
+            (true, Some(LitKind::Str(s, ast::StrStyle::Raw(n))))
+        }
+        token::ByteStr(i) => {
+            (true, Some(LitKind::ByteStr(byte_str_lit(&i.as_str()))))
+        }
+        token::ByteStrRaw(i, _) => {
+            (true, Some(LitKind::ByteStr(Rc::new(i.to_string().into_bytes()))))
+        }
+    }
+}
+
+fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
+                      -> Option<ast::LitKind> {
+    debug!("filtered_float_lit: {}, {:?}", data, suffix);
+    let suffix = match suffix {
+        Some(suffix) => suffix,
+        None => return Some(ast::LitKind::FloatUnsuffixed(data)),
+    };
+
+    Some(match &*suffix.as_str() {
+        "f32" => ast::LitKind::Float(data, ast::FloatTy::F32),
+        "f64" => ast::LitKind::Float(data, ast::FloatTy::F64),
+        suf => {
+            err!(diag, |span, diag| {
+                if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
+                    // if it looks like a width, lets try to be helpful.
+                    let msg = format!("invalid width `{}` for float literal", &suf[1..]);
+                    diag.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit()
+                } else {
+                    let msg = format!("invalid suffix `{}` for float literal", suf);
+                    diag.struct_span_err(span, &msg)
+                        .help("valid suffixes are `f32` and `f64`")
+                        .emit();
+                }
+            });
+
+            ast::LitKind::FloatUnsuffixed(data)
+        }
+    })
+}
+pub fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
+                 -> Option<ast::LitKind> {
     debug!("float_lit: {:?}, {:?}", s, suffix);
     // FIXME #2252: bounds checking float literals is deferred until trans
     let s = s.chars().filter(|&c| c != '_').collect::<String>();
-    filtered_float_lit(Symbol::intern(&s), suffix, sd, sp)
+    filtered_float_lit(Symbol::intern(&s), suffix, diag)
 }

 /// Parse a string representing a byte literal into its final form. Similar to `char_lit`
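`err!` threads the optional `(Span, &Handler)` through literal lowering: with a handler present, diagnostics are emitted and lowering recovers; with `None` (the speculative `LitKind::from_token` caller), the enclosing function simply returns `None`. A minimal model of that pattern outside rustc (all names here are stand-ins):

```rust
// With a log, report and recover; without one, fail the whole conversion
// silently -- the same split `err!` encodes with Option<(Span, &Handler)>.
fn parse_int(s: &str, report: Option<&mut Vec<String>>) -> Option<i64> {
    match s.parse() {
        Ok(n) => Some(n),
        Err(_) => {
            match report {
                Some(log) => log.push(format!("bad integer `{}`", s)), // emit, then recover
                None => return None, // speculative caller: no diagnostics, just fail
            }
            Some(0) // recovered value, mirroring `ast::LitKind::Int(0, ty)`
        }
    }
}

fn main() {
    assert_eq!(parse_int("42", None), Some(42));
    assert_eq!(parse_int("nope", None), None);
    let mut log = Vec::new();
    assert_eq!(parse_int("nope", Some(&mut log)), Some(0));
    assert_eq!(log.len(), 1);
}
```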
@@ -500,7 +542,8 @@ pub fn byte_str_lit(lit: &str) -> Rc<Vec<u8>> {
     Rc::new(res)
 }

-pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> ast::LitKind {
+pub fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
+                   -> Option<ast::LitKind> {
     // s can only be ascii, byte indexing is fine

     let s2 = s.chars().filter(|&c| c != '_').collect::<String>();
@@ -524,13 +567,16 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> a
     // 1f64 and 2f32 etc. are valid float literals.
     if let Some(suf) = suffix {
         if looks_like_width_suffix(&['f'], &suf.as_str()) {
-            match base {
-                16 => sd.span_err(sp, "hexadecimal float literal is not supported"),
-                8 => sd.span_err(sp, "octal float literal is not supported"),
-                2 => sd.span_err(sp, "binary float literal is not supported"),
-                _ => ()
+            let err = match base {
+                16 => Some("hexadecimal float literal is not supported"),
+                8 => Some("octal float literal is not supported"),
+                2 => Some("binary float literal is not supported"),
+                _ => None,
+            };
+            if let Some(err) = err {
+                err!(diag, |span, diag| diag.span_err(span, err));
             }
-            return filtered_float_lit(Symbol::intern(&s), Some(suf), sd, sp)
+            return filtered_float_lit(Symbol::intern(&s), Some(suf), diag)
         }
     }

@@ -539,7 +585,9 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> a
     }

     if let Some(suf) = suffix {
-        if suf.as_str().is_empty() { sd.span_bug(sp, "found empty literal suffix in Some")}
+        if suf.as_str().is_empty() {
+            err!(diag, |span, diag| diag.span_bug(span, "found empty literal suffix in Some"));
+        }
         ty = match &*suf.as_str() {
             "isize" => ast::LitIntType::Signed(ast::IntTy::Is),
             "i8" => ast::LitIntType::Signed(ast::IntTy::I8),
@@ -556,17 +604,20 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> a
             suf => {
                 // i<digits> and u<digits> look like widths, so lets
                 // give an error message along those lines
-                if looks_like_width_suffix(&['i', 'u'], suf) {
-                    sd.struct_span_err(sp, &format!("invalid width `{}` for integer literal",
-                                                    &suf[1..]))
-                        .help("valid widths are 8, 16, 32, 64 and 128")
-                        .emit();
-                } else {
-                    sd.struct_span_err(sp, &format!("invalid suffix `{}` for numeric literal", suf))
-                        .help("the suffix must be one of the integral types \
-                               (`u32`, `isize`, etc)")
-                        .emit();
-                }
+                err!(diag, |span, diag| {
+                    if looks_like_width_suffix(&['i', 'u'], suf) {
+                        let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
+                        diag.struct_span_err(span, &msg)
+                            .help("valid widths are 8, 16, 32, 64 and 128")
+                            .emit();
+                    } else {
+                        let msg = format!("invalid suffix `{}` for numeric literal", suf);
+                        diag.struct_span_err(span, &msg)
+                            .help("the suffix must be one of the integral types \
+                                   (`u32`, `isize`, etc)")
+                            .emit();
+                    }
+                });

                 ty
             }
@@ -576,7 +627,7 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> a
     debug!("integer_lit: the type is {:?}, base {:?}, the new string is {:?}, the original \
            string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix);

-    match u128::from_str_radix(s, base) {
+    Some(match u128::from_str_radix(s, base) {
         Ok(r) => ast::LitKind::Int(r, ty),
         Err(_) => {
             // small bases are lexed as if they were base 10, e.g, the string
@@ -588,11 +639,11 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> a
                 s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base));

             if !already_errored {
-                sd.span_err(sp, "int literal is too large");
+                err!(diag, |span, diag| diag.span_err(span, "int literal is too large"));
             }
             ast::LitKind::Int(0, ty)
         }
-    }
+    })
 }

 #[cfg(test)]
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
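The pattern running through the hunks above: literal parsing in `parse::` no longer demands a `Handler` and `Span` up front; it takes an optional `(Span, &Handler)` pair and returns `Option<ast::LitKind>`, and the `err!` macro reports only when a handler is actually present. A minimal self-contained sketch of that shape (toy `Span`/`Handler` types, not the compiler's real definitions):

```rust
// Toy stand-ins for syntax_pos::Span and errors::Handler.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span(usize);

struct Handler;

impl Handler {
    fn span_err(&self, span: Span, msg: &str) {
        eprintln!("error at {:?}: {}", span, msg);
    }
}

// Like the diff's `err!`: run the reporting body only when a diagnostic
// context was supplied; either way the caller gets None back.
macro_rules! err {
    ($opt_diag:expr, |$span:ident, $diag:ident| $body:expr) => {
        match $opt_diag {
            Some(($span, $diag)) => { $body; return None }
            None => return None,
        }
    };
}

fn parse_u8_lit(s: &str, diag: Option<(Span, &Handler)>) -> Option<u8> {
    match s.parse::<u8>() {
        Ok(n) => Some(n),
        Err(_) => err!(diag, |span, diag| diag.span_err(span, "invalid u8 literal")),
    }
}

fn main() {
    assert_eq!(parse_u8_lit("42", None), Some(42));
    assert_eq!(parse_u8_lit("300", Some((Span(0), &Handler))), None); // reports, then None
}
```

Because reporting is optional, the same functions can back both the real parser (which passes `Some((span, handler))`) and speculative callers that only want to know whether a literal is valid.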
@@ -961,7 +1012,7 @@ mod tests {
         let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
         let item = parse_item_from_source_str(name.clone(), source, &sess)
             .unwrap().unwrap();
-        let docs = item.attrs.iter().filter(|a| a.name() == "doc")
+        let docs = item.attrs.iter().filter(|a| a.path == "doc")
             .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
         let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
         assert_eq!(&docs[..], b);

@@ -60,7 +60,6 @@ use util::ThinVec;
 use std::collections::HashSet;
 use std::{cmp, mem, slice};
 use std::path::{Path, PathBuf};
-use std::rc::Rc;

 bitflags! {
     flags Restrictions: u8 {

@@ -891,7 +890,7 @@ impl<'a> Parser<'a> {

         self.parse_seq_to_before_tokens(kets,
                                         SeqSep::none(),
-                                        |p| p.parse_token_tree(),
+                                        |p| Ok(p.parse_token_tree()),
                                         |mut e| handler.cancel(&mut e));
     }

@@ -1267,7 +1266,7 @@ impl<'a> Parser<'a> {
                     break;
                 }
                 token::OpenDelim(token::Brace) => {
-                    self.parse_token_tree()?;
+                    self.parse_token_tree();
                     break;
                 }
                 _ => self.bump(),
@@ -1643,44 +1642,15 @@ impl<'a> Parser<'a> {
                 _ => { return self.unexpected_last(&self.token); }
             },
             token::Literal(lit, suf) => {
-                let (suffix_illegal, out) = match lit {
-                    token::Byte(i) => (true, LitKind::Byte(parse::byte_lit(&i.as_str()).0)),
-                    token::Char(i) => (true, LitKind::Char(parse::char_lit(&i.as_str()).0)),
-
-                    // there are some valid suffixes for integer and
-                    // float literals, so all the handling is done
-                    // internally.
-                    token::Integer(s) => {
-                        let diag = &self.sess.span_diagnostic;
-                        (false, parse::integer_lit(&s.as_str(), suf, diag, self.span))
-                    }
-                    token::Float(s) => {
-                        let diag = &self.sess.span_diagnostic;
-                        (false, parse::float_lit(&s.as_str(), suf, diag, self.span))
-                    }
-
-                    token::Str_(s) => {
-                        let s = Symbol::intern(&parse::str_lit(&s.as_str()));
-                        (true, LitKind::Str(s, ast::StrStyle::Cooked))
-                    }
-                    token::StrRaw(s, n) => {
-                        let s = Symbol::intern(&parse::raw_str_lit(&s.as_str()));
-                        (true, LitKind::Str(s, ast::StrStyle::Raw(n)))
-                    }
-                    token::ByteStr(i) => {
-                        (true, LitKind::ByteStr(parse::byte_str_lit(&i.as_str())))
-                    }
-                    token::ByteStrRaw(i, _) => {
-                        (true, LitKind::ByteStr(Rc::new(i.to_string().into_bytes())))
-                    }
-                };
+                let diag = Some((self.span, &self.sess.span_diagnostic));
+                let (suffix_illegal, result) = parse::lit_token(lit, suf, diag);

                 if suffix_illegal {
                     let sp = self.span;
                     self.expect_no_suffix(sp, &format!("{} literal", lit.short_name()), suf)
                 }

-                out
+                result.unwrap()
             }
             _ => { return self.unexpected_last(&self.token); }
         };
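After this hunk, all token-to-literal conversion lives in one `parse::lit_token` helper that returns whether a suffix would be illegal alongside the parsed literal, so the parser's big `match lit { ... }` disappears. A hedged sketch of that consolidation over toy token and literal enums (all names here are invented for illustration):

```rust
// Toy token and literal types; the real equivalents are token::Lit and
// ast::LitKind, handled by parse::lit_token in the diff.
#[derive(Debug, PartialEq)]
enum Tok {
    Int(i64),
    Str(String),
}

#[derive(Debug, PartialEq)]
enum Lit {
    Int(i64),
    Str(String),
}

// One central place decides both the converted literal and whether a
// suffix would be illegal for this token kind, instead of every caller
// re-implementing the match.
fn lit_token(tok: Tok) -> (bool, Option<Lit>) {
    match tok {
        Tok::Int(n) => (false, Some(Lit::Int(n))), // suffixes handled internally
        Tok::Str(s) => (true, Some(Lit::Str(s))),  // a suffix here is illegal
    }
}

fn main() {
    let (suffix_illegal, lit) = lit_token(Tok::Int(7));
    assert!(!suffix_illegal);
    assert_eq!(lit, Some(Lit::Int(7)));
    assert_eq!(lit_token(Tok::Str("hi".into())), (true, Some(Lit::Str("hi".into()))));
}
```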
@@ -2108,10 +2078,10 @@ impl<'a> Parser<'a> {

     fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, ThinTokenStream)> {
         match self.token {
-            token::OpenDelim(delim) => self.parse_token_tree().map(|tree| match tree {
-                TokenTree::Delimited(_, delimited) => (delim, delimited.stream().into()),
+            token::OpenDelim(delim) => match self.parse_token_tree() {
+                TokenTree::Delimited(_, delimited) => Ok((delim, delimited.stream().into())),
                 _ => unreachable!(),
-            }),
+            },
             _ => Err(self.fatal("expected open delimiter")),
         }
     }

@@ -2656,24 +2626,23 @@ impl<'a> Parser<'a> {
     }

     /// parse a single token tree from the input.
-    pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
+    pub fn parse_token_tree(&mut self) -> TokenTree {
         match self.token {
             token::OpenDelim(..) => {
                 let frame = mem::replace(&mut self.token_cursor.frame,
                                          self.token_cursor.stack.pop().unwrap());
                 self.span = frame.span;
                 self.bump();
-                return Ok(TokenTree::Delimited(frame.span, Delimited {
+                TokenTree::Delimited(frame.span, Delimited {
                     delim: frame.delim,
                     tts: frame.tree_cursor.original_stream().into(),
-                }));
+                })
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
                 let token = mem::replace(&mut self.token, token::Underscore);
-                let res = Ok(TokenTree::Token(self.span, token));
                 self.bump();
-                res
+                TokenTree::Token(self.prev_span, token)
             }
         }
     }
@@ -2683,11 +2652,22 @@ impl<'a> Parser<'a> {
     pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
         let mut tts = Vec::new();
         while self.token != token::Eof {
-            tts.push(self.parse_token_tree()?);
+            tts.push(self.parse_token_tree());
        }
         Ok(tts)
     }

+    pub fn parse_tokens(&mut self) -> TokenStream {
+        let mut result = Vec::new();
+        loop {
+            match self.token {
+                token::Eof | token::CloseDelim(..) => break,
+                _ => result.push(self.parse_token_tree().into()),
+            }
+        }
+        TokenStream::concat(result)
+    }
+
     /// Parse a prefix-unary-operator expr
     pub fn parse_prefix_expr(&mut self,
                              already_parsed_attrs: Option<ThinVec<Attribute>>)
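`parse_token_tree` can drop its `PResult` wrapper because delimiters are already matched when the token stream is constructed, so walking it cannot fail; the new `parse_tokens` then just loops until EOF or a close delimiter. A rough stand-alone analogue of that loop (toy `Token`/`Parser` types, not rustc's):

```rust
// Toy flat token type; in the compiler, delimiter bookkeeping is done by
// the token cursor, which is why parse_token_tree can be total.
#[derive(Clone, Debug, PartialEq)]
enum Token {
    Ident(&'static str),
    CloseDelim,
    Eof,
}

struct Parser {
    tokens: Vec<Token>,
    pos: usize,
}

impl Parser {
    fn token(&self) -> Token {
        self.tokens.get(self.pos).cloned().unwrap_or(Token::Eof)
    }

    // Total: every lookahead yields a tree, no PResult needed.
    fn parse_token_tree(&mut self) -> Token {
        let tok = self.token();
        self.pos += 1;
        tok
    }

    // Shape of the new parse_tokens: collect until EOF or a close
    // delimiter, then concatenate into one stream.
    fn parse_tokens(&mut self) -> Vec<Token> {
        let mut result = Vec::new();
        loop {
            match self.token() {
                Token::Eof | Token::CloseDelim => break,
                _ => result.push(self.parse_token_tree()),
            }
        }
        result
    }
}

fn main() {
    let mut p = Parser {
        tokens: vec![Token::Ident("a"), Token::Ident("b"), Token::CloseDelim],
        pos: 0,
    };
    assert_eq!(p.parse_tokens(), vec![Token::Ident("a"), Token::Ident("b")]);
}
```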
@@ -5181,11 +5161,9 @@ impl<'a> Parser<'a> {
         let attr = ast::Attribute {
             id: attr::mk_attr_id(),
             style: ast::AttrStyle::Outer,
-            value: ast::MetaItem {
-                name: Symbol::intern("warn_directory_ownership"),
-                node: ast::MetaItemKind::Word,
-                span: syntax_pos::DUMMY_SP,
-            },
+            path: ast::Path::from_ident(syntax_pos::DUMMY_SP,
+                                        Ident::from_str("warn_directory_ownership")),
+            tokens: TokenStream::empty(),
             is_sugared_doc: false,
             span: syntax_pos::DUMMY_SP,
         };
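This hunk shows the PR's central data-model change at a call site: an `ast::Attribute` now carries a `path` plus raw `tokens` instead of a structured `MetaItem`, and the `MetaItem` view becomes a fallible derived query (`attr.meta()`, used in the pretty-printer below). A simplified sketch of why that view can fail (field names mirror the diff; the stubbed parsing and everything else is invented for illustration):

```rust
// Before: an attribute *was* a MetaItem, so `#[foo::bar(any tokens)]`
// could not even be represented.
// After (as in this diff): a path plus an opaque token stream.
#[derive(Debug)]
struct Path(Vec<String>);

#[derive(Debug)]
struct TokenStream(Vec<String>); // stand-in; rustc's is a real tree

#[derive(Debug)]
struct Attribute {
    path: Path,
    tokens: TokenStream,
}

// A structured MetaItem view is now *derived* from the tokens, and can
// fail for attributes like `#[foo::attr_macro arbitrary + tokens]`.
struct MetaItem {
    name: String,
}

impl Attribute {
    fn meta(&self) -> Option<MetaItem> {
        // Real parsing lives in libsyntax; this stub only succeeds for
        // single-segment paths with no argument tokens.
        if self.path.0.len() == 1 && self.tokens.0.is_empty() {
            Some(MetaItem { name: self.path.0[0].clone() })
        } else {
            None
        }
    }
}

fn main() {
    let plain = Attribute {
        path: Path(vec!["inline".into()]),
        tokens: TokenStream(vec![]),
    };
    let exotic = Attribute {
        path: Path(vec!["foo".into(), "attr_macro".into()]),
        tokens: TokenStream(vec!["arbitrary".into(), "+".into(), "tokens".into()]),
    };
    assert_eq!(plain.meta().unwrap().name, "inline");
    assert!(exotic.meta().is_none()); // only a proc macro can interpret this
}
```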
@@ -17,7 +17,7 @@ pub use self::Token::*;
 use ast::{self};
 use ptr::P;
 use symbol::keywords;
-use tokenstream;
+use tokenstream::TokenTree;

 use std::fmt;
 use std::rc::Rc;

@@ -349,7 +349,7 @@ pub enum Nonterminal {
     /// Stuff inside brackets for attributes
     NtMeta(ast::MetaItem),
     NtPath(ast::Path),
-    NtTT(tokenstream::TokenTree),
+    NtTT(TokenTree),
     // These are not exposed to macros, but are used by quasiquote.
     NtArm(ast::Arm),
     NtImplItem(ast::ImplItem),

@@ -28,7 +28,7 @@ use ptr::P;
 use std_inject;
 use symbol::{Symbol, keywords};
 use syntax_pos::DUMMY_SP;
-use tokenstream::{self, TokenTree};
+use tokenstream::{self, TokenStream, TokenTree};

 use std::ascii;
 use std::io::{self, Write, Read};

@@ -329,6 +329,10 @@ pub fn tts_to_string(tts: &[tokenstream::TokenTree]) -> String {
     to_string(|s| s.print_tts(tts.iter().cloned().collect()))
 }

+pub fn tokens_to_string(tokens: TokenStream) -> String {
+    to_string(|s| s.print_tts(tokens))
+}
+
 pub fn stmt_to_string(stmt: &ast::Stmt) -> String {
     to_string(|s| s.print_stmt(stmt))
 }
@@ -750,7 +754,21 @@ pub trait PrintState<'a> {
             ast::AttrStyle::Inner => word(self.writer(), "#![")?,
             ast::AttrStyle::Outer => word(self.writer(), "#[")?,
         }
-        self.print_meta_item(&attr.meta())?;
+        if let Some(mi) = attr.meta() {
+            self.print_meta_item(&mi)?
+        } else {
+            for (i, segment) in attr.path.segments.iter().enumerate() {
+                if i > 0 {
+                    word(self.writer(), "::")?
+                }
+                if segment.identifier.name != keywords::CrateRoot.name() &&
+                   segment.identifier.name != "$crate" {
+                    word(self.writer(), &segment.identifier.name.as_str())?;
+                }
+            }
+            space(self.writer())?;
+            self.print_tts(attr.tokens.clone())?;
+        }
         word(self.writer(), "]")
     }
 }
@@ -789,6 +807,45 @@ pub trait PrintState<'a> {
         self.end()
     }

+    /// This doesn't deserve to be called "pretty" printing, but it should be
+    /// meaning-preserving. A quick hack that might help would be to look at the
+    /// spans embedded in the TTs to decide where to put spaces and newlines.
+    /// But it'd be better to parse these according to the grammar of the
+    /// appropriate macro, transcribe back into the grammar we just parsed from,
+    /// and then pretty-print the resulting AST nodes (so, e.g., we print
+    /// expression arguments as expressions). It can be done! I think.
+    fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> {
+        match tt {
+            TokenTree::Token(_, ref tk) => {
+                word(self.writer(), &token_to_string(tk))?;
+                match *tk {
+                    parse::token::DocComment(..) => {
+                        hardbreak(self.writer())
+                    }
+                    _ => Ok(())
+                }
+            }
+            TokenTree::Delimited(_, ref delimed) => {
+                word(self.writer(), &token_to_string(&delimed.open_token()))?;
+                space(self.writer())?;
+                self.print_tts(delimed.stream())?;
+                space(self.writer())?;
+                word(self.writer(), &token_to_string(&delimed.close_token()))
+            },
+        }
+    }
+
+    fn print_tts(&mut self, tts: tokenstream::TokenStream) -> io::Result<()> {
+        self.ibox(0)?;
+        for (i, tt) in tts.into_trees().enumerate() {
+            if i != 0 {
+                space(self.writer())?;
+            }
+            self.print_tt(tt)?;
+        }
+        self.end()
+    }
+
     fn space_if_not_bol(&mut self) -> io::Result<()> {
         if !self.is_bol() { space(self.writer())?; }
         Ok(())
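Moving `print_tt`/`print_tts` into the `PrintState` trait (the old inherent copies on `State` are deleted in the next hunk) gives both pretty-printers a single recursive walk: a leaf token prints itself; a delimited group prints its open token, recurses into its stream, then prints its close token. A toy version of that recursion (types invented for illustration):

```rust
// Minimal stand-in for a token tree: either a leaf token or a
// delimited group containing a nested stream.
enum TokenTree {
    Token(String),
    Delimited(char, char, Vec<TokenTree>),
}

// Mirrors print_tt: leaves are emitted verbatim; groups emit the open
// delimiter, the recursively printed stream, then the close delimiter.
fn print_tt(out: &mut String, tt: &TokenTree) {
    match tt {
        TokenTree::Token(tok) => out.push_str(tok),
        TokenTree::Delimited(open, close, stream) => {
            out.push(*open);
            print_tts(out, stream);
            out.push(*close);
        }
    }
}

// Mirrors print_tts: space-separate the trees of one stream.
fn print_tts(out: &mut String, tts: &[TokenTree]) {
    for (i, tt) in tts.iter().enumerate() {
        if i != 0 {
            out.push(' '); // the real printer uses breakable spaces
        }
        print_tt(out, tt);
    }
}

fn main() {
    let tts = vec![
        TokenTree::Token("foo".into()),
        TokenTree::Delimited('(', ')', vec![TokenTree::Token("bar".into())]),
    ];
    let mut out = String::new();
    print_tts(&mut out, &tts);
    assert_eq!(out, "foo (bar)");
}
```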
@@ -1458,45 +1515,6 @@ impl<'a> State<'a> {
         }
     }

-    /// This doesn't deserve to be called "pretty" printing, but it should be
-    /// meaning-preserving. A quick hack that might help would be to look at the
-    /// spans embedded in the TTs to decide where to put spaces and newlines.
-    /// But it'd be better to parse these according to the grammar of the
-    /// appropriate macro, transcribe back into the grammar we just parsed from,
-    /// and then pretty-print the resulting AST nodes (so, e.g., we print
-    /// expression arguments as expressions). It can be done! I think.
-    pub fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> {
-        match tt {
-            TokenTree::Token(_, ref tk) => {
-                word(&mut self.s, &token_to_string(tk))?;
-                match *tk {
-                    parse::token::DocComment(..) => {
-                        hardbreak(&mut self.s)
-                    }
-                    _ => Ok(())
-                }
-            }
-            TokenTree::Delimited(_, ref delimed) => {
-                word(&mut self.s, &token_to_string(&delimed.open_token()))?;
-                space(&mut self.s)?;
-                self.print_tts(delimed.stream())?;
-                space(&mut self.s)?;
-                word(&mut self.s, &token_to_string(&delimed.close_token()))
-            },
-        }
-    }
-
-    pub fn print_tts(&mut self, tts: tokenstream::TokenStream) -> io::Result<()> {
-        self.ibox(0)?;
-        for (i, tt) in tts.into_trees().enumerate() {
-            if i != 0 {
-                space(&mut self.s)?;
-            }
-            self.print_tt(tt)?;
-        }
-        self.end()
-    }
-
     pub fn print_variant(&mut self, v: &ast::Variant) -> io::Result<()> {
         self.head("")?;
         let generics = ast::Generics::default();
@@ -15,6 +15,7 @@ use syntax_pos::{DUMMY_SP, Span};
 use codemap::{self, ExpnInfo, NameAndSpan, MacroAttribute};
 use parse::ParseSess;
 use ptr::P;
+use tokenstream::TokenStream;

 /// Craft a span that will be ignored by the stability lint's
 /// call to codemap's is_internal check.

@@ -70,11 +71,8 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess,
     krate.module.items.insert(0, P(ast::Item {
         attrs: vec![ast::Attribute {
             style: ast::AttrStyle::Outer,
-            value: ast::MetaItem {
-                name: Symbol::intern("prelude_import"),
-                node: ast::MetaItemKind::Word,
-                span: span,
-            },
+            path: ast::Path::from_ident(span, ast::Ident::from_str("prelude_import")),
+            tokens: TokenStream::empty(),
             id: attr::mk_attr_id(),
             is_sugared_doc: false,
             span: span,
@@ -360,7 +360,7 @@ impl PartialEq<ThinTokenStream> for ThinTokenStream {

 impl fmt::Display for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        f.write_str(&pprust::tts_to_string(&self.trees().collect::<Vec<_>>()))
+        f.write_str(&pprust::tokens_to_string(self.clone()))
     }
 }
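With `tokens_to_string` available, `Display for TokenStream` no longer collects the stream into a `Vec<TokenTree>` first; it hands the whole stream to the printer. A minimal sketch of the same delegation over a toy stream type (assumption: joining with single spaces, which the real printer refines with break points):

```rust
use std::fmt;

// Toy stream type; like the diff's TokenStream, its Display impl just
// delegates to the single to-string helper instead of collecting trees.
#[derive(Clone)]
struct Stream(Vec<String>);

fn tokens_to_string(tokens: Stream) -> String {
    tokens.0.join(" ")
}

impl fmt::Display for Stream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(&tokens_to_string(self.clone()))
    }
}

fn main() {
    let s = Stream(vec!["a".into(), "+".into(), "b".into()]);
    assert_eq!(s.to_string(), "a + b");
}
```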
@@ -23,9 +23,11 @@ struct MarkAttrs<'a>(&'a [ast::Name]);

 impl<'a> Visitor<'a> for MarkAttrs<'a> {
     fn visit_attribute(&mut self, attr: &Attribute) {
-        if self.0.contains(&attr.name()) {
-            mark_used(attr);
-            mark_known(attr);
+        if let Some(name) = attr.name() {
+            if self.0.contains(&name) {
+                mark_used(attr);
+                mark_known(attr);
+            }
         }
     }
 }
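The `if let` dance above exists because `attr.name()` now returns an `Option`: with multi-segment attribute paths like `foo::attr_macro`, only single-segment paths still have a plain name. A small sketch of that rule (toy types; rustc's `Name` is an interned symbol):

```rust
// With multi-segment attribute paths, "the name" only exists when the
// path has exactly one segment, hence the Option.
struct Attribute {
    segments: Vec<String>,
}

impl Attribute {
    fn name(&self) -> Option<&str> {
        match self.segments.as_slice() {
            [only] => Some(only.as_str()),
            _ => None,
        }
    }
}

fn main() {
    let inline = Attribute { segments: vec!["inline".into()] };
    let qualified = Attribute { segments: vec!["foo".into(), "attr_macro".into()] };
    assert_eq!(inline.name(), Some("inline"));
    assert_eq!(qualified.name(), None);
}
```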
@@ -439,7 +439,7 @@ impl<'a> TraitDef<'a> {
         attrs.extend(item.attrs
             .iter()
             .filter(|a| {
-                match &*a.name().as_str() {
+                a.name().is_some() && match &*a.name().unwrap().as_str() {
                     "allow" | "warn" | "deny" | "forbid" | "stable" | "unstable" => true,
                     _ => false,
                 }

@@ -248,7 +248,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> {
     fn visit_item(&mut self, item: &'a ast::Item) {
         if let ast::ItemKind::MacroDef(..) = item.node {
             if self.is_proc_macro_crate &&
-               item.attrs.iter().any(|attr| attr.name() == "macro_export") {
+               item.attrs.iter().any(|attr| attr.path == "macro_export") {
                 let msg =
                     "cannot export macro_rules! macros from a `proc-macro` crate type currently";
                 self.handler.span_err(item.span, msg);

@@ -270,12 +270,12 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> {
         for attr in &item.attrs {
             if is_proc_macro_attr(&attr) {
                 if let Some(prev_attr) = found_attr {
-                    let msg = if attr.name() == prev_attr.name() {
+                    let msg = if attr.path == prev_attr.path {
                         format!("Only one `#[{}]` attribute is allowed on any given function",
-                                attr.name())
+                                attr.path)
                     } else {
                         format!("`#[{}]` and `#[{}]` attributes cannot both be applied \
-                                to the same function", attr.name(), prev_attr.name())
+                                to the same function", attr.path, prev_attr.path)
                     };

                     self.handler.struct_span_err(attr.span(), &msg)

@@ -299,7 +299,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> {

         if !is_fn {
             let msg = format!("the `#[{}]` attribute may only be used on bare functions",
-                              attr.name());
+                              attr.path);

             self.handler.span_err(attr.span(), &msg);
             return;

@@ -311,7 +311,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> {

         if !self.is_proc_macro_crate {
             let msg = format!("the `#[{}]` attribute is only usable with crates of the \
-                              `proc-macro` crate type", attr.name());
+                              `proc-macro` crate type", attr.path);

             self.handler.span_err(attr.span(), &msg);
             return;

@@ -20,6 +20,7 @@ extern crate derive_b;
 #[C] //~ ERROR: The attribute `C` is currently unknown to the compiler
 #[B(D)]
 #[B(E = "foo")]
+#[B arbitrary tokens] //~ expected one of `(` or `=`, found `arbitrary`
 struct B;

 fn main() {}

@@ -8,7 +8,5 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-// compile-flags: -Z parse-only
-
 #[doc = $not_there] //~ error: unexpected token: `$`
 fn main() { }

@@ -14,4 +14,5 @@ fn main() {
     globnar::brotz!(); //~ ERROR non-ident macro paths are experimental
     ::foo!(); //~ ERROR non-ident macro paths are experimental
     foo::<T>!(); //~ ERROR type parameters are not allowed on macros
+    #[derive(foo::Bar)] struct T; //~ ERROR non-ident macro paths are experimental
 }

@@ -9,11 +9,11 @@
 // except according to those terms.

 #[derive(Copy(Bad))]
-//~^ ERROR malformed `derive` entry
+//~^ ERROR expected one of `)`, `,`, or `::`, found `(`
 struct Test1;

 #[derive(Copy="bad")]
-//~^ ERROR malformed `derive` entry
+//~^ ERROR expected one of `)`, `,`, or `::`, found `=`
 struct Test2;

 #[derive()]

src/test/compile-fail/suffixed-literal-meta.rs (new file, 25 lines)
@@ -0,0 +1,25 @@
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(attr_literals)]
+
+#[path = 1usize] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1u8] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1u16] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1u32] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1u64] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1isize] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1i8] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1i16] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1i32] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1i64] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1.0f32] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1.0f64] //~ ERROR: suffixed literals are not allowed in attributes
+fn main() { }

@@ -8,10 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-// compile-flags: -Z parse-only
-
-// error-pattern:expected one of `=` or `]`
-
 // asterisk is bogus
-#[attr*]
+#[path*] //~ ERROR expected one of `(` or `=`
 mod m {}

@@ -1,25 +0,0 @@
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags: -Z parse-only
-
-#[foo = 1usize] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1u8] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1u16] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1u32] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1u64] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1isize] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1i8] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1i16] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1i32] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1i64] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1.0f32] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1.0f64] //~ ERROR: suffixed literals are not allowed in attributes
-fn main() { }

@@ -19,6 +19,6 @@ use attr_args::attr_with_args;
 #[attr_with_args(text = "Hello, world!")]
 fn foo() {}

-fn main() {
-    assert_eq!(foo(), "Hello, world!");
-}
+#[::attr_args::identity
+fn main() { assert_eq!(foo(), "Hello, world!"); }]
+struct Dummy;

@@ -30,3 +30,8 @@ pub fn attr_with_args(args: TokenStream, input: TokenStream) -> TokenStream {
     fn foo() -> &'static str { "Hello, world!" }
     "#.parse().unwrap()
 }
+
+#[proc_macro_attribute]
+pub fn identity(attr_args: TokenStream, _: TokenStream) -> TokenStream {
+    attr_args
+}

@@ -19,7 +19,7 @@ use proc_macro::TokenStream;
 #[proc_macro_derive(B, attributes(B, C))]
 pub fn derive(input: TokenStream) -> TokenStream {
     let input = input.to_string();
-    assert!(input.contains("#[B]"));
+    assert!(input.contains("#[B arbitrary tokens]"));
     assert!(input.contains("struct B {"));
     assert!(input.contains("#[C]"));
     "".parse().unwrap()

@@ -11,11 +11,12 @@
 // aux-build:derive-b.rs
 // ignore-stage1

-#[macro_use]
+#![feature(proc_macro)]
+
 extern crate derive_b;

-#[derive(Debug, PartialEq, B, Eq, Copy, Clone)]
-#[B]
+#[derive(Debug, PartialEq, derive_b::B, Eq, Copy, Clone)]
+#[cfg_attr(all(), B arbitrary tokens)]
 struct B {
     #[C]
     a: u64

@@ -2,7 +2,7 @@ error[E0536]: expected 1 cfg-pattern
   --> $DIR/E0536.rs:11:7
    |
 11 | #[cfg(not())] //~ ERROR E0536
-   |       ^^^^^
+   |       ^^^
    |
 error: aborting due to previous error

@@ -2,7 +2,7 @@ error[E0537]: invalid predicate `unknown`
   --> $DIR/E0537.rs:11:7
    |
 11 | #[cfg(unknown())] //~ ERROR E0537
-   |       ^^^^^^^^^
+   |       ^^^^^^^
    |
 error: aborting due to previous error
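One last note tying the test changes together: the `identity` attribute added to the `attr-args` aux crate returns its own arguments as the output item, which is what lets `#[::attr_args::identity fn main() { ... }] struct Dummy;` compile once attribute macros accept arbitrary tokens. A hedged sketch of such a crate (requires the `proc-macro` crate type; it mirrors the aux-file addition in this diff):

```rust
// lib.rs of a crate compiled with crate-type = "proc-macro".
extern crate proc_macro;

use proc_macro::TokenStream;

// Everything written after the attribute path becomes the emitted item;
// the annotated item (`struct Dummy;` in the test) is discarded.
#[proc_macro_attribute]
pub fn identity(attr_args: TokenStream, _item: TokenStream) -> TokenStream {
    attr_args
}
```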