mv compiler to compiler/
This commit is contained in:
parent
db534b3ac2
commit
9e5f7d5631
1686 changed files with 941 additions and 1051 deletions
131
compiler/rustc_macros/src/hash_stable.rs
Normal file
131
compiler/rustc_macros/src/hash_stable.rs
Normal file
|
@ -0,0 +1,131 @@
|
|||
use proc_macro2::{self, Ident};
|
||||
use quote::quote;
|
||||
use syn::{self, parse_quote, Meta, NestedMeta};
|
||||
|
||||
/// Field-level options parsed from a `#[stable_hasher(...)]` attribute.
struct Attributes {
    /// If true, the field is skipped entirely when computing the stable hash.
    ignore: bool,
    /// If `Some(name)`, hash `field.name` instead of the field itself.
    project: Option<Ident>,
}
||||
/// Reads the `#[stable_hasher(...)]` attributes on `field`, if any.
///
/// Recognized modifiers are `ignore` and `project(<ident>)`. Panics when a
/// `stable_hasher` attribute is present but contains no recognized modifier,
/// so that typos are not silently ignored.
fn parse_attributes(field: &syn::Field) -> Attributes {
    let mut attrs = Attributes { ignore: false, project: None };
    for attr in &field.attrs {
        if let Ok(meta) = attr.parse_meta() {
            // Only `#[stable_hasher(...)]` attributes are ours; skip the rest.
            if !meta.path().is_ident("stable_hasher") {
                continue;
            }
            // Tracks whether any recognized modifier was found inside the
            // attribute; used below to reject e.g. `#[stable_hasher(typo)]`.
            let mut any_attr = false;
            if let Meta::List(list) = meta {
                for nested in list.nested.iter() {
                    if let NestedMeta::Meta(meta) = nested {
                        if meta.path().is_ident("ignore") {
                            attrs.ignore = true;
                            any_attr = true;
                        }
                        if meta.path().is_ident("project") {
                            // `project(<ident>)`: take the first nested ident.
                            if let Meta::List(list) = meta {
                                if let Some(nested) = list.nested.iter().next() {
                                    if let NestedMeta::Meta(meta) = nested {
                                        attrs.project = meta.path().get_ident().cloned();
                                        any_attr = true;
                                    }
                                }
                            }
                        }
                    }
                }
            }
            if !any_attr {
                panic!("error parsing stable_hasher");
            }
        }
    }
    attrs
}
||||
/// Derive body for `#[derive(HashStable_Generic)]`: implements
/// `HashStable<__CTX>` for any `__CTX: crate::HashStableContext`, hashing
/// every field (honoring `#[stable_hasher(ignore)]` / `project(...)`).
pub fn hash_stable_generic_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
    let generic: syn::GenericParam = parse_quote!(__CTX);
    s.add_bounds(synstructure::AddBounds::Generics);
    s.add_impl_generic(generic);
    s.add_where_predicate(parse_quote! { __CTX: crate::HashStableContext });
    // Per-field hashing code, selected by the field's `stable_hasher` options.
    let body = s.each(|bi| {
        let attrs = parse_attributes(bi.ast());
        if attrs.ignore {
            quote! {}
        } else if let Some(project) = attrs.project {
            quote! {
                &#bi.#project.hash_stable(__hcx, __hasher);
            }
        } else {
            quote! {
                #bi.hash_stable(__hcx, __hasher);
            }
        }
    });

    // Enums additionally hash their discriminant so that variants with
    // identical field data still hash differently.
    let discriminant = match s.ast().data {
        syn::Data::Enum(_) => quote! {
            ::std::mem::discriminant(self).hash_stable(__hcx, __hasher);
        },
        syn::Data::Struct(_) => quote! {},
        syn::Data::Union(_) => panic!("cannot derive on union"),
    };

    s.bound_impl(
        quote!(::rustc_data_structures::stable_hasher::HashStable<__CTX>),
        quote! {
            fn hash_stable(
                &self,
                __hcx: &mut __CTX,
                __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher) {
                #discriminant
                match *self { #body }
            }
        },
    )
}
||||
/// Derive body for `#[derive(HashStable)]`: like `hash_stable_generic_derive`,
/// but implements `HashStable` against the concrete
/// `::rustc_middle::ich::StableHashingContext<'__ctx>` context type.
pub fn hash_stable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
    let generic: syn::GenericParam = parse_quote!('__ctx);
    s.add_bounds(synstructure::AddBounds::Generics);
    s.add_impl_generic(generic);
    // Per-field hashing code, selected by the field's `stable_hasher` options.
    let body = s.each(|bi| {
        let attrs = parse_attributes(bi.ast());
        if attrs.ignore {
            quote! {}
        } else if let Some(project) = attrs.project {
            quote! {
                &#bi.#project.hash_stable(__hcx, __hasher);
            }
        } else {
            quote! {
                #bi.hash_stable(__hcx, __hasher);
            }
        }
    });

    // Enums additionally hash their discriminant so that variants with
    // identical field data still hash differently.
    let discriminant = match s.ast().data {
        syn::Data::Enum(_) => quote! {
            ::std::mem::discriminant(self).hash_stable(__hcx, __hasher);
        },
        syn::Data::Struct(_) => quote! {},
        syn::Data::Union(_) => panic!("cannot derive on union"),
    };

    s.bound_impl(
        quote!(
            ::rustc_data_structures::stable_hasher::HashStable<
                ::rustc_middle::ich::StableHashingContext<'__ctx>,
            >
        ),
        quote! {
            fn hash_stable(
                &self,
                __hcx: &mut ::rustc_middle::ich::StableHashingContext<'__ctx>,
                __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher) {
                #discriminant
                match *self { #body }
            }
        },
    )
}
38
compiler/rustc_macros/src/lib.rs
Normal file
38
compiler/rustc_macros/src/lib.rs
Normal file
|
@ -0,0 +1,38 @@
|
|||
#![allow(rustc::default_hash_types)]
|
||||
#![recursion_limit = "128"]
|
||||
|
||||
use synstructure::decl_derive;
|
||||
|
||||
use proc_macro::TokenStream;
|
||||
|
||||
mod hash_stable;
|
||||
mod lift;
|
||||
mod query;
|
||||
mod serialize;
|
||||
mod symbols;
|
||||
mod type_foldable;
|
||||
|
||||
/// Entry point for the `rustc_queries! { ... }` macro; the actual work is
/// done in the `query` module.
#[proc_macro]
pub fn rustc_queries(input: TokenStream) -> TokenStream {
    query::rustc_queries(input)
}
||||
/// Entry point for the `symbols! { ... }` macro; the actual work is done in
/// the `symbols` module.
#[proc_macro]
pub fn symbols(input: TokenStream) -> TokenStream {
    symbols::symbols(input)
}
||||
// Stable-hashing derives; `stable_hasher` is the helper attribute recognized
// on fields (`ignore` / `project(...)`).
decl_derive!([HashStable, attributes(stable_hasher)] => hash_stable::hash_stable_derive);
decl_derive!(
    [HashStable_Generic, attributes(stable_hasher)] =>
    hash_stable::hash_stable_generic_derive
);

// (De)serialization derives built on `rustc_serialize`.
decl_derive!([Decodable] => serialize::decodable_derive);
decl_derive!([Encodable] => serialize::encodable_derive);
decl_derive!([TyDecodable] => serialize::type_decodable_derive);
decl_derive!([TyEncodable] => serialize::type_encodable_derive);
decl_derive!([MetadataDecodable] => serialize::meta_decodable_derive);
decl_derive!([MetadataEncodable] => serialize::meta_encodable_derive);
decl_derive!([TypeFoldable, attributes(type_foldable)] => type_foldable::type_foldable_derive);
decl_derive!([Lift, attributes(lift)] => lift::lift_derive);
51
compiler/rustc_macros/src/lift.rs
Normal file
51
compiler/rustc_macros/src/lift.rs
Normal file
|
@ -0,0 +1,51 @@
|
|||
use quote::quote;
|
||||
use syn::{self, parse_quote};
|
||||
|
||||
/// Derive body for `#[derive(Lift)]`: implements
/// `::rustc_middle::ty::Lift<'__lifted>`, where `Lifted` is `Self` with every
/// `'tcx` lifetime argument replaced by `'__lifted` and each field lifted via
/// `__tcx.lift(..)`.
pub fn lift_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
    s.add_bounds(synstructure::AddBounds::Generics);

    let tcx: syn::Lifetime = parse_quote!('tcx);
    let newtcx: syn::GenericParam = parse_quote!('__lifted);

    // Build the `Lifted` associated type: the same type constructor applied
    // to rewritten generic arguments.
    let lifted = {
        let ast = s.ast();
        let ident = &ast.ident;

        // Replace `'tcx` lifetime by the `'__lifted` lifetime
        let (_, generics, _) = ast.generics.split_for_impl();
        let mut generics: syn::AngleBracketedGenericArguments = syn::parse_quote! { #generics };
        for arg in generics.args.iter_mut() {
            match arg {
                syn::GenericArgument::Lifetime(l) if *l == tcx => {
                    *arg = parse_quote!('__lifted);
                }
                syn::GenericArgument::Type(t) => {
                    // Type parameters are mapped through their own `Lifted`
                    // associated type.
                    *arg = syn::parse_quote! { #t::Lifted };
                }
                _ => {}
            }
        }

        quote! { #ident #generics }
    };

    // Rebuild each variant, lifting every field; `?` bails out with `None`
    // if any field fails to lift.
    let body = s.each_variant(|vi| {
        let bindings = &vi.bindings();
        vi.construct(|_, index| {
            let bi = &bindings[index];
            quote! { __tcx.lift(#bi)? }
        })
    });

    s.add_impl_generic(newtcx);
    s.bound_impl(
        quote!(::rustc_middle::ty::Lift<'__lifted>),
        quote! {
            type Lifted = #lifted;

            fn lift_to_tcx(&self, __tcx: ::rustc_middle::ty::TyCtxt<'__lifted>) -> Option<#lifted> {
                Some(match *self { #body })
            }
        },
    )
}
567
compiler/rustc_macros/src/query.rs
Normal file
567
compiler/rustc_macros/src/query.rs
Normal file
|
@ -0,0 +1,567 @@
|
|||
use proc_macro::TokenStream;
|
||||
use proc_macro2::{Delimiter, TokenTree};
|
||||
use quote::quote;
|
||||
use syn::parse::{Parse, ParseStream, Result};
|
||||
use syn::punctuated::Punctuated;
|
||||
use syn::spanned::Spanned;
|
||||
use syn::{
|
||||
braced, parenthesized, parse_macro_input, Attribute, Block, Error, Expr, Ident, ReturnType,
|
||||
Token, Type,
|
||||
};
|
||||
|
||||
#[allow(non_camel_case_types)]
mod kw {
    // Custom `query` keyword that introduces each query declaration.
    syn::custom_keyword!(query);
}
||||
/// Ident or a wildcard `_` (the wildcard is stored as an `Ident` spelled `"_"`).
struct IdentOrWild(Ident);
||||
impl Parse for IdentOrWild {
|
||||
fn parse(input: ParseStream<'_>) -> Result<Self> {
|
||||
Ok(if input.peek(Token![_]) {
|
||||
let underscore = input.parse::<Token![_]>()?;
|
||||
IdentOrWild(Ident::new("_", underscore.span()))
|
||||
} else {
|
||||
IdentOrWild(input.parse()?)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A modifier for a query
enum QueryModifier {
    /// The description of the query. The optional `Ident` is the `|tcx|`
    /// binder name; the list is the `format!`-style argument list.
    Desc(Option<Ident>, Punctuated<Expr, Token![,]>),

    /// Use this type for the in-memory cache.
    Storage(Type),

    /// Cache the query to disk if the `Expr` returns true. The optional pair
    /// is the `(tcx, value)` binder names.
    Cache(Option<(IdentOrWild, IdentOrWild)>, Block),

    /// Custom code to load the query from disk. The idents are the
    /// `(tcx, id)` binder names.
    LoadCached(Ident, Ident, Block),

    /// A cycle error for this query aborting the compilation with a fatal error.
    FatalCycle,

    /// A cycle error results in a delay_bug call
    CycleDelayBug,

    /// Don't hash the result, instead just mark a query red if it runs
    NoHash,

    /// Generate a dep node based on the dependencies of the query
    Anon,

    /// Always evaluate the query, ignoring its dependencies
    EvalAlways,
}
||||
impl Parse for QueryModifier {
    /// Dispatches on the leading identifier to parse one query modifier;
    /// unknown identifiers produce a spanned error.
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        let modifier: Ident = input.parse()?;
        if modifier == "desc" {
            // Parse a description modifier like:
            // `desc { |tcx| "foo {}", tcx.item_path(key) }`
            let attr_content;
            braced!(attr_content in input);
            let tcx = if attr_content.peek(Token![|]) {
                attr_content.parse::<Token![|]>()?;
                let tcx = attr_content.parse()?;
                attr_content.parse::<Token![|]>()?;
                Some(tcx)
            } else {
                None
            };
            let desc = attr_content.parse_terminated(Expr::parse)?;
            Ok(QueryModifier::Desc(tcx, desc))
        } else if modifier == "cache_on_disk_if" {
            // Parse a cache modifier like:
            // `cache(tcx, value) { |tcx| key.is_local() }`
            // The argument list is optional; peek at the next token group on a
            // fork (so nothing is consumed) to see whether it is parenthesized.
            let has_args = if let TokenTree::Group(group) = input.fork().parse()? {
                group.delimiter() == Delimiter::Parenthesis
            } else {
                false
            };
            let args = if has_args {
                let args;
                parenthesized!(args in input);
                let tcx = args.parse()?;
                args.parse::<Token![,]>()?;
                let value = args.parse()?;
                Some((tcx, value))
            } else {
                None
            };
            let block = input.parse()?;
            Ok(QueryModifier::Cache(args, block))
        } else if modifier == "load_cached" {
            // Parse a load_cached modifier like:
            // `load_cached(tcx, id) { tcx.queries.on_disk_cache.try_load_query_result(tcx, id) }`
            let args;
            parenthesized!(args in input);
            let tcx = args.parse()?;
            args.parse::<Token![,]>()?;
            let id = args.parse()?;
            let block = input.parse()?;
            Ok(QueryModifier::LoadCached(tcx, id, block))
        } else if modifier == "storage" {
            // `storage(Type)`
            let args;
            parenthesized!(args in input);
            let ty = args.parse()?;
            Ok(QueryModifier::Storage(ty))
        } else if modifier == "fatal_cycle" {
            Ok(QueryModifier::FatalCycle)
        } else if modifier == "cycle_delay_bug" {
            Ok(QueryModifier::CycleDelayBug)
        } else if modifier == "no_hash" {
            Ok(QueryModifier::NoHash)
        } else if modifier == "anon" {
            Ok(QueryModifier::Anon)
        } else if modifier == "eval_always" {
            Ok(QueryModifier::EvalAlways)
        } else {
            Err(Error::new(modifier.span(), "unknown query modifier"))
        }
    }
}
||||
/// Ensures only doc comment attributes are used
|
||||
fn check_attributes(attrs: Vec<Attribute>) -> Result<()> {
|
||||
for attr in attrs {
|
||||
if !attr.path.is_ident("doc") {
|
||||
return Err(Error::new(attr.span(), "attributes not supported on queries"));
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// A compiler query. `query ... { ... }`
struct Query {
    /// Raw modifiers as parsed; deduplicated later by `process_modifiers`.
    modifiers: List<QueryModifier>,
    /// Query name, e.g. `type_of`.
    name: Ident,
    /// Binder name for the key (or `_`).
    key: IdentOrWild,
    /// Type of the key.
    arg: Type,
    /// Declared return type (`-> T`, or the default `()`).
    result: ReturnType,
}
||||
impl Parse for Query {
    /// Parses `query name(key: ArgTy) -> Ret { <modifiers> }`, rejecting any
    /// non-doc attribute on the declaration.
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        check_attributes(input.call(Attribute::parse_outer)?)?;

        // Parse the query declaration. Like `query type_of(key: DefId) -> Ty<'tcx>`
        input.parse::<kw::query>()?;
        let name: Ident = input.parse()?;
        let arg_content;
        parenthesized!(arg_content in input);
        let key = arg_content.parse()?;
        arg_content.parse::<Token![:]>()?;
        let arg = arg_content.parse()?;
        let result = input.parse()?;

        // Parse the query modifiers
        let content;
        braced!(content in input);
        let modifiers = content.parse()?;

        Ok(Query { modifiers, name, key, arg, result })
    }
}
||||
/// A type used to greedily parse another type until the input is empty.
struct List<T>(Vec<T>);
||||
impl<T: Parse> Parse for List<T> {
|
||||
fn parse(input: ParseStream<'_>) -> Result<Self> {
|
||||
let mut list = Vec::new();
|
||||
while !input.is_empty() {
|
||||
list.push(input.parse()?);
|
||||
}
|
||||
Ok(List(list))
|
||||
}
|
||||
}
|
||||
|
||||
/// A named group containing queries.
struct Group {
    /// Group name, used as the key in the generated query list.
    name: Ident,
    /// All query declarations inside the group's braces.
    queries: List<Query>,
}
||||
impl Parse for Group {
|
||||
fn parse(input: ParseStream<'_>) -> Result<Self> {
|
||||
let name: Ident = input.parse()?;
|
||||
let content;
|
||||
braced!(content in input);
|
||||
Ok(Group { name, queries: content.parse()? })
|
||||
}
|
||||
}
|
||||
|
||||
/// Deduplicated, validated form of a query's modifiers, produced by
/// `process_modifiers`.
struct QueryModifiers {
    /// The description of the query.
    desc: (Option<Ident>, Punctuated<Expr, Token![,]>),

    /// Use this type for the in-memory cache.
    storage: Option<Type>,

    /// Cache the query to disk if the `Block` returns true.
    cache: Option<(Option<(IdentOrWild, IdentOrWild)>, Block)>,

    /// Custom code to load the query from disk.
    load_cached: Option<(Ident, Ident, Block)>,

    /// A cycle error for this query aborting the compilation with a fatal error.
    fatal_cycle: bool,

    /// A cycle error results in a delay_bug call
    cycle_delay_bug: bool,

    /// Don't hash the result, instead just mark a query red if it runs
    no_hash: bool,

    /// Generate a dep node based on the dependencies of the query
    anon: bool,

    /// Always evaluate the query, ignoring its dependencies
    eval_always: bool,
}
||||
/// Process query modifiers into a struct, erroring on duplicates.
///
/// Drains `query.modifiers`, checking that each modifier kind appears at most
/// once. Panics on duplicates and on a missing `desc` (the only mandatory
/// modifier).
fn process_modifiers(query: &mut Query) -> QueryModifiers {
    let mut load_cached = None;
    let mut storage = None;
    let mut cache = None;
    let mut desc = None;
    let mut fatal_cycle = false;
    let mut cycle_delay_bug = false;
    let mut no_hash = false;
    let mut anon = false;
    let mut eval_always = false;
    for modifier in query.modifiers.0.drain(..) {
        match modifier {
            QueryModifier::LoadCached(tcx, id, block) => {
                if load_cached.is_some() {
                    panic!("duplicate modifier `load_cached` for query `{}`", query.name);
                }
                load_cached = Some((tcx, id, block));
            }
            QueryModifier::Storage(ty) => {
                if storage.is_some() {
                    panic!("duplicate modifier `storage` for query `{}`", query.name);
                }
                storage = Some(ty);
            }
            QueryModifier::Cache(args, expr) => {
                if cache.is_some() {
                    panic!("duplicate modifier `cache` for query `{}`", query.name);
                }
                cache = Some((args, expr));
            }
            QueryModifier::Desc(tcx, list) => {
                if desc.is_some() {
                    panic!("duplicate modifier `desc` for query `{}`", query.name);
                }
                desc = Some((tcx, list));
            }
            QueryModifier::FatalCycle => {
                if fatal_cycle {
                    panic!("duplicate modifier `fatal_cycle` for query `{}`", query.name);
                }
                fatal_cycle = true;
            }
            QueryModifier::CycleDelayBug => {
                if cycle_delay_bug {
                    panic!("duplicate modifier `cycle_delay_bug` for query `{}`", query.name);
                }
                cycle_delay_bug = true;
            }
            QueryModifier::NoHash => {
                if no_hash {
                    panic!("duplicate modifier `no_hash` for query `{}`", query.name);
                }
                no_hash = true;
            }
            QueryModifier::Anon => {
                if anon {
                    panic!("duplicate modifier `anon` for query `{}`", query.name);
                }
                anon = true;
            }
            QueryModifier::EvalAlways => {
                if eval_always {
                    panic!("duplicate modifier `eval_always` for query `{}`", query.name);
                }
                eval_always = true;
            }
        }
    }
    // `desc` is mandatory; everything else has a sensible default.
    let desc = desc.unwrap_or_else(|| {
        panic!("no description provided for query `{}`", query.name);
    });
    QueryModifiers {
        load_cached,
        storage,
        cache,
        desc,
        fatal_cycle,
        cycle_delay_bug,
        no_hash,
        anon,
        eval_always,
    }
}
||||
/// Add the impl of QueryDescription for the query to `impls` if one is requested.
///
/// Emits `describe` from the `desc` modifier and, when `cache_on_disk_if` is
/// present, `cache_on_disk` plus `try_load_from_disk` (custom when a
/// `load_cached` modifier was given, default otherwise).
fn add_query_description_impl(
    query: &Query,
    modifiers: QueryModifiers,
    impls: &mut proc_macro2::TokenStream,
) {
    let name = &query.name;
    let arg = &query.arg;
    let key = &query.key.0;

    // Find out if we should cache the query on disk
    let cache = if let Some((args, expr)) = modifiers.cache.as_ref() {
        let try_load_from_disk = if let Some((tcx, id, block)) = modifiers.load_cached.as_ref() {
            // Use custom code to load the query from disk
            quote! {
                #[inline]
                fn try_load_from_disk(
                    #tcx: TyCtxt<'tcx>,
                    #id: SerializedDepNodeIndex
                ) -> Option<Self::Value> {
                    #block
                }
            }
        } else {
            // Use the default code to load the query from disk
            quote! {
                #[inline]
                fn try_load_from_disk(
                    tcx: TyCtxt<'tcx>,
                    id: SerializedDepNodeIndex
                ) -> Option<Self::Value> {
                    tcx.queries.on_disk_cache.try_load_query_result(tcx, id)
                }
            }
        };

        // Binder names for `cache_on_disk`; `_` when the user gave no
        // `(tcx, value)` argument list.
        let tcx = args
            .as_ref()
            .map(|t| {
                let t = &(t.0).0;
                quote! { #t }
            })
            .unwrap_or(quote! { _ });
        let value = args
            .as_ref()
            .map(|t| {
                let t = &(t.1).0;
                quote! { #t }
            })
            .unwrap_or(quote! { _ });
        // expr is a `Block`, meaning that `{ #expr }` gets expanded
        // to `{ { stmts... } }`, which triggers the `unused_braces` lint.
        quote! {
            #[inline]
            #[allow(unused_variables, unused_braces)]
            fn cache_on_disk(
                #tcx: TyCtxt<'tcx>,
                #key: &Self::Key,
                #value: Option<&Self::Value>
            ) -> bool {
                #expr
            }

            #try_load_from_disk
        }
    } else {
        // `load_cached` without `cache_on_disk_if` would never be called.
        if modifiers.load_cached.is_some() {
            panic!("load_cached modifier on query `{}` without a cache modifier", name);
        }
        quote! {}
    };

    let (tcx, desc) = modifiers.desc;
    let tcx = tcx.as_ref().map(|t| quote! { #t }).unwrap_or(quote! { _ });

    let desc = quote! {
        #[allow(unused_variables)]
        fn describe(
            #tcx: TyCtxt<'tcx>,
            #key: #arg,
        ) -> Cow<'static, str> {
            format!(#desc).into()
        }
    };

    impls.extend(quote! {
        impl<'tcx> QueryDescription<TyCtxt<'tcx>> for queries::#name<'tcx> {
            #desc
            #cache
        }
    });
}
||||
/// Implementation of the `rustc_queries!` macro: parses the grouped query
/// declarations and emits the helper macros (`rustc_query_append`,
/// `rustc_dep_node_append`, `rustc_dep_node_force`, `rustc_cached_queries`,
/// `rustc_dep_node_try_load_from_on_disk_cache`) plus the
/// `QueryDescription` impls.
pub fn rustc_queries(input: TokenStream) -> TokenStream {
    let groups = parse_macro_input!(input as List<Group>);

    // Accumulators for the several code fragments generated per query.
    let mut query_stream = quote! {};
    let mut query_description_stream = quote! {};
    let mut dep_node_def_stream = quote! {};
    let mut dep_node_force_stream = quote! {};
    let mut try_load_from_on_disk_cache_stream = quote! {};
    let mut cached_queries = quote! {};

    for group in groups.0 {
        let mut group_stream = quote! {};
        for mut query in group.queries.0 {
            let modifiers = process_modifiers(&mut query);
            let name = &query.name;
            let arg = &query.arg;
            let result_full = &query.result;
            // Spell out the default return type explicitly as `-> ()`.
            let result = match query.result {
                ReturnType::Default => quote! { -> () },
                _ => quote! { #result_full },
            };

            // Disk-cached queries get a re-load match arm and are listed in
            // `rustc_cached_queries`.
            if modifiers.cache.is_some() {
                cached_queries.extend(quote! {
                    #name,
                });

                try_load_from_on_disk_cache_stream.extend(quote! {
                    ::rustc_middle::dep_graph::DepKind::#name => {
                        if <#arg as DepNodeParams<TyCtxt<'_>>>::can_reconstruct_query_key() {
                            debug_assert!($tcx.dep_graph
                                .node_color($dep_node)
                                .map(|c| c.is_green())
                                .unwrap_or(false));

                            let key = <#arg as DepNodeParams<TyCtxt<'_>>>::recover($tcx, $dep_node).unwrap();
                            if queries::#name::cache_on_disk($tcx, &key, None) {
                                let _ = $tcx.#name(key);
                            }
                        }
                    }
                });
            }

            let mut attributes = Vec::new();

            // Pass on the fatal_cycle modifier
            if modifiers.fatal_cycle {
                attributes.push(quote! { fatal_cycle });
            };
            // Pass on the storage modifier
            if let Some(ref ty) = modifiers.storage {
                attributes.push(quote! { storage(#ty) });
            };
            // Pass on the cycle_delay_bug modifier
            if modifiers.cycle_delay_bug {
                attributes.push(quote! { cycle_delay_bug });
            };
            // Pass on the no_hash modifier
            if modifiers.no_hash {
                attributes.push(quote! { no_hash });
            };
            // Pass on the anon modifier
            if modifiers.anon {
                attributes.push(quote! { anon });
            };
            // Pass on the eval_always modifier
            if modifiers.eval_always {
                attributes.push(quote! { eval_always });
            };

            let attribute_stream = quote! {#(#attributes),*};

            // Add the query to the group
            group_stream.extend(quote! {
                [#attribute_stream] fn #name: #name(#arg) #result,
            });

            // Create a dep node for the query
            dep_node_def_stream.extend(quote! {
                [#attribute_stream] #name(#arg),
            });

            // Add a match arm to force the query given the dep node
            dep_node_force_stream.extend(quote! {
                ::rustc_middle::dep_graph::DepKind::#name => {
                    if <#arg as DepNodeParams<TyCtxt<'_>>>::can_reconstruct_query_key() {
                        if let Some(key) = <#arg as DepNodeParams<TyCtxt<'_>>>::recover($tcx, $dep_node) {
                            force_query::<crate::ty::query::queries::#name<'_>, _>(
                                $tcx,
                                key,
                                DUMMY_SP,
                                *$dep_node
                            );
                            return true;
                        }
                    }
                }
            });

            add_query_description_impl(&query, modifiers, &mut query_description_stream);
        }
        let name = &group.name;
        query_stream.extend(quote! {
            #name { #group_stream },
        });
    }

    // `Null` is not a real query; forcing it is a compiler bug.
    dep_node_force_stream.extend(quote! {
        ::rustc_middle::dep_graph::DepKind::Null => {
            bug!("Cannot force dep node: {:?}", $dep_node)
        }
    });

    TokenStream::from(quote! {
        macro_rules! rustc_query_append {
            ([$($macro:tt)*][$($other:tt)*]) => {
                $($macro)* {
                    $($other)*

                    #query_stream

                }
            }
        }
        macro_rules! rustc_dep_node_append {
            ([$($macro:tt)*][$($other:tt)*]) => {
                $($macro)*(
                    $($other)*

                    #dep_node_def_stream
                );
            }
        }
        macro_rules! rustc_dep_node_force {
            ([$dep_node:expr, $tcx:expr] $($other:tt)*) => {
                match $dep_node.kind {
                    $($other)*

                    #dep_node_force_stream
                }
            }
        }
        macro_rules! rustc_cached_queries {
            ($($macro:tt)*) => {
                $($macro)*(#cached_queries);
            }
        }

        #query_description_stream

        macro_rules! rustc_dep_node_try_load_from_on_disk_cache {
            ($dep_node:expr, $tcx:expr) => {
                match $dep_node.kind {
                    #try_load_from_on_disk_cache_stream
                    _ => (),
                }
            }
        }
    })
}
290
compiler/rustc_macros/src/serialize.rs
Normal file
290
compiler/rustc_macros/src/serialize.rs
Normal file
|
@ -0,0 +1,290 @@
|
|||
use proc_macro2::TokenStream;
|
||||
use quote::quote;
|
||||
use syn::parse_quote;
|
||||
|
||||
/// Derive body for `#[derive(TyDecodable)]`: implements `Decodable<__D>` for
/// every `__D: ::rustc_middle::ty::codec::TyDecoder<'tcx>`, adding a `'tcx`
/// lifetime parameter when the type does not already have one.
pub fn type_decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
    let decoder_ty = quote! { __D };
    if !s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") {
        s.add_impl_generic(parse_quote! { 'tcx });
    }
    s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_middle::ty::codec::TyDecoder<'tcx>});
    s.add_bounds(synstructure::AddBounds::Generics);

    decodable_body(s, decoder_ty)
}
||||
/// Derive body for `#[derive(MetadataDecodable)]`: implements `Decodable`
/// against the concrete metadata `DecodeContext<'__a, 'tcx>` decoder, adding
/// a `'tcx` lifetime parameter when the type does not already have one.
pub fn meta_decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
    if !s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") {
        s.add_impl_generic(parse_quote! { 'tcx });
    }
    s.add_impl_generic(parse_quote! { '__a });
    let decoder_ty = quote! { DecodeContext<'__a, 'tcx> };
    s.add_bounds(synstructure::AddBounds::Generics);

    decodable_body(s, decoder_ty)
}
||||
pub fn decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
|
||||
let decoder_ty = quote! { __D };
|
||||
s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_serialize::Decoder});
|
||||
s.add_bounds(synstructure::AddBounds::Generics);
|
||||
|
||||
decodable_body(s, decoder_ty)
|
||||
}
|
||||
|
||||
/// Shared implementation for all `*Decodable` derives: emits a
/// `Decodable<#decoder_ty>` impl whose `decode` reads either a single-variant
/// type as a struct, or a multi-variant enum by dispatching on a variant tag.
/// Panics on unions.
fn decodable_body(
    s: synstructure::Structure<'_>,
    decoder_ty: TokenStream,
) -> proc_macro2::TokenStream {
    if let syn::Data::Union(_) = s.ast().data {
        panic!("cannot derive on union")
    }
    let ty_name = s.ast().ident.to_string();
    let decode_body = match s.variants() {
        // Exactly one variant: decode as a struct.
        [vi] => {
            let construct = vi.construct(|field, index| decode_field(field, index, true));
            let n_fields = vi.ast().fields.len();
            quote! {
                ::rustc_serialize::Decoder::read_struct(
                    __decoder,
                    #ty_name,
                    #n_fields,
                    |__decoder| { ::std::result::Result::Ok(#construct) },
                )
            }
        }
        // Multiple variants: read a tag, then the matching variant's fields.
        variants => {
            let match_inner: TokenStream = variants
                .iter()
                .enumerate()
                .map(|(idx, vi)| {
                    let construct = vi.construct(|field, index| decode_field(field, index, false));
                    quote! { #idx => { ::std::result::Result::Ok(#construct) } }
                })
                .collect();
            let names: TokenStream = variants
                .iter()
                .map(|vi| {
                    let variant_name = vi.ast().ident.to_string();
                    quote!(#variant_name,)
                })
                .collect();
            // Error message for an out-of-range variant tag.
            let message = format!(
                "invalid enum variant tag while decoding `{}`, expected 0..{}",
                ty_name,
                variants.len()
            );
            quote! {
                ::rustc_serialize::Decoder::read_enum(
                    __decoder,
                    #ty_name,
                    |__decoder| {
                        ::rustc_serialize::Decoder::read_enum_variant(
                            __decoder,
                            &[#names],
                            |__decoder, __variant_idx| {
                                match __variant_idx {
                                    #match_inner
                                    _ => return ::std::result::Result::Err(
                                        ::rustc_serialize::Decoder::error(__decoder, #message)),
                                }
                            })
                    }
                )
            }
        }
    };

    s.bound_impl(
        quote!(::rustc_serialize::Decodable<#decoder_ty>),
        quote! {
            fn decode(
                __decoder: &mut #decoder_ty,
            ) -> ::std::result::Result<Self, <#decoder_ty as ::rustc_serialize::Decoder>::Error> {
                #decode_body
            }
        },
    )
}
||||
/// Emits the expression that decodes one field.
///
/// Reference-typed fields go through `RefDecodable::decode`; everything else
/// uses the ordinary `Decodable::decode`. `is_struct` selects
/// `read_struct_field` (with a field name) vs `read_enum_variant_arg`.
/// Errors are propagated with an explicit match rather than `?` because the
/// generated code must not rely on `From` conversions of the error type.
fn decode_field(field: &syn::Field, index: usize, is_struct: bool) -> proc_macro2::TokenStream {
    let decode_inner_method = if let syn::Type::Reference(_) = field.ty {
        quote! { ::rustc_middle::ty::codec::RefDecodable::decode }
    } else {
        quote! { ::rustc_serialize::Decodable::decode }
    };
    let (decode_method, opt_field_name) = if is_struct {
        // Tuple-struct fields have no ident; fall back to the index as name.
        let field_name = field.ident.as_ref().map_or_else(|| index.to_string(), |i| i.to_string());
        (
            proc_macro2::Ident::new("read_struct_field", proc_macro2::Span::call_site()),
            quote! { #field_name, },
        )
    } else {
        (
            proc_macro2::Ident::new("read_enum_variant_arg", proc_macro2::Span::call_site()),
            quote! {},
        )
    };

    quote! {
        match ::rustc_serialize::Decoder::#decode_method(
            __decoder, #opt_field_name #index, #decode_inner_method) {
            ::std::result::Result::Ok(__res) => __res,
            ::std::result::Result::Err(__err) => return ::std::result::Result::Err(__err),
        }
    }
}
|
||||
/// Derive body for `#[derive(TyEncodable)]`: implements `Encodable<__E>` for
/// every `__E: ::rustc_middle::ty::codec::TyEncoder<'tcx>`, adding a `'tcx`
/// lifetime parameter when the type does not already have one.
pub fn type_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
    if !s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") {
        s.add_impl_generic(parse_quote! {'tcx});
    }
    let encoder_ty = quote! { __E };
    s.add_impl_generic(parse_quote! {#encoder_ty: ::rustc_middle::ty::codec::TyEncoder<'tcx>});
    s.add_bounds(synstructure::AddBounds::Generics);

    encodable_body(s, encoder_ty, false)
}
|
||||
/// Derive body for `#[derive(MetadataEncodable)]`: implements `Encodable`
/// against the concrete metadata `EncodeContext<'__a, 'tcx>` encoder, adding
/// a `'tcx` lifetime parameter when the type does not already have one.
/// Passes `allow_unreachable_code = true` to suppress lints in the generated
/// impl.
pub fn meta_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
    if !s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") {
        s.add_impl_generic(parse_quote! {'tcx});
    }
    s.add_impl_generic(parse_quote! { '__a });
    let encoder_ty = quote! { EncodeContext<'__a, 'tcx> };
    s.add_bounds(synstructure::AddBounds::Generics);

    encodable_body(s, encoder_ty, true)
}
|
||||
pub fn encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
|
||||
let encoder_ty = quote! { __E };
|
||||
s.add_impl_generic(parse_quote! { #encoder_ty: ::rustc_serialize::Encoder});
|
||||
s.add_bounds(synstructure::AddBounds::Generics);
|
||||
|
||||
encodable_body(s, encoder_ty, false)
|
||||
}
|
||||
|
||||
/// Shared implementation behind the `Encodable` derives.
///
/// Builds an `impl ::rustc_serialize::Encodable<#encoder_ty>` whose `encode`
/// method follows the hand-written pattern: types with a single variant
/// (structs) go through `emit_struct`/`emit_struct_field`, multi-variant enums
/// through `emit_enum`/`emit_enum_variant`/`emit_enum_variant_arg`.
///
/// `allow_unreachable_code` injects `#![allow(unreachable_code)]` into the
/// generated `encode` body.
///
/// Panics when invoked on a union.
fn encodable_body(
    mut s: synstructure::Structure<'_>,
    encoder_ty: TokenStream,
    allow_unreachable_code: bool,
) -> proc_macro2::TokenStream {
    // Deriving on unions is not supported.
    if let syn::Data::Union(_) = s.ast().data {
        panic!("cannot derive on union")
    }

    s.bind_with(|binding| {
        // Handle the lack of a blanket reference impl: reference-typed fields
        // are bound by move rather than re-borrowed.
        if let syn::Type::Reference(_) = binding.ast().ty {
            synstructure::BindStyle::Move
        } else {
            synstructure::BindStyle::Ref
        }
    });

    let ty_name = s.ast().ident.to_string();
    let encode_body = match s.variants() {
        // Exactly one variant: encode as a struct.
        [_] => {
            // Incremented per field inside the closure; after `each_variant`
            // returns it holds the total field count, which is then used as
            // the length argument to `emit_struct` below.
            let mut field_idx = 0usize;
            let encode_inner = s.each_variant(|vi| {
                vi.bindings()
                    .iter()
                    .map(|binding| {
                        let bind_ident = &binding.binding;
                        // Tuple-struct fields have no ident; fall back to the
                        // positional index as the field name.
                        let field_name = binding
                            .ast()
                            .ident
                            .as_ref()
                            .map_or_else(|| field_idx.to_string(), |i| i.to_string());
                        let result = quote! {
                            match ::rustc_serialize::Encoder::emit_struct_field(
                                __encoder,
                                #field_name,
                                #field_idx,
                                |__encoder|
                                    ::rustc_serialize::Encodable::encode(#bind_ident, __encoder),
                            ) {
                                ::std::result::Result::Ok(()) => (),
                                ::std::result::Result::Err(__err)
                                    => return ::std::result::Result::Err(__err),
                            }
                        };
                        field_idx += 1;
                        result
                    })
                    .collect::<TokenStream>()
            });
            quote! {
                ::rustc_serialize::Encoder::emit_struct(__encoder, #ty_name, #field_idx, |__encoder| {
                    ::std::result::Result::Ok(match *self { #encode_inner })
                })
            }
        }
        // Zero or multiple variants: encode as an enum.
        _ => {
            let mut variant_idx = 0usize;
            let encode_inner = s.each_variant(|vi| {
                let variant_name = vi.ast().ident.to_string();
                // Per-variant field counter; its final value is the argument
                // count passed to `emit_enum_variant`.
                let mut field_idx = 0usize;

                let encode_fields: TokenStream = vi
                    .bindings()
                    .iter()
                    .map(|binding| {
                        let bind_ident = &binding.binding;
                        let result = quote! {
                            match ::rustc_serialize::Encoder::emit_enum_variant_arg(
                                __encoder,
                                #field_idx,
                                |__encoder|
                                    ::rustc_serialize::Encodable::encode(#bind_ident, __encoder),
                            ) {
                                ::std::result::Result::Ok(()) => (),
                                ::std::result::Result::Err(__err)
                                    => return ::std::result::Result::Err(__err),
                            }
                        };
                        field_idx += 1;
                        result
                    })
                    .collect();

                let result = quote! { ::rustc_serialize::Encoder::emit_enum_variant(
                    __encoder,
                    #variant_name,
                    #variant_idx,
                    #field_idx,
                    |__encoder| { ::std::result::Result::Ok({ #encode_fields }) }
                ) };
                variant_idx += 1;
                result
            });
            quote! {
                ::rustc_serialize::Encoder::emit_enum(__encoder, #ty_name, |__encoder| {
                    match *self {
                        #encode_inner
                    }
                })
            }
        }
    };

    let lints = if allow_unreachable_code {
        quote! { #![allow(unreachable_code)] }
    } else {
        quote! {}
    };

    s.bound_impl(
        quote!(::rustc_serialize::Encodable<#encoder_ty>),
        quote! {
            fn encode(
                &self,
                __encoder: &mut #encoder_ty,
            ) -> ::std::result::Result<(), <#encoder_ty as ::rustc_serialize::Encoder>::Error> {
                #lints
                #encode_body
            }
        },
    )
}
|
195
compiler/rustc_macros/src/symbols.rs
Normal file
195
compiler/rustc_macros/src/symbols.rs
Normal file
|
@ -0,0 +1,195 @@
|
|||
use proc_macro::TokenStream;
|
||||
use quote::quote;
|
||||
use std::collections::HashSet;
|
||||
use syn::parse::{Parse, ParseStream, Result};
|
||||
use syn::{braced, parse_macro_input, Ident, LitStr, Token};
|
||||
|
||||
// Custom keyword tokens recognized at the top level of the macro input:
// `Keywords { ... }` and `Symbols { ... }`. `non_camel_case_types` is allowed
// because `custom_keyword!` names the generated token types after the
// keywords themselves.
#[allow(non_camel_case_types)]
mod kw {
    syn::custom_keyword!(Keywords);
    syn::custom_keyword!(Symbols);
}
|
||||
|
||||
/// One `Name: "text",` entry from the `Keywords { ... }` section.
struct Keyword {
    // Identifier used for the generated `Symbol` constant.
    name: Ident,
    // String literal interned for this keyword.
    value: LitStr,
}
|
||||
|
||||
impl Parse for Keyword {
|
||||
fn parse(input: ParseStream<'_>) -> Result<Self> {
|
||||
let name = input.parse()?;
|
||||
input.parse::<Token![:]>()?;
|
||||
let value = input.parse()?;
|
||||
input.parse::<Token![,]>()?;
|
||||
|
||||
Ok(Keyword { name, value })
|
||||
}
|
||||
}
|
||||
|
||||
/// One entry from the `Symbols { ... }` section: either `name,` or
/// `name: "text",`.
struct Symbol {
    // Identifier used for the generated `Symbol` constant; its text also
    // doubles as the interned string when no explicit value is given.
    name: Ident,
    // Optional explicit string to intern instead of the identifier text.
    value: Option<LitStr>,
}
|
||||
|
||||
impl Parse for Symbol {
|
||||
fn parse(input: ParseStream<'_>) -> Result<Self> {
|
||||
let name = input.parse()?;
|
||||
let value = match input.parse::<Token![:]>() {
|
||||
Ok(_) => Some(input.parse()?),
|
||||
Err(_) => None,
|
||||
};
|
||||
input.parse::<Token![,]>()?;
|
||||
|
||||
Ok(Symbol { name, value })
|
||||
}
|
||||
}
|
||||
|
||||
/// A type used to greedily parse another type until the input is empty.
///
/// The collected items are kept in declaration order.
struct List<T>(Vec<T>);
|
||||
|
||||
impl<T: Parse> Parse for List<T> {
|
||||
fn parse(input: ParseStream<'_>) -> Result<Self> {
|
||||
let mut list = Vec::new();
|
||||
while !input.is_empty() {
|
||||
list.push(input.parse()?);
|
||||
}
|
||||
Ok(List(list))
|
||||
}
|
||||
}
|
||||
|
||||
/// The full input to the macro: a `Keywords { ... }` section followed by a
/// `Symbols { ... }` section.
struct Input {
    // Entries of the `Keywords { ... }` block.
    keywords: List<Keyword>,
    // Entries of the `Symbols { ... }` block.
    symbols: List<Symbol>,
}
|
||||
|
||||
impl Parse for Input {
|
||||
fn parse(input: ParseStream<'_>) -> Result<Self> {
|
||||
input.parse::<kw::Keywords>()?;
|
||||
let content;
|
||||
braced!(content in input);
|
||||
let keywords = content.parse()?;
|
||||
|
||||
input.parse::<kw::Symbols>()?;
|
||||
let content;
|
||||
braced!(content in input);
|
||||
let symbols = content.parse()?;
|
||||
|
||||
Ok(Input { keywords, symbols })
|
||||
}
|
||||
}
|
||||
|
||||
/// Expands a `Keywords { ... } Symbols { ... }` invocation into:
/// - a `keywords!` macro emitting one `Symbol` constant per keyword,
/// - a `define_symbols!` macro emitting one constant per symbol plus a
///   `digits_array` of the symbols for `"0"`..`"9"`,
/// - an `Interner::fresh` constructor prefilling the interner with every
///   string in declaration order, so constant indices match interner indices.
///
/// Panics (after printing each problem) on duplicate strings or on symbols
/// that are not listed in sorted order.
pub fn symbols(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as Input);

    let mut keyword_stream = quote! {};
    let mut symbols_stream = quote! {};
    let mut digits_stream = quote! {};
    let mut prefill_stream = quote! {};
    // Running index shared by all three passes; each generated constant's
    // value is its position in `prefill_stream`.
    let mut counter = 0u32;
    let mut keys = HashSet::<String>::new();
    let mut prev_key: Option<String> = None;
    let mut errors = Vec::<String>::new();

    // `errors` is threaded through as a parameter (rather than captured) so
    // both closures can be live in the same scope without conflicting
    // mutable captures.
    let mut check_dup = |str: &str, errors: &mut Vec<String>| {
        if !keys.insert(str.to_string()) {
            errors.push(format!("Symbol `{}` is duplicated", str));
        }
    };

    // Enforces sorted order relative to the previously seen key; applied to
    // symbols only (keywords are exempt below).
    let mut check_order = |str: &str, errors: &mut Vec<String>| {
        if let Some(ref prev_str) = prev_key {
            if str < prev_str {
                errors.push(format!("Symbol `{}` must precede `{}`", str, prev_str));
            }
        }
        prev_key = Some(str.to_string());
    };

    // Generate the listed keywords.
    for keyword in &input.keywords.0 {
        let name = &keyword.name;
        let value = &keyword.value;
        check_dup(&value.value(), &mut errors);
        prefill_stream.extend(quote! {
            #value,
        });
        keyword_stream.extend(quote! {
            #[allow(non_upper_case_globals)]
            pub const #name: Symbol = Symbol::new(#counter);
        });
        counter += 1;
    }

    // Generate the listed symbols.
    for symbol in &input.symbols.0 {
        let name = &symbol.name;
        // A symbol without an explicit value interns its own identifier text.
        let value = match &symbol.value {
            Some(value) => value.value(),
            None => name.to_string(),
        };
        check_dup(&value, &mut errors);
        check_order(&name.to_string(), &mut errors);
        prefill_stream.extend(quote! {
            #value,
        });
        symbols_stream.extend(quote! {
            #[allow(rustc::default_hash_types)]
            #[allow(non_upper_case_globals)]
            pub const #name: Symbol = Symbol::new(#counter);
        });
        counter += 1;
    }

    // Generate symbols for the strings "0", "1", ..., "9".
    for n in 0..10 {
        let n = n.to_string();
        check_dup(&n, &mut errors);
        prefill_stream.extend(quote! {
            #n,
        });
        digits_stream.extend(quote! {
            Symbol::new(#counter),
        });
        counter += 1;
    }

    // Report every accumulated problem, then abort the compilation.
    if !errors.is_empty() {
        for error in errors.into_iter() {
            eprintln!("error: {}", error)
        }
        panic!("errors in `Keywords` and/or `Symbols`");
    }

    let tt = TokenStream::from(quote! {
        macro_rules! keywords {
            () => {
                #keyword_stream
            }
        }

        macro_rules! define_symbols {
            () => {
                #symbols_stream

                #[allow(non_upper_case_globals)]
                pub const digits_array: &[Symbol; 10] = &[
                    #digits_stream
                ];
            }
        }

        impl Interner {
            pub fn fresh() -> Self {
                Interner::prefill(&[
                    #prefill_stream
                ])
            }
        }
    });

    // To see the generated code, uncomment this line, recompile, and
    // run the resulting output through `rustfmt`.
    //eprintln!("{}", tt);

    tt
}
|
40
compiler/rustc_macros/src/type_foldable.rs
Normal file
40
compiler/rustc_macros/src/type_foldable.rs
Normal file
|
@ -0,0 +1,40 @@
|
|||
use quote::quote;
|
||||
|
||||
pub fn type_foldable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
|
||||
if let syn::Data::Union(_) = s.ast().data {
|
||||
panic!("cannot derive on union")
|
||||
}
|
||||
|
||||
s.add_bounds(synstructure::AddBounds::Generics);
|
||||
let body_fold = s.each_variant(|vi| {
|
||||
let bindings = vi.bindings();
|
||||
vi.construct(|_, index| {
|
||||
let bind = &bindings[index];
|
||||
quote! {
|
||||
::rustc_middle::ty::fold::TypeFoldable::fold_with(#bind, __folder)
|
||||
}
|
||||
})
|
||||
});
|
||||
let body_visit = s.fold(false, |acc, bind| {
|
||||
quote! { #acc || ::rustc_middle::ty::fold::TypeFoldable::visit_with(#bind, __folder) }
|
||||
});
|
||||
|
||||
s.bound_impl(
|
||||
quote!(::rustc_middle::ty::fold::TypeFoldable<'tcx>),
|
||||
quote! {
|
||||
fn super_fold_with<__F: ::rustc_middle::ty::fold::TypeFolder<'tcx>>(
|
||||
&self,
|
||||
__folder: &mut __F
|
||||
) -> Self {
|
||||
match *self { #body_fold }
|
||||
}
|
||||
|
||||
fn super_visit_with<__F: ::rustc_middle::ty::fold::TypeVisitor<'tcx>>(
|
||||
&self,
|
||||
__folder: &mut __F
|
||||
) -> bool {
|
||||
match *self { #body_visit }
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue