Auto merge of #66279 - cjgillot:hashstable, r=Zoxc
Use proc-macro to derive HashStable everywhere

Hello,

A second proc-macro is added to derive `HashStable` for the crates that librustc depends on. This proc-macro, `HashStable_Generic` (name open to bikeshedding), makes it possible to decouple the code and strip out much of librustc's boilerplate. Two implementations, for `Span` and `TokenKind`, still have to live in librustc; the latter only because it depends on the `bug` macro. Advice is welcome on how to sever that link.

A trait `StableHashingContextLike` has been introduced at each crate root, to handle the implementations that need librustc's own `StableHashingContext`. Overall, this effort made it possible to remove the `impl_stable_hash_for` macro.

Each commit passes `x.py check`. I still have to double-check that there was no change in the implementations.
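For illustration only (this sketch is not taken from the PR): a crate below librustc that defines a `HashStableContext` trait at its root can now derive stable hashing directly. The type and field names here are hypothetical; `#[stable_hasher(ignore)]` is the same attribute the PR uses on `ExpnData::parent`.

use rustc_macros::HashStable_Generic;
use syntax_pos::{Span, Symbol};

#[derive(HashStable_Generic)]
pub struct ExampleNode {            // hypothetical type, for illustration
    pub name: Symbol,
    pub span: Span,
    #[stable_hasher(ignore)]        // skipped when computing the stable hash
    pub id: u32,
}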
commit 582a4eaee6
18 changed files with 199 additions and 427 deletions
@@ -2,26 +2,23 @@ use crate::hir;
 use crate::hir::def_id::{DefId, DefIndex};
 use crate::hir::map::DefPathHash;
 use crate::hir::map::definitions::Definitions;
-use crate::ich::{self, CachingSourceMapView, Fingerprint};
+use crate::ich::{self, CachingSourceMapView};
 use crate::middle::cstore::CrateStore;
 use crate::ty::{TyCtxt, fast_reject};
 use crate::session::Session;

 use std::cmp::Ord;
-use std::hash as std_hash;
-use std::cell::RefCell;

 use syntax::ast;
 use syntax::source_map::SourceMap;
 use syntax::symbol::Symbol;
-use syntax::tokenstream::DelimSpan;
-use syntax_pos::{Span, DUMMY_SP};
-use syntax_pos::hygiene::{self, SyntaxContext};
+use syntax_pos::{SourceFile, BytePos};

 use rustc_data_structures::stable_hasher::{
     HashStable, StableHasher, ToStableHashKey,
 };
 use rustc_data_structures::fx::{FxHashSet, FxHashMap};
+use rustc_data_structures::sync::Lrc;
 use smallvec::SmallVec;

 fn compute_ignored_attr_names() -> FxHashSet<Symbol> {

@@ -281,93 +278,15 @@ impl<'a> ToStableHashKey<StableHashingContext<'a>> for ast::NodeId {
     }
 }

-impl<'a> HashStable<StableHashingContext<'a>> for Span {
-    /// Hashes a span in a stable way. We can't directly hash the span's `BytePos`
-    /// fields (that would be similar to hashing pointers, since those are just
-    /// offsets into the `SourceMap`). Instead, we hash the (file name, line, column)
-    /// triple, which stays the same even if the containing `SourceFile` has moved
-    /// within the `SourceMap`.
-    /// Also note that we are hashing byte offsets for the column, not unicode
-    /// codepoint offsets. For the purpose of the hash that's sufficient.
-    /// Also, hashing filenames is expensive so we avoid doing it twice when the
-    /// span starts and ends in the same file, which is almost always the case.
-    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
-        const TAG_VALID_SPAN: u8 = 0;
-        const TAG_INVALID_SPAN: u8 = 1;
-        const TAG_EXPANSION: u8 = 0;
-        const TAG_NO_EXPANSION: u8 = 1;
-
-        if !hcx.hash_spans {
-            return
-        }
-
-        if *self == DUMMY_SP {
-            return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
-        }
-
-        // If this is not an empty or invalid span, we want to hash the last
-        // position that belongs to it, as opposed to hashing the first
-        // position past it.
-        let span = self.data();
-        let (file_lo, line_lo, col_lo) = match hcx.source_map()
-            .byte_pos_to_line_and_col(span.lo) {
-            Some(pos) => pos,
-            None => {
-                return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
-            }
-        };
-
-        if !file_lo.contains(span.hi) {
-            return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
-        }
-
-        std_hash::Hash::hash(&TAG_VALID_SPAN, hasher);
-        // We truncate the stable ID hash and line and column numbers. The chances
-        // of causing a collision this way should be minimal.
-        std_hash::Hash::hash(&(file_lo.name_hash as u64), hasher);
-
-        let col = (col_lo.0 as u64) & 0xFF;
-        let line = ((line_lo as u64) & 0xFF_FF_FF) << 8;
-        let len = ((span.hi - span.lo).0 as u64) << 32;
-        let line_col_len = col | line | len;
-        std_hash::Hash::hash(&line_col_len, hasher);
-
-        if span.ctxt == SyntaxContext::root() {
-            TAG_NO_EXPANSION.hash_stable(hcx, hasher);
-        } else {
-            TAG_EXPANSION.hash_stable(hcx, hasher);
-
-            // Since the same expansion context is usually referenced many
-            // times, we cache a stable hash of it and hash that instead of
-            // recursing every time.
-            thread_local! {
-                static CACHE: RefCell<FxHashMap<hygiene::ExpnId, u64>> = Default::default();
-            }
-
-            let sub_hash: u64 = CACHE.with(|cache| {
-                let expn_id = span.ctxt.outer_expn();
-
-                if let Some(&sub_hash) = cache.borrow().get(&expn_id) {
-                    return sub_hash;
-                }
-
-                let mut hasher = StableHasher::new();
-                expn_id.expn_data().hash_stable(hcx, &mut hasher);
-                let sub_hash: Fingerprint = hasher.finish();
-                let sub_hash = sub_hash.to_smaller_hash();
-                cache.borrow_mut().insert(expn_id, sub_hash);
-                sub_hash
-            });
-
-            sub_hash.hash_stable(hcx, hasher);
-        }
-    }
-}
-
-impl<'a> HashStable<StableHashingContext<'a>> for DelimSpan {
-    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
-        self.open.hash_stable(hcx, hasher);
-        self.close.hash_stable(hcx, hasher);
+impl<'a> syntax_pos::HashStableContext for StableHashingContext<'a> {
+    fn hash_spans(&self) -> bool {
+        self.hash_spans
+    }
+
+    fn byte_pos_to_line_and_col(&mut self, byte: BytePos)
+        -> Option<(Lrc<SourceFile>, usize, BytePos)>
+    {
+        self.source_map().byte_pos_to_line_and_col(byte)
     }
 }

@@ -9,7 +9,6 @@ use crate::ich::{StableHashingContext, NodeIdHashingMode, Fingerprint};
 use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, StableHasher};
 use smallvec::SmallVec;
 use std::mem;
-use syntax::ast;
 use syntax::attr;

 impl<'a> HashStable<StableHashingContext<'a>> for DefId {

@@ -119,10 +118,6 @@ impl<'a> HashStable<StableHashingContext<'a>> for hir::ImplItemId {
     }
 }

-impl_stable_hash_for!(struct ast::Label {
-    ident
-});
-
 impl<'a> HashStable<StableHashingContext<'a>> for hir::Ty {
     fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
         hcx.while_hashing_hir_bodies(true, |hcx| {

@@ -138,10 +133,6 @@ impl<'a> HashStable<StableHashingContext<'a>> for hir::Ty {
     }
 }

-impl_stable_hash_for_spanned!(hir::BinOpKind);
-
-impl_stable_hash_for_spanned!(ast::Name);
-
 impl<'a> HashStable<StableHashingContext<'a>> for hir::Expr {
     fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
         hcx.while_hashing_hir_bodies(true, |hcx| {

@@ -159,13 +150,6 @@ impl<'a> HashStable<StableHashingContext<'a>> for hir::Expr {
     }
 }

-impl_stable_hash_for_spanned!(usize);
-
-impl_stable_hash_for!(struct ast::Ident {
-    name,
-    span,
-});
-
 impl<'a> HashStable<StableHashingContext<'a>> for hir::TraitItem {
     fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
         let hir::TraitItem {

@@ -234,8 +218,6 @@ impl<'a> HashStable<StableHashingContext<'a>> for hir::VisibilityKind {
     }
 }

-impl_stable_hash_for_spanned!(hir::VisibilityKind);
-
 impl<'a> HashStable<StableHashingContext<'a>> for hir::Mod {
     fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
         let hir::Mod {

@@ -263,9 +245,6 @@ impl<'a> HashStable<StableHashingContext<'a>> for hir::Mod {
     }
 }

-impl_stable_hash_for_spanned!(hir::Variant);
-
-
 impl<'a> HashStable<StableHashingContext<'a>> for hir::Item {
     fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
         let hir::Item {

@@ -3,13 +3,8 @@
 use crate::ich::StableHashingContext;

-use std::hash as std_hash;
-use std::mem;
-
 use syntax::ast;
 use syntax::feature_gate;
-use syntax::token;
-use syntax::tokenstream;
 use syntax_pos::SourceFile;

 use crate::hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX};

@@ -17,15 +12,14 @@ use crate::hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX};
 use smallvec::SmallVec;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};

-impl_stable_hash_for!(struct ::syntax::ast::Lit {
-    kind,
-    token,
-    span
-});
+impl<'ctx> rustc_target::HashStableContext for StableHashingContext<'ctx> {}

-impl_stable_hash_for_spanned!(::syntax::ast::LitKind);
-
-impl_stable_hash_for!(struct ::syntax::ast::Lifetime { id, ident });
+impl<'a> HashStable<StableHashingContext<'a>> for ast::Lifetime {
+    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
+        self.id.hash_stable(hcx, hasher);
+        self.ident.hash_stable(hcx, hasher);
+    }
+}

 impl<'a> HashStable<StableHashingContext<'a>> for [ast::Attribute] {
     fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {

@@ -50,20 +44,6 @@ impl<'a> HashStable<StableHashingContext<'a>> for [ast::Attribute] {
     }
 }

-impl<'a> HashStable<StableHashingContext<'a>> for ast::Path {
-    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
-        self.segments.len().hash_stable(hcx, hasher);
-        for segment in &self.segments {
-            segment.ident.name.hash_stable(hcx, hasher);
-        }
-    }
-}
-
-impl_stable_hash_for!(struct ::syntax::ast::AttrItem {
-    path,
-    tokens,
-});
-
 impl<'a> HashStable<StableHashingContext<'a>> for ast::Attribute {
     fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
         // Make sure that these have been filtered out.

@@ -81,129 +61,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for ast::Attribute {
     }
 }

-impl<'a> HashStable<StableHashingContext<'a>>
-for tokenstream::TokenTree {
-    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
-        mem::discriminant(self).hash_stable(hcx, hasher);
-        match *self {
-            tokenstream::TokenTree::Token(ref token) => {
-                token.hash_stable(hcx, hasher);
-            }
-            tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
-                span.hash_stable(hcx, hasher);
-                std_hash::Hash::hash(&delim, hasher);
-                for sub_tt in tts.trees() {
-                    sub_tt.hash_stable(hcx, hasher);
-                }
-            }
-        }
-    }
-}
-
-impl<'a> HashStable<StableHashingContext<'a>>
-for tokenstream::TokenStream {
-    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
-        for sub_tt in self.trees() {
-            sub_tt.hash_stable(hcx, hasher);
-        }
-    }
-}
-
-impl<'a> HashStable<StableHashingContext<'a>> for token::TokenKind {
-    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
-        mem::discriminant(self).hash_stable(hcx, hasher);
-        match *self {
-            token::Eq |
-            token::Lt |
-            token::Le |
-            token::EqEq |
-            token::Ne |
-            token::Ge |
-            token::Gt |
-            token::AndAnd |
-            token::OrOr |
-            token::Not |
-            token::Tilde |
-            token::At |
-            token::Dot |
-            token::DotDot |
-            token::DotDotDot |
-            token::DotDotEq |
-            token::Comma |
-            token::Semi |
-            token::Colon |
-            token::ModSep |
-            token::RArrow |
-            token::LArrow |
-            token::FatArrow |
-            token::Pound |
-            token::Dollar |
-            token::Question |
-            token::SingleQuote |
-            token::Whitespace |
-            token::Comment |
-            token::Eof => {}
-
-            token::BinOp(bin_op_token) |
-            token::BinOpEq(bin_op_token) => {
-                std_hash::Hash::hash(&bin_op_token, hasher);
-            }
-
-            token::OpenDelim(delim_token) |
-            token::CloseDelim(delim_token) => {
-                std_hash::Hash::hash(&delim_token, hasher);
-            }
-            token::Literal(lit) => lit.hash_stable(hcx, hasher),
-
-            token::Ident(name, is_raw) => {
-                name.hash_stable(hcx, hasher);
-                is_raw.hash_stable(hcx, hasher);
-            }
-            token::Lifetime(name) => name.hash_stable(hcx, hasher),
-
-            token::Interpolated(_) => {
-                bug!("interpolated tokens should not be present in the HIR")
-            }
-
-            token::DocComment(val) |
-            token::Shebang(val) |
-            token::Unknown(val) => val.hash_stable(hcx, hasher),
-        }
-    }
-}
-
-impl_stable_hash_for!(struct token::Token {
-    kind,
-    span
-});
-
-impl_stable_hash_for!(enum ::syntax::ast::NestedMetaItem {
-    MetaItem(meta_item),
-    Literal(lit)
-});
-
-impl_stable_hash_for!(struct ::syntax::ast::MetaItem {
-    path,
-    kind,
-    span
-});
-
-impl_stable_hash_for!(enum ::syntax::ast::MetaItemKind {
-    Word,
-    List(nested_items),
-    NameValue(lit)
-});
-
-impl_stable_hash_for!(struct ::syntax_pos::hygiene::ExpnData {
-    kind,
-    parent -> _,
-    call_site,
-    def_site,
-    allow_internal_unstable,
-    allow_internal_unsafe,
-    local_inner_macros,
-    edition
-});
-
+impl<'ctx> syntax::HashStableContext for StableHashingContext<'ctx> {}

 impl<'a> HashStable<StableHashingContext<'a>> for SourceFile {
     fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {

@@ -1,12 +1,11 @@
 //! ICH - Incremental Compilation Hash

 crate use rustc_data_structures::fingerprint::Fingerprint;
-pub use self::caching_source_map_view::CachingSourceMapView;
+pub use syntax_pos::CachingSourceMapView;
 pub use self::hcx::{StableHashingContextProvider, StableHashingContext, NodeIdHashingMode,
                     hash_stable_trait_impls};
 use syntax::symbol::{Symbol, sym};

-mod caching_source_map_view;
 mod hcx;

 mod impls_hir;

@@ -1,5 +1,3 @@
-// ignore-tidy-linelength
-
 macro_rules! enum_from_u32 {
     ($(#[$attr:meta])* pub enum $name:ident {
         $($variant:ident = $e:expr,)*

@@ -52,137 +50,6 @@ macro_rules! span_bug {
     })
 }

-#[macro_export]
-macro_rules! __impl_stable_hash_field {
-    ($field:ident, $ctx:expr, $hasher:expr) => ($field.hash_stable($ctx, $hasher));
-    ($field:ident, $ctx:expr, $hasher:expr, _) => ({ let _ = $field; });
-    ($field:ident, $ctx:expr, $hasher:expr, $delegate:expr) => ($delegate.hash_stable($ctx, $hasher));
-}
-
-#[macro_export]
-macro_rules! impl_stable_hash_for {
-    // Enums
-    (enum $enum_name:path {
-        $( $variant:ident
-            // this incorrectly allows specifying both tuple-like and struct-like fields, as in `Variant(a,b){c,d}`,
-            // when it should be only one or the other
-            $( ( $($field:ident $(-> $delegate:tt)?),* ) )?
-            $( { $($named_field:ident $(-> $named_delegate:tt)?),* } )?
-        ),* $(,)?
-    }) => {
-        impl_stable_hash_for!(
-            impl<> for enum $enum_name [ $enum_name ] { $( $variant
-                $( ( $($field $(-> $delegate)?),* ) )?
-                $( { $($named_field $(-> $named_delegate)?),* } )?
-            ),* }
-        );
-    };
-    // We want to use the enum name both in the `impl ... for $enum_name` as well as for
-    // importing all the variants. Unfortunately it seems we have to take the name
-    // twice for this purpose
-    (impl<$($T:ident),* $(,)?>
-        for enum $enum_name:path
-        [ $enum_path:path ]
-        {
-        $( $variant:ident
-            // this incorrectly allows specifying both tuple-like and struct-like fields, as in `Variant(a,b){c,d}`,
-            // when it should be only one or the other
-            $( ( $($field:ident $(-> $delegate:tt)?),* ) )?
-            $( { $($named_field:ident $(-> $named_delegate:tt)?),* } )?
-        ),* $(,)?
-    }) => {
-        impl<$($T,)*>
-            ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>
-            for $enum_name
-            where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),*
-        {
-            #[inline]
-            fn hash_stable(&self,
-                           __ctx: &mut $crate::ich::StableHashingContext<'a>,
-                           __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher) {
-                use $enum_path::*;
-                ::std::mem::discriminant(self).hash_stable(__ctx, __hasher);
-
-                match *self {
-                    $(
-                        $variant $( ( $(ref $field),* ) )? $( { $(ref $named_field),* } )? => {
-                            $($( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*)?
-                            $($( __impl_stable_hash_field!($named_field, __ctx, __hasher $(, $named_delegate)?) );*)?
-                        }
-                    )*
-                }
-            }
-        }
-    };
-    // Structs
-    (struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => {
-        impl_stable_hash_for!(
-            impl<> for struct $struct_name { $($field $(-> $delegate)?),* }
-        );
-    };
-    (impl<$($T:ident),* $(,)?> for struct $struct_name:path {
-        $($field:ident $(-> $delegate:tt)?),* $(,)?
-    }) => {
-        impl<$($T,)*>
-            ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name
-            where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),*
-        {
-            #[inline]
-            fn hash_stable(&self,
-                           __ctx: &mut $crate::ich::StableHashingContext<'a>,
-                           __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher) {
-                let $struct_name {
-                    $(ref $field),*
-                } = *self;
-
-                $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*
-            }
-        }
-    };
-    // Tuple structs
-    // We cannot use normal parentheses here, the parser won't allow it
-    (tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => {
-        impl_stable_hash_for!(
-            impl<> for tuple_struct $struct_name { $($field $(-> $delegate)?),* }
-        );
-    };
-    (impl<$($T:ident),* $(,)?>
-     for tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => {
-        impl<$($T,)*>
-            ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name
-            where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),*
-        {
-            #[inline]
-            fn hash_stable(&self,
-                           __ctx: &mut $crate::ich::StableHashingContext<'a>,
-                           __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher) {
-                let $struct_name (
-                    $(ref $field),*
-                ) = *self;
-
-                $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*
-            }
-        }
-    };
-}
-
-#[macro_export]
-macro_rules! impl_stable_hash_for_spanned {
-    ($T:path) => (
-
-        impl HashStable<StableHashingContext<'a>> for ::syntax::source_map::Spanned<$T>
-        {
-            #[inline]
-            fn hash_stable(&self,
-                           hcx: &mut StableHashingContext<'a>,
-                           hasher: &mut StableHasher) {
-                self.node.hash_stable(hcx, hasher);
-                self.span.hash_stable(hcx, hasher);
-            }
-        }
-    );
-}
-
 ///////////////////////////////////////////////////////////////////////////
 // Lift and TypeFoldable macros
 //
@@ -51,6 +51,7 @@ pub fn hash_stable_generic_derive(mut s: synstructure::Structure<'_>) -> proc_ma
     let generic: syn::GenericParam = parse_quote!(__CTX);
     s.add_bounds(synstructure::AddBounds::Generics);
     s.add_impl_generic(generic);
+    s.add_where_predicate(parse_quote!{ __CTX: crate::HashStableContext });
     let body = s.each(|bi| {
         let attrs = parse_attributes(bi.ast());
         if attrs.ignore {
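For orientation (an editorial sketch, not part of the diff): with the `__CTX` generic and the where-predicate added above, the derive emits an implementation that is generic over the deriving crate's own context trait. For a type such as `ast::Label` further down in this patch, the generated code has roughly this shape; identifier names are illustrative.

impl<__CTX> HashStable<__CTX> for Label
    where __CTX: HashStableContext   // the deriving crate's own trait
{
    fn hash_stable(&self, hcx: &mut __CTX, hasher: &mut StableHasher) {
        // one call per field not marked #[stable_hasher(ignore)]
        self.ident.hash_stable(hcx, hasher);
    }
}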
@@ -3,6 +3,7 @@ use std::fmt::Write;
 use std::mem;

 use syntax::source_map::{self, Span, DUMMY_SP};
+use rustc::ich::StableHashingContext;
 use rustc::hir::def_id::DefId;
 use rustc::hir::def::DefKind;
 use rustc::mir;

@@ -18,6 +19,7 @@ use rustc::mir::interpret::{
     InterpResult, truncate, sign_extend,
 };
 use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_macros::HashStable;

 use super::{

@@ -829,3 +831,21 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         frames
     }
 }
+
+impl<'ctx, 'mir, 'tcx, Tag, Extra> HashStable<StableHashingContext<'ctx>>
+    for Frame<'mir, 'tcx, Tag, Extra>
+    where Extra: HashStable<StableHashingContext<'ctx>>,
+          Tag: HashStable<StableHashingContext<'ctx>>
+{
+    fn hash_stable(&self, hcx: &mut StableHashingContext<'ctx>, hasher: &mut StableHasher) {
+        self.body.hash_stable(hcx, hasher);
+        self.instance.hash_stable(hcx, hasher);
+        self.span.hash_stable(hcx, hasher);
+        self.return_to_block.hash_stable(hcx, hasher);
+        self.return_place.as_ref().map(|r| &**r).hash_stable(hcx, hasher);
+        self.locals.hash_stable(hcx, hasher);
+        self.block.hash_stable(hcx, hasher);
+        self.stmt.hash_stable(hcx, hasher);
+        self.extra.hash_stable(hcx, hasher);
+    }
+}

@@ -304,18 +304,6 @@ struct FrameSnapshot<'a, 'tcx> {
     stmt: usize,
 }

-impl_stable_hash_for!(impl<> for struct Frame<'mir, 'tcx> {
-    body,
-    instance,
-    span,
-    return_to_block,
-    return_place -> (return_place.as_ref().map(|r| &**r)),
-    locals,
-    block,
-    stmt,
-    extra,
-});
-
 impl<'a, 'mir, 'tcx, Ctx> Snapshot<'a, Ctx> for &'a Frame<'mir, 'tcx>
     where Ctx: SnapshotContext<'a>,
 {

@@ -17,3 +17,8 @@
 pub mod abi;
 pub mod spec;

+/// Requirements for a `StableHashingContext` to be used in this crate.
+/// This is a hack to allow using the `HashStable_Generic` derive macro
+/// instead of implementing everything in librustc.
+pub trait HashStableContext {}

@@ -33,6 +33,7 @@ use syntax_pos::symbol::{kw, sym, Symbol};
 use syntax_pos::{Span, DUMMY_SP, ExpnId};

 use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::sync::Lrc;
 use rustc_data_structures::thin_vec::ThinVec;
 use rustc_index::vec::Idx;

@@ -54,7 +55,7 @@ mod tests;
 /// ```
 ///
 /// `'outer` is a label.
-#[derive(Clone, RustcEncodable, RustcDecodable, Copy)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Copy, HashStable_Generic)]
 pub struct Label {
     pub ident: Ident,
 }

@@ -112,6 +113,15 @@ impl PartialEq<Symbol> for Path {
     }
 }

+impl<CTX> HashStable<CTX> for Path {
+    fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
+        self.segments.len().hash_stable(hcx, hasher);
+        for segment in &self.segments {
+            segment.ident.name.hash_stable(hcx, hasher);
+        }
+    }
+}
+
 impl Path {
     // Convert a span and an identifier to the corresponding
     // one-segment path.

@@ -473,7 +483,7 @@ pub struct Crate {
 /// Possible values inside of compile-time attribute lists.
 ///
 /// E.g., the '..' in `#[name(..)]`.
-#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
 pub enum NestedMetaItem {
     /// A full MetaItem, for recursive meta items.
     MetaItem(MetaItem),

@@ -486,7 +496,7 @@ pub enum NestedMetaItem {
 /// A spanned compile-time attribute item.
 ///
 /// E.g., `#[test]`, `#[derive(..)]`, `#[rustfmt::skip]` or `#[feature = "foo"]`.
-#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
 pub struct MetaItem {
     pub path: Path,
     pub kind: MetaItemKind,

@@ -496,7 +506,7 @@ pub struct MetaItem {
 /// A compile-time attribute item.
 ///
 /// E.g., `#[test]`, `#[derive(..)]` or `#[feature = "foo"]`.
-#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
 pub enum MetaItemKind {
     /// Word meta item.
     ///

@@ -1426,7 +1436,7 @@ pub enum StrStyle {
 }

 /// An AST literal.
-#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
 pub struct Lit {
     /// The original literal token as written in source code.
     pub token: token::Lit,

@@ -2286,7 +2296,7 @@ impl rustc_serialize::Decodable for AttrId {
     }
 }

-#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
 pub struct AttrItem {
     pub path: Path,
     pub tokens: TokenStream,

@@ -110,3 +110,8 @@ pub mod print {
 }

 pub mod early_buffered_lints;
+
+/// Requirements for a `StableHashingContext` to be used in this crate.
+/// This is a hack to allow using the `HashStable_Generic` derive macro
+/// instead of implementing everything in librustc.
+pub trait HashStableContext: syntax_pos::HashStableContext {}
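A note on how the per-crate context traits fit together (an editorial sketch with a simplified module layout, not code from this patch): syntax_pos owns the trait with the real span-hashing hooks, rustc_target and syntax declare their own traits (the latter refining syntax_pos's), and librustc's `StableHashingContext` opts in to all of them, as the hunks above and below show. Roughly:

mod syntax_pos {
    pub trait HashStableContext {
        fn hash_spans(&self) -> bool;
    }
}
mod rustc_target {
    pub trait HashStableContext {}
}
mod syntax {
    pub trait HashStableContext: crate::syntax_pos::HashStableContext {}
}

struct StableHashingContext { hash_spans: bool }   // simplified stand-in

impl syntax_pos::HashStableContext for StableHashingContext {
    fn hash_spans(&self) -> bool { self.hash_spans }
}
impl rustc_target::HashStableContext for StableHashingContext {}
impl syntax::HashStableContext for StableHashingContext {}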
@@ -14,10 +14,12 @@ use syntax_pos::{self, Span, DUMMY_SP};
 use std::fmt;
 use std::mem;
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::sync::Lrc;
 use rustc_macros::HashStable_Generic;

 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+#[derive(HashStable_Generic)]
 pub enum BinOpToken {
     Plus,
     Minus,

@@ -33,6 +35,7 @@ pub enum BinOpToken {

 /// A delimiter token.
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+#[derive(HashStable_Generic)]
 pub enum DelimToken {
     /// A round parenthesis (i.e., `(` or `)`).
     Paren,

@@ -190,7 +193,7 @@ fn ident_can_begin_type(name: ast::Name, span: Span, is_raw: bool) -> bool {
     ].contains(&name)
 }

-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
 pub enum TokenKind {
     /* Expression-operator symbols. */
     Eq,

@@ -262,7 +265,7 @@ pub enum TokenKind {
 #[cfg(target_arch = "x86_64")]
 rustc_data_structures::static_assert_size!(TokenKind, 16);

-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
 pub struct Token {
     pub kind: TokenKind,
     pub span: Span,

@@ -725,3 +728,11 @@ impl fmt::Debug for Nonterminal {
         }
     }
 }
+
+impl<CTX> HashStable<CTX> for Nonterminal
+    where CTX: crate::HashStableContext
+{
+    fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) {
+        panic!("interpolated tokens should not be present in the HIR")
+    }
+}

@@ -16,6 +16,8 @@
 use crate::token::{self, DelimToken, Token, TokenKind};

 use syntax_pos::{Span, DUMMY_SP};
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
+use rustc_macros::HashStable_Generic;
 use rustc_data_structures::sync::Lrc;
 use smallvec::{SmallVec, smallvec};

@@ -33,7 +35,7 @@ use std::{iter, mem};
 ///
 /// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
 /// Nothing special happens to misnamed or misplaced `SubstNt`s.
-#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
+#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable, HashStable_Generic)]
 pub enum TokenTree {
     /// A single token
     Token(Token),

@@ -115,6 +117,16 @@ impl TokenTree {
     }
 }

+impl<CTX> HashStable<CTX> for TokenStream
+    where CTX: crate::HashStableContext
+{
+    fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
+        for sub_tt in self.trees() {
+            sub_tt.hash_stable(hcx, hasher);
+        }
+    }
+}
+
 /// A `TokenStream` is an abstract sequence of tokens, organized into `TokenTree`s.
 ///
 /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s

@@ -444,7 +456,7 @@ impl Cursor {
     }
 }

-#[derive(Debug, Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)]
+#[derive(Debug, Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, HashStable_Generic)]
 pub struct DelimSpan {
     pub open: Span,
     pub close: Span,

@@ -1,6 +1,6 @@
 use rustc_data_structures::sync::Lrc;
-use syntax::source_map::SourceMap;
-use syntax_pos::{BytePos, SourceFile};
+use crate::source_map::SourceMap;
+use crate::{BytePos, SourceFile};

 #[derive(Clone)]
 struct CacheEntry {

@@ -616,12 +616,13 @@ impl Span {

 /// A subset of properties from both macro definition and macro call available through global data.
 /// Avoid using this if you have access to the original definition or call structures.
-#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
+#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable_Generic)]
 pub struct ExpnData {
     // --- The part unique to each expansion.
     /// The kind of this expansion - macro or compiler desugaring.
     pub kind: ExpnKind,
     /// The expansion that produced this expansion.
+    #[stable_hasher(ignore)]
     pub parent: ExpnId,
     /// The location of the actual macro invocation or syntax sugar , e.g.
     /// `let x = foo!();` or `if let Some(y) = x {}`

@@ -18,6 +18,8 @@ use rustc_serialize::{Encodable, Decodable, Encoder, Decoder};
 use rustc_macros::HashStable_Generic;

 pub mod source_map;
+mod caching_source_map_view;
+pub use self::caching_source_map_view::CachingSourceMapView;

 pub mod edition;
 use edition::Edition;

@@ -34,11 +36,13 @@ pub use symbol::{Symbol, sym};
 mod analyze_source_file;
 pub mod fatal_error;

-use rustc_data_structures::stable_hasher::StableHasher;
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
+use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::sync::{Lrc, Lock};
+use rustc_data_structures::fx::FxHashMap;

 use std::borrow::Cow;
-use std::cell::Cell;
+use std::cell::{Cell, RefCell};
 use std::cmp::{self, Ordering};
 use std::fmt;
 use std::hash::{Hasher, Hash};

@@ -1562,3 +1566,96 @@ fn lookup_line(lines: &[BytePos], pos: BytePos) -> isize {
         Err(line) => line as isize - 1
     }
 }
+
+/// Requirements for a `StableHashingContext` to be used in this crate.
+/// This is a hack to allow using the `HashStable_Generic` derive macro
+/// instead of implementing everything in librustc.
+pub trait HashStableContext {
+    fn hash_spans(&self) -> bool;
+    fn byte_pos_to_line_and_col(&mut self, byte: BytePos)
+        -> Option<(Lrc<SourceFile>, usize, BytePos)>;
+}
+
+impl<CTX> HashStable<CTX> for Span
+    where CTX: HashStableContext
+{
+    /// Hashes a span in a stable way. We can't directly hash the span's `BytePos`
+    /// fields (that would be similar to hashing pointers, since those are just
+    /// offsets into the `SourceMap`). Instead, we hash the (file name, line, column)
+    /// triple, which stays the same even if the containing `SourceFile` has moved
+    /// within the `SourceMap`.
+    /// Also note that we are hashing byte offsets for the column, not unicode
+    /// codepoint offsets. For the purpose of the hash that's sufficient.
+    /// Also, hashing filenames is expensive so we avoid doing it twice when the
+    /// span starts and ends in the same file, which is almost always the case.
+    fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
+        const TAG_VALID_SPAN: u8 = 0;
+        const TAG_INVALID_SPAN: u8 = 1;
+        const TAG_EXPANSION: u8 = 0;
+        const TAG_NO_EXPANSION: u8 = 1;
+
+        if !ctx.hash_spans() {
+            return
+        }
+
+        if *self == DUMMY_SP {
+            return std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
+        }
+
+        // If this is not an empty or invalid span, we want to hash the last
+        // position that belongs to it, as opposed to hashing the first
+        // position past it.
+        let span = self.data();
+        let (file_lo, line_lo, col_lo) = match ctx.byte_pos_to_line_and_col(span.lo) {
+            Some(pos) => pos,
+            None => {
+                return std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
+            }
+        };
+
+        if !file_lo.contains(span.hi) {
+            return std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
+        }
+
+        std::hash::Hash::hash(&TAG_VALID_SPAN, hasher);
+        // We truncate the stable ID hash and line and column numbers. The chances
+        // of causing a collision this way should be minimal.
+        std::hash::Hash::hash(&(file_lo.name_hash as u64), hasher);
+
+        let col = (col_lo.0 as u64) & 0xFF;
+        let line = ((line_lo as u64) & 0xFF_FF_FF) << 8;
+        let len = ((span.hi - span.lo).0 as u64) << 32;
+        let line_col_len = col | line | len;
+        std::hash::Hash::hash(&line_col_len, hasher);
+
+        if span.ctxt == SyntaxContext::root() {
+            TAG_NO_EXPANSION.hash_stable(ctx, hasher);
+        } else {
+            TAG_EXPANSION.hash_stable(ctx, hasher);
+
+            // Since the same expansion context is usually referenced many
+            // times, we cache a stable hash of it and hash that instead of
+            // recursing every time.
+            thread_local! {
+                static CACHE: RefCell<FxHashMap<hygiene::ExpnId, u64>> = Default::default();
+            }
+
+            let sub_hash: u64 = CACHE.with(|cache| {
+                let expn_id = span.ctxt.outer_expn();
+
+                if let Some(&sub_hash) = cache.borrow().get(&expn_id) {
+                    return sub_hash;
+                }
+
+                let mut hasher = StableHasher::new();
+                expn_id.expn_data().hash_stable(ctx, &mut hasher);
+                let sub_hash: Fingerprint = hasher.finish();
+                let sub_hash = sub_hash.to_smaller_hash();
+                cache.borrow_mut().insert(expn_id, sub_hash);
+                sub_hash
+            });
+
+            sub_hash.hash_stable(ctx, hasher);
+        }
+    }
+}
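One detail of the moved `Span` hashing worth spelling out (editorial note, not part of the diff): for a valid span, after the file-name hash, a single packed u64 is hashed that keeps the column byte offset in the low 8 bits, the line number in the next 24 bits, and the span length in the top 32 bits. A minimal sketch of that packing, with illustrative values:

// Mirrors the `line_col_len` packing above; not part of the patch itself.
fn pack_line_col_len(col: u64, line: u64, len: u64) -> u64 {
    (col & 0xFF)                      // bits 0..8:   column byte offset, truncated
        | ((line & 0xFF_FF_FF) << 8)  // bits 8..32:  line number, truncated
        | (len << 32)                 // bits 32..64: span length in bytes
}

fn main() {
    assert_eq!(pack_line_col_len(5, 120, 17), 0x11_0000_7805);
}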
@@ -39,7 +39,7 @@ pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span {
     }
 }

-#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy, HashStable_Generic)]
 pub struct Spanned<T> {
     pub node: T,
     pub span: Span,

@@ -5,7 +5,7 @@
 use arena::DroplessArena;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_index::vec::Idx;
-use rustc_macros::symbols;
+use rustc_macros::{symbols, HashStable_Generic};
 use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
 use rustc_serialize::{UseSpecializedDecodable, UseSpecializedEncodable};
 use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, StableHasher};

@@ -754,7 +754,7 @@ symbols! {
     }
 }

-#[derive(Copy, Clone, Eq)]
+#[derive(Copy, Clone, Eq, HashStable_Generic)]
 pub struct Ident {
     pub name: Symbol,
     pub span: Span,