Auto merge of #35854 - nikomatsakis:incr-comp-cache-hash-35549, r=mw
compute and cache HIR hashes at beginning This avoids the compile-time overhead of computing them twice. It also fixes an issue where the hash computed after typeck is different from the hash before, because typeck mutates the def-map in place. Fixes #35549. Fixes #35593. Some performance measurements suggest this `HashesMap` is very small in memory (unobservable via `-Z time-passes`) and very cheap to construct. I do see some (very minor) performance wins in the incremental case after the first run -- the first run costs more because loading the dep-graph didn't have any hashing to do in that case. Example timings from two runs of `libsyntex-syntax` -- the (1) indicates first run, (2) indicates second run, and (*) indicates both together: | Phase | Master | Branch | | ---- | ---- | ---- | | compute_hashes_map (1) | N/A | 0.343 | | load_dep_graph (1) | 0 | 0 | | serialize dep graph (1) | 4.190 | 3.920 | | total (1) | 4.190 | 4.260 | | compute_hashes_map (2) | N/A | 0.344 | | load_dep_graph (2) | 0.592 | 0.252 | | serialize dep graph (2) | 4.119 | 3.779 | | total (2) | 4.71 | 4.375 | | total (*) | 8.9 | 8.635 | r? @michaelwoerister
This commit is contained in:
commit
012f45eaf7
19 changed files with 352 additions and 178 deletions
|
@ -147,6 +147,11 @@ impl<D: Clone + Debug> DepNode<D> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if label == "Krate" {
|
||||||
|
// special case
|
||||||
|
return Ok(DepNode::Krate);
|
||||||
|
}
|
||||||
|
|
||||||
check! {
|
check! {
|
||||||
CollectItem,
|
CollectItem,
|
||||||
BorrowCheck,
|
BorrowCheck,
|
||||||
|
|
|
@ -1621,7 +1621,7 @@ pub type FreevarMap = NodeMap<Vec<Freevar>>;
|
||||||
|
|
||||||
pub type CaptureModeMap = NodeMap<CaptureClause>;
|
pub type CaptureModeMap = NodeMap<CaptureClause>;
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct TraitCandidate {
|
pub struct TraitCandidate {
|
||||||
pub def_id: DefId,
|
pub def_id: DefId,
|
||||||
pub import_id: Option<NodeId>,
|
pub import_id: Option<NodeId>,
|
||||||
|
|
|
@ -26,7 +26,7 @@ use rustc::util::common::time;
|
||||||
use rustc::util::nodemap::NodeSet;
|
use rustc::util::nodemap::NodeSet;
|
||||||
use rustc_back::sha2::{Sha256, Digest};
|
use rustc_back::sha2::{Sha256, Digest};
|
||||||
use rustc_borrowck as borrowck;
|
use rustc_borrowck as borrowck;
|
||||||
use rustc_incremental;
|
use rustc_incremental::{self, IncrementalHashesMap};
|
||||||
use rustc_resolve::{MakeGlobMap, Resolver};
|
use rustc_resolve::{MakeGlobMap, Resolver};
|
||||||
use rustc_metadata::macro_import;
|
use rustc_metadata::macro_import;
|
||||||
use rustc_metadata::creader::read_local_crates;
|
use rustc_metadata::creader::read_local_crates;
|
||||||
|
@ -172,7 +172,7 @@ pub fn compile_input(sess: &Session,
|
||||||
resolutions,
|
resolutions,
|
||||||
&arenas,
|
&arenas,
|
||||||
&crate_name,
|
&crate_name,
|
||||||
|tcx, mir_map, analysis, result| {
|
|tcx, mir_map, analysis, incremental_hashes_map, result| {
|
||||||
{
|
{
|
||||||
// Eventually, we will want to track plugins.
|
// Eventually, we will want to track plugins.
|
||||||
let _ignore = tcx.dep_graph.in_ignore();
|
let _ignore = tcx.dep_graph.in_ignore();
|
||||||
|
@ -202,7 +202,8 @@ pub fn compile_input(sess: &Session,
|
||||||
}
|
}
|
||||||
let trans = phase_4_translate_to_llvm(tcx,
|
let trans = phase_4_translate_to_llvm(tcx,
|
||||||
mir_map.unwrap(),
|
mir_map.unwrap(),
|
||||||
analysis);
|
analysis,
|
||||||
|
&incremental_hashes_map);
|
||||||
|
|
||||||
if log_enabled!(::log::INFO) {
|
if log_enabled!(::log::INFO) {
|
||||||
println!("Post-trans");
|
println!("Post-trans");
|
||||||
|
@ -797,14 +798,15 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
|
||||||
where F: for<'a> FnOnce(TyCtxt<'a, 'tcx, 'tcx>,
|
where F: for<'a> FnOnce(TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
Option<MirMap<'tcx>>,
|
Option<MirMap<'tcx>>,
|
||||||
ty::CrateAnalysis,
|
ty::CrateAnalysis,
|
||||||
|
IncrementalHashesMap,
|
||||||
CompileResult) -> R
|
CompileResult) -> R
|
||||||
{
|
{
|
||||||
macro_rules! try_with_f {
|
macro_rules! try_with_f {
|
||||||
($e: expr, ($t: expr, $m: expr, $a: expr)) => {
|
($e: expr, ($t: expr, $m: expr, $a: expr, $h: expr)) => {
|
||||||
match $e {
|
match $e {
|
||||||
Ok(x) => x,
|
Ok(x) => x,
|
||||||
Err(x) => {
|
Err(x) => {
|
||||||
f($t, $m, $a, Err(x));
|
f($t, $m, $a, $h, Err(x));
|
||||||
return Err(x);
|
return Err(x);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -860,12 +862,16 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
|
||||||
index,
|
index,
|
||||||
name,
|
name,
|
||||||
|tcx| {
|
|tcx| {
|
||||||
|
let incremental_hashes_map =
|
||||||
|
time(time_passes,
|
||||||
|
"compute_incremental_hashes_map",
|
||||||
|
|| rustc_incremental::compute_incremental_hashes_map(tcx));
|
||||||
time(time_passes,
|
time(time_passes,
|
||||||
"load_dep_graph",
|
"load_dep_graph",
|
||||||
|| rustc_incremental::load_dep_graph(tcx));
|
|| rustc_incremental::load_dep_graph(tcx, &incremental_hashes_map));
|
||||||
|
|
||||||
// passes are timed inside typeck
|
// passes are timed inside typeck
|
||||||
try_with_f!(typeck::check_crate(tcx), (tcx, None, analysis));
|
try_with_f!(typeck::check_crate(tcx), (tcx, None, analysis, incremental_hashes_map));
|
||||||
|
|
||||||
time(time_passes,
|
time(time_passes,
|
||||||
"const checking",
|
"const checking",
|
||||||
|
@ -935,7 +941,11 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
|
||||||
// lint warnings and so on -- kindck used to do this abort, but
|
// lint warnings and so on -- kindck used to do this abort, but
|
||||||
// kindck is gone now). -nmatsakis
|
// kindck is gone now). -nmatsakis
|
||||||
if sess.err_count() > 0 {
|
if sess.err_count() > 0 {
|
||||||
return Ok(f(tcx, Some(mir_map), analysis, Err(sess.err_count())));
|
return Ok(f(tcx,
|
||||||
|
Some(mir_map),
|
||||||
|
analysis,
|
||||||
|
incremental_hashes_map,
|
||||||
|
Err(sess.err_count())));
|
||||||
}
|
}
|
||||||
|
|
||||||
analysis.reachable =
|
analysis.reachable =
|
||||||
|
@ -963,17 +973,22 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
|
||||||
|
|
||||||
// The above three passes generate errors w/o aborting
|
// The above three passes generate errors w/o aborting
|
||||||
if sess.err_count() > 0 {
|
if sess.err_count() > 0 {
|
||||||
return Ok(f(tcx, Some(mir_map), analysis, Err(sess.err_count())));
|
return Ok(f(tcx,
|
||||||
|
Some(mir_map),
|
||||||
|
analysis,
|
||||||
|
incremental_hashes_map,
|
||||||
|
Err(sess.err_count())));
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(f(tcx, Some(mir_map), analysis, Ok(())))
|
Ok(f(tcx, Some(mir_map), analysis, incremental_hashes_map, Ok(())))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Run the translation phase to LLVM, after which the AST and analysis can
|
/// Run the translation phase to LLVM, after which the AST and analysis can
|
||||||
pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
mut mir_map: MirMap<'tcx>,
|
mut mir_map: MirMap<'tcx>,
|
||||||
analysis: ty::CrateAnalysis)
|
analysis: ty::CrateAnalysis,
|
||||||
|
incremental_hashes_map: &IncrementalHashesMap)
|
||||||
-> trans::CrateTranslation {
|
-> trans::CrateTranslation {
|
||||||
let time_passes = tcx.sess.time_passes();
|
let time_passes = tcx.sess.time_passes();
|
||||||
|
|
||||||
|
@ -1007,7 +1022,7 @@ pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
let translation =
|
let translation =
|
||||||
time(time_passes,
|
time(time_passes,
|
||||||
"translation",
|
"translation",
|
||||||
move || trans::trans_crate(tcx, &mir_map, analysis));
|
move || trans::trans_crate(tcx, &mir_map, analysis, &incremental_hashes_map));
|
||||||
|
|
||||||
time(time_passes,
|
time(time_passes,
|
||||||
"assert dep graph",
|
"assert dep graph",
|
||||||
|
@ -1015,7 +1030,7 @@ pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
|
|
||||||
time(time_passes,
|
time(time_passes,
|
||||||
"serialize dep graph",
|
"serialize dep graph",
|
||||||
move || rustc_incremental::save_dep_graph(tcx));
|
move || rustc_incremental::save_dep_graph(tcx, &incremental_hashes_map));
|
||||||
|
|
||||||
translation
|
translation
|
||||||
}
|
}
|
||||||
|
|
|
@ -234,7 +234,7 @@ impl PpSourceMode {
|
||||||
resolutions.clone(),
|
resolutions.clone(),
|
||||||
arenas,
|
arenas,
|
||||||
id,
|
id,
|
||||||
|tcx, _, _, _| {
|
|tcx, _, _, _, _| {
|
||||||
let annotation = TypedAnnotation {
|
let annotation = TypedAnnotation {
|
||||||
tcx: tcx,
|
tcx: tcx,
|
||||||
};
|
};
|
||||||
|
@ -951,7 +951,7 @@ fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session,
|
||||||
resolutions.clone(),
|
resolutions.clone(),
|
||||||
arenas,
|
arenas,
|
||||||
crate_name,
|
crate_name,
|
||||||
|tcx, mir_map, _, _| {
|
|tcx, mir_map, _, _, _| {
|
||||||
match ppm {
|
match ppm {
|
||||||
PpmMir | PpmMirCFG => {
|
PpmMir | PpmMirCFG => {
|
||||||
if let Some(mir_map) = mir_map {
|
if let Some(mir_map) = mir_map {
|
||||||
|
|
36
src/librustc_incremental/calculate_svh/def_path_hash.rs
Normal file
36
src/librustc_incremental/calculate_svh/def_path_hash.rs
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
|
||||||
|
// file at the top-level directory of this distribution and at
|
||||||
|
// http://rust-lang.org/COPYRIGHT.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||||
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||||
|
// option. This file may not be copied, modified, or distributed
|
||||||
|
// except according to those terms.
|
||||||
|
|
||||||
|
use rustc::hir::def_id::DefId;
|
||||||
|
use rustc::ty::TyCtxt;
|
||||||
|
use rustc::util::nodemap::DefIdMap;
|
||||||
|
|
||||||
|
pub struct DefPathHashes<'a, 'tcx: 'a> {
|
||||||
|
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
|
data: DefIdMap<u64>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, 'tcx> DefPathHashes<'a, 'tcx> {
|
||||||
|
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
|
||||||
|
DefPathHashes {
|
||||||
|
tcx: tcx,
|
||||||
|
data: DefIdMap()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn hash(&mut self, def_id: DefId) -> u64 {
|
||||||
|
let tcx = self.tcx;
|
||||||
|
*self.data.entry(def_id)
|
||||||
|
.or_insert_with(|| {
|
||||||
|
let def_path = tcx.def_path(def_id);
|
||||||
|
def_path.deterministic_hash(tcx)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
|
@ -8,106 +8,137 @@
|
||||||
// option. This file may not be copied, modified, or distributed
|
// option. This file may not be copied, modified, or distributed
|
||||||
// except according to those terms.
|
// except according to those terms.
|
||||||
|
|
||||||
//! Calculation of a Strict Version Hash for crates. For a length
|
//! Calculation of the (misnamed) "strict version hash" for crates and
|
||||||
//! comment explaining the general idea, see `librustc/middle/svh.rs`.
|
//! items. This hash is used to tell when the HIR changed in such a
|
||||||
|
//! way that results from previous compilations may no longer be
|
||||||
|
//! applicable and hence must be recomputed. It should probably be
|
||||||
|
//! renamed to the ICH (incremental compilation hash).
|
||||||
|
//!
|
||||||
|
//! The hashes for all items are computed once at the beginning of
|
||||||
|
//! compilation and stored into a map. In addition, a hash is computed
|
||||||
|
//! of the **entire crate**.
|
||||||
|
//!
|
||||||
|
//! Storing the hashes in a map avoids the need to compute them twice
|
||||||
|
//! (once when loading prior incremental results and once when
|
||||||
|
//! saving), but it is also important for correctness: at least as of
|
||||||
|
//! the time of this writing, the typeck passes rewrites entries in
|
||||||
|
//! the dep-map in-place to accommodate UFCS resolutions. Since name
|
||||||
|
//! resolution is part of the hash, the result is that hashes computed
|
||||||
|
//! at the end of compilation would be different from those computed
|
||||||
|
//! at the beginning.
|
||||||
|
|
||||||
|
use syntax::ast;
|
||||||
use syntax::attr::AttributeMethods;
|
use syntax::attr::AttributeMethods;
|
||||||
use std::hash::{Hash, SipHasher, Hasher};
|
use std::hash::{Hash, SipHasher, Hasher};
|
||||||
|
use rustc::dep_graph::DepNode;
|
||||||
|
use rustc::hir;
|
||||||
use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
|
use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
|
||||||
use rustc::hir::map::{NodeItem, NodeForeignItem};
|
use rustc::hir::intravisit as visit;
|
||||||
use rustc::hir::svh::Svh;
|
|
||||||
use rustc::ty::TyCtxt;
|
use rustc::ty::TyCtxt;
|
||||||
use rustc::hir::intravisit::{self, Visitor};
|
use rustc_data_structures::fnv::FnvHashMap;
|
||||||
|
|
||||||
|
use self::def_path_hash::DefPathHashes;
|
||||||
use self::svh_visitor::StrictVersionHashVisitor;
|
use self::svh_visitor::StrictVersionHashVisitor;
|
||||||
|
|
||||||
|
mod def_path_hash;
|
||||||
mod svh_visitor;
|
mod svh_visitor;
|
||||||
|
|
||||||
pub trait SvhCalculate {
|
pub type IncrementalHashesMap = FnvHashMap<DepNode<DefId>, u64>;
|
||||||
/// Calculate the SVH for an entire krate.
|
|
||||||
fn calculate_krate_hash(self) -> Svh;
|
|
||||||
|
|
||||||
/// Calculate the SVH for a particular item.
|
pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
|
||||||
fn calculate_item_hash(self, def_id: DefId) -> u64;
|
-> IncrementalHashesMap {
|
||||||
|
let _ignore = tcx.dep_graph.in_ignore();
|
||||||
|
let krate = tcx.map.krate();
|
||||||
|
let mut visitor = HashItemsVisitor { tcx: tcx,
|
||||||
|
hashes: FnvHashMap(),
|
||||||
|
def_path_hashes: DefPathHashes::new(tcx) };
|
||||||
|
visitor.calculate_def_id(DefId::local(CRATE_DEF_INDEX), |v| visit::walk_crate(v, krate));
|
||||||
|
krate.visit_all_items(&mut visitor);
|
||||||
|
visitor.compute_crate_hash();
|
||||||
|
visitor.hashes
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx> SvhCalculate for TyCtxt<'a, 'tcx, 'tcx> {
|
struct HashItemsVisitor<'a, 'tcx: 'a> {
|
||||||
fn calculate_krate_hash(self) -> Svh {
|
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
// FIXME (#14132): This is better than it used to be, but it still not
|
def_path_hashes: DefPathHashes<'a, 'tcx>,
|
||||||
// ideal. We now attempt to hash only the relevant portions of the
|
hashes: IncrementalHashesMap,
|
||||||
// Crate AST as well as the top-level crate attributes. (However,
|
}
|
||||||
// the hashing of the crate attributes should be double-checked
|
|
||||||
// to ensure it is not incorporating implementation artifacts into
|
|
||||||
// the hash that are not otherwise visible.)
|
|
||||||
|
|
||||||
let crate_disambiguator = self.sess.local_crate_disambiguator();
|
impl<'a, 'tcx> HashItemsVisitor<'a, 'tcx> {
|
||||||
let krate = self.map.krate();
|
fn calculate_node_id<W>(&mut self, id: ast::NodeId, walk_op: W)
|
||||||
|
where W: for<'v> FnMut(&mut StrictVersionHashVisitor<'v, 'a, 'tcx>)
|
||||||
|
{
|
||||||
|
let def_id = self.tcx.map.local_def_id(id);
|
||||||
|
self.calculate_def_id(def_id, walk_op)
|
||||||
|
}
|
||||||
|
|
||||||
// FIXME: this should use SHA1, not SipHash. SipHash is not built to
|
fn calculate_def_id<W>(&mut self, def_id: DefId, mut walk_op: W)
|
||||||
// avoid collisions.
|
where W: for<'v> FnMut(&mut StrictVersionHashVisitor<'v, 'a, 'tcx>)
|
||||||
|
{
|
||||||
|
assert!(def_id.is_local());
|
||||||
|
debug!("HashItemsVisitor::calculate(def_id={:?})", def_id);
|
||||||
|
// FIXME: this should use SHA1, not SipHash. SipHash is not
|
||||||
|
// built to avoid collisions.
|
||||||
let mut state = SipHasher::new();
|
let mut state = SipHasher::new();
|
||||||
debug!("state: {:?}", state);
|
walk_op(&mut StrictVersionHashVisitor::new(&mut state,
|
||||||
|
self.tcx,
|
||||||
|
&mut self.def_path_hashes));
|
||||||
|
let item_hash = state.finish();
|
||||||
|
self.hashes.insert(DepNode::Hir(def_id), item_hash);
|
||||||
|
debug!("calculate_item_hash: def_id={:?} hash={:?}", def_id, item_hash);
|
||||||
|
}
|
||||||
|
|
||||||
// FIXME(#32753) -- at (*) we `to_le` for endianness, but is
|
fn compute_crate_hash(&mut self) {
|
||||||
// this enough, and does it matter anyway?
|
let krate = self.tcx.map.krate();
|
||||||
"crate_disambiguator".hash(&mut state);
|
|
||||||
crate_disambiguator.len().to_le().hash(&mut state); // (*)
|
|
||||||
crate_disambiguator.hash(&mut state);
|
|
||||||
|
|
||||||
debug!("crate_disambiguator: {:?}", crate_disambiguator);
|
let mut crate_state = SipHasher::new();
|
||||||
debug!("state: {:?}", state);
|
|
||||||
|
|
||||||
|
let crate_disambiguator = self.tcx.sess.local_crate_disambiguator();
|
||||||
|
"crate_disambiguator".hash(&mut crate_state);
|
||||||
|
crate_disambiguator.len().hash(&mut crate_state);
|
||||||
|
crate_disambiguator.hash(&mut crate_state);
|
||||||
|
|
||||||
|
// add each item (in some deterministic order) to the overall
|
||||||
|
// crate hash.
|
||||||
{
|
{
|
||||||
let mut visit = StrictVersionHashVisitor::new(&mut state, self);
|
let def_path_hashes = &mut self.def_path_hashes;
|
||||||
krate.visit_all_items(&mut visit);
|
let mut item_hashes: Vec<_> =
|
||||||
|
self.hashes.iter()
|
||||||
|
.map(|(item_dep_node, &item_hash)| {
|
||||||
|
// convert from a DepNode<DefId> tp a
|
||||||
|
// DepNode<u64> where the u64 is the
|
||||||
|
// hash of the def-id's def-path:
|
||||||
|
let item_dep_node =
|
||||||
|
item_dep_node.map_def(|&did| Some(def_path_hashes.hash(did)))
|
||||||
|
.unwrap();
|
||||||
|
(item_dep_node, item_hash)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
item_hashes.sort(); // avoid artificial dependencies on item ordering
|
||||||
|
item_hashes.hash(&mut crate_state);
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIXME (#14132): This hash is still sensitive to e.g. the
|
|
||||||
// spans of the crate Attributes and their underlying
|
|
||||||
// MetaItems; we should make ContentHashable impl for those
|
|
||||||
// types and then use hash_content. But, since all crate
|
|
||||||
// attributes should appear near beginning of the file, it is
|
|
||||||
// not such a big deal to be sensitive to their spans for now.
|
|
||||||
//
|
|
||||||
// We hash only the MetaItems instead of the entire Attribute
|
|
||||||
// to avoid hashing the AttrId
|
|
||||||
for attr in &krate.attrs {
|
for attr in &krate.attrs {
|
||||||
debug!("krate attr {:?}", attr);
|
debug!("krate attr {:?}", attr);
|
||||||
attr.meta().hash(&mut state);
|
attr.meta().hash(&mut crate_state);
|
||||||
}
|
}
|
||||||
|
|
||||||
Svh::new(state.finish())
|
let crate_hash = crate_state.finish();
|
||||||
}
|
self.hashes.insert(DepNode::Krate, crate_hash);
|
||||||
|
debug!("calculate_crate_hash: crate_hash={:?}", crate_hash);
|
||||||
fn calculate_item_hash(self, def_id: DefId) -> u64 {
|
|
||||||
assert!(def_id.is_local());
|
|
||||||
|
|
||||||
debug!("calculate_item_hash(def_id={:?})", def_id);
|
|
||||||
|
|
||||||
let mut state = SipHasher::new();
|
|
||||||
|
|
||||||
{
|
|
||||||
let mut visit = StrictVersionHashVisitor::new(&mut state, self);
|
|
||||||
if def_id.index == CRATE_DEF_INDEX {
|
|
||||||
// the crate root itself is not registered in the map
|
|
||||||
// as an item, so we have to fetch it this way
|
|
||||||
let krate = self.map.krate();
|
|
||||||
intravisit::walk_crate(&mut visit, krate);
|
|
||||||
} else {
|
|
||||||
let node_id = self.map.as_local_node_id(def_id).unwrap();
|
|
||||||
match self.map.find(node_id) {
|
|
||||||
Some(NodeItem(item)) => visit.visit_item(item),
|
|
||||||
Some(NodeForeignItem(item)) => visit.visit_foreign_item(item),
|
|
||||||
r => bug!("calculate_item_hash: expected an item for node {} not {:?}",
|
|
||||||
node_id, r),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let hash = state.finish();
|
|
||||||
|
|
||||||
debug!("calculate_item_hash: def_id={:?} hash={:?}", def_id, hash);
|
|
||||||
|
|
||||||
hash
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
impl<'a, 'tcx> visit::Visitor<'tcx> for HashItemsVisitor<'a, 'tcx> {
|
||||||
|
fn visit_item(&mut self, item: &'tcx hir::Item) {
|
||||||
|
self.calculate_node_id(item.id, |v| v.visit_item(item));
|
||||||
|
visit::walk_item(self, item);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn visit_foreign_item(&mut self, item: &'tcx hir::ForeignItem) {
|
||||||
|
self.calculate_node_id(item.id, |v| v.visit_foreign_item(item));
|
||||||
|
visit::walk_foreign_item(self, item);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
|
@ -25,25 +25,30 @@ use rustc::hir::def::{Def, PathResolution};
|
||||||
use rustc::hir::def_id::DefId;
|
use rustc::hir::def_id::DefId;
|
||||||
use rustc::hir::intravisit as visit;
|
use rustc::hir::intravisit as visit;
|
||||||
use rustc::hir::intravisit::{Visitor, FnKind};
|
use rustc::hir::intravisit::{Visitor, FnKind};
|
||||||
use rustc::hir::map::DefPath;
|
|
||||||
use rustc::ty::TyCtxt;
|
use rustc::ty::TyCtxt;
|
||||||
|
|
||||||
use std::hash::{Hash, SipHasher};
|
use std::hash::{Hash, SipHasher};
|
||||||
|
|
||||||
pub struct StrictVersionHashVisitor<'a, 'tcx: 'a> {
|
use super::def_path_hash::DefPathHashes;
|
||||||
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
|
||||||
|
pub struct StrictVersionHashVisitor<'a, 'hash: 'a, 'tcx: 'hash> {
|
||||||
|
pub tcx: TyCtxt<'hash, 'tcx, 'tcx>,
|
||||||
pub st: &'a mut SipHasher,
|
pub st: &'a mut SipHasher,
|
||||||
|
|
||||||
|
// collect a deterministic hash of def-ids that we have seen
|
||||||
|
def_path_hashes: &'a mut DefPathHashes<'hash, 'tcx>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx> StrictVersionHashVisitor<'a, 'tcx> {
|
impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
|
||||||
pub fn new(st: &'a mut SipHasher,
|
pub fn new(st: &'a mut SipHasher,
|
||||||
tcx: TyCtxt<'a, 'tcx, 'tcx>)
|
tcx: TyCtxt<'hash, 'tcx, 'tcx>,
|
||||||
|
def_path_hashes: &'a mut DefPathHashes<'hash, 'tcx>)
|
||||||
-> Self {
|
-> Self {
|
||||||
StrictVersionHashVisitor { st: st, tcx: tcx }
|
StrictVersionHashVisitor { st: st, tcx: tcx, def_path_hashes: def_path_hashes }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn hash_def_path(&mut self, path: &DefPath) {
|
fn compute_def_id_hash(&mut self, def_id: DefId) -> u64 {
|
||||||
path.deterministic_hash_to(self.tcx, self.st);
|
self.def_path_hashes.hash(def_id)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -187,20 +192,20 @@ pub enum SawStmtComponent {
|
||||||
SawStmtSemi,
|
SawStmtSemi,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx> Visitor<'a> for StrictVersionHashVisitor<'a, 'tcx> {
|
impl<'a, 'hash, 'tcx> Visitor<'tcx> for StrictVersionHashVisitor<'a, 'hash, 'tcx> {
|
||||||
fn visit_nested_item(&mut self, _: ItemId) {
|
fn visit_nested_item(&mut self, _: ItemId) {
|
||||||
// Each item is hashed independently; ignore nested items.
|
// Each item is hashed independently; ignore nested items.
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_variant_data(&mut self, s: &'a VariantData, name: Name,
|
fn visit_variant_data(&mut self, s: &'tcx VariantData, name: Name,
|
||||||
g: &'a Generics, _: NodeId, _: Span) {
|
g: &'tcx Generics, _: NodeId, _: Span) {
|
||||||
debug!("visit_variant_data: st={:?}", self.st);
|
debug!("visit_variant_data: st={:?}", self.st);
|
||||||
SawStructDef(name.as_str()).hash(self.st);
|
SawStructDef(name.as_str()).hash(self.st);
|
||||||
visit::walk_generics(self, g);
|
visit::walk_generics(self, g);
|
||||||
visit::walk_struct_def(self, s)
|
visit::walk_struct_def(self, s)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_variant(&mut self, v: &'a Variant, g: &'a Generics, item_id: NodeId) {
|
fn visit_variant(&mut self, v: &'tcx Variant, g: &'tcx Generics, item_id: NodeId) {
|
||||||
debug!("visit_variant: st={:?}", self.st);
|
debug!("visit_variant: st={:?}", self.st);
|
||||||
SawVariant.hash(self.st);
|
SawVariant.hash(self.st);
|
||||||
// walk_variant does not call walk_generics, so do it here.
|
// walk_variant does not call walk_generics, so do it here.
|
||||||
|
@ -227,12 +232,12 @@ impl<'a, 'tcx> Visitor<'a> for StrictVersionHashVisitor<'a, 'tcx> {
|
||||||
SawIdent(name.as_str()).hash(self.st);
|
SawIdent(name.as_str()).hash(self.st);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_lifetime(&mut self, l: &'a Lifetime) {
|
fn visit_lifetime(&mut self, l: &'tcx Lifetime) {
|
||||||
debug!("visit_lifetime: st={:?}", self.st);
|
debug!("visit_lifetime: st={:?}", self.st);
|
||||||
SawLifetime(l.name.as_str()).hash(self.st);
|
SawLifetime(l.name.as_str()).hash(self.st);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_lifetime_def(&mut self, l: &'a LifetimeDef) {
|
fn visit_lifetime_def(&mut self, l: &'tcx LifetimeDef) {
|
||||||
debug!("visit_lifetime_def: st={:?}", self.st);
|
debug!("visit_lifetime_def: st={:?}", self.st);
|
||||||
SawLifetimeDef(l.lifetime.name.as_str()).hash(self.st);
|
SawLifetimeDef(l.lifetime.name.as_str()).hash(self.st);
|
||||||
}
|
}
|
||||||
|
@ -242,12 +247,12 @@ impl<'a, 'tcx> Visitor<'a> for StrictVersionHashVisitor<'a, 'tcx> {
|
||||||
// monomorphization and cross-crate inlining generally implies
|
// monomorphization and cross-crate inlining generally implies
|
||||||
// that a change to a crate body will require downstream
|
// that a change to a crate body will require downstream
|
||||||
// crates to be recompiled.
|
// crates to be recompiled.
|
||||||
fn visit_expr(&mut self, ex: &'a Expr) {
|
fn visit_expr(&mut self, ex: &'tcx Expr) {
|
||||||
debug!("visit_expr: st={:?}", self.st);
|
debug!("visit_expr: st={:?}", self.st);
|
||||||
SawExpr(saw_expr(&ex.node)).hash(self.st); visit::walk_expr(self, ex)
|
SawExpr(saw_expr(&ex.node)).hash(self.st); visit::walk_expr(self, ex)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_stmt(&mut self, s: &'a Stmt) {
|
fn visit_stmt(&mut self, s: &'tcx Stmt) {
|
||||||
debug!("visit_stmt: st={:?}", self.st);
|
debug!("visit_stmt: st={:?}", self.st);
|
||||||
|
|
||||||
// We don't want to modify the hash for decls, because
|
// We don't want to modify the hash for decls, because
|
||||||
|
@ -265,7 +270,7 @@ impl<'a, 'tcx> Visitor<'a> for StrictVersionHashVisitor<'a, 'tcx> {
|
||||||
visit::walk_stmt(self, s)
|
visit::walk_stmt(self, s)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_foreign_item(&mut self, i: &'a ForeignItem) {
|
fn visit_foreign_item(&mut self, i: &'tcx ForeignItem) {
|
||||||
debug!("visit_foreign_item: st={:?}", self.st);
|
debug!("visit_foreign_item: st={:?}", self.st);
|
||||||
|
|
||||||
// FIXME (#14132) ideally we would incorporate privacy (or
|
// FIXME (#14132) ideally we would incorporate privacy (or
|
||||||
|
@ -275,7 +280,7 @@ impl<'a, 'tcx> Visitor<'a> for StrictVersionHashVisitor<'a, 'tcx> {
|
||||||
SawForeignItem.hash(self.st); visit::walk_foreign_item(self, i)
|
SawForeignItem.hash(self.st); visit::walk_foreign_item(self, i)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_item(&mut self, i: &'a Item) {
|
fn visit_item(&mut self, i: &'tcx Item) {
|
||||||
debug!("visit_item: {:?} st={:?}", i, self.st);
|
debug!("visit_item: {:?} st={:?}", i, self.st);
|
||||||
|
|
||||||
// FIXME (#14132) ideally would incorporate reachability
|
// FIXME (#14132) ideally would incorporate reachability
|
||||||
|
@ -285,63 +290,63 @@ impl<'a, 'tcx> Visitor<'a> for StrictVersionHashVisitor<'a, 'tcx> {
|
||||||
SawItem.hash(self.st); visit::walk_item(self, i)
|
SawItem.hash(self.st); visit::walk_item(self, i)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_mod(&mut self, m: &'a Mod, _s: Span, n: NodeId) {
|
fn visit_mod(&mut self, m: &'tcx Mod, _s: Span, n: NodeId) {
|
||||||
debug!("visit_mod: st={:?}", self.st);
|
debug!("visit_mod: st={:?}", self.st);
|
||||||
SawMod.hash(self.st); visit::walk_mod(self, m, n)
|
SawMod.hash(self.st); visit::walk_mod(self, m, n)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_ty(&mut self, t: &'a Ty) {
|
fn visit_ty(&mut self, t: &'tcx Ty) {
|
||||||
debug!("visit_ty: st={:?}", self.st);
|
debug!("visit_ty: st={:?}", self.st);
|
||||||
SawTy.hash(self.st); visit::walk_ty(self, t)
|
SawTy.hash(self.st); visit::walk_ty(self, t)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_generics(&mut self, g: &'a Generics) {
|
fn visit_generics(&mut self, g: &'tcx Generics) {
|
||||||
debug!("visit_generics: st={:?}", self.st);
|
debug!("visit_generics: st={:?}", self.st);
|
||||||
SawGenerics.hash(self.st); visit::walk_generics(self, g)
|
SawGenerics.hash(self.st); visit::walk_generics(self, g)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_fn(&mut self, fk: FnKind<'a>, fd: &'a FnDecl,
|
fn visit_fn(&mut self, fk: FnKind<'tcx>, fd: &'tcx FnDecl,
|
||||||
b: &'a Block, s: Span, n: NodeId) {
|
b: &'tcx Block, s: Span, n: NodeId) {
|
||||||
debug!("visit_fn: st={:?}", self.st);
|
debug!("visit_fn: st={:?}", self.st);
|
||||||
SawFn.hash(self.st); visit::walk_fn(self, fk, fd, b, s, n)
|
SawFn.hash(self.st); visit::walk_fn(self, fk, fd, b, s, n)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_trait_item(&mut self, ti: &'a TraitItem) {
|
fn visit_trait_item(&mut self, ti: &'tcx TraitItem) {
|
||||||
debug!("visit_trait_item: st={:?}", self.st);
|
debug!("visit_trait_item: st={:?}", self.st);
|
||||||
SawTraitItem.hash(self.st); visit::walk_trait_item(self, ti)
|
SawTraitItem.hash(self.st); visit::walk_trait_item(self, ti)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_impl_item(&mut self, ii: &'a ImplItem) {
|
fn visit_impl_item(&mut self, ii: &'tcx ImplItem) {
|
||||||
debug!("visit_impl_item: st={:?}", self.st);
|
debug!("visit_impl_item: st={:?}", self.st);
|
||||||
SawImplItem.hash(self.st); visit::walk_impl_item(self, ii)
|
SawImplItem.hash(self.st); visit::walk_impl_item(self, ii)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_struct_field(&mut self, s: &'a StructField) {
|
fn visit_struct_field(&mut self, s: &'tcx StructField) {
|
||||||
debug!("visit_struct_field: st={:?}", self.st);
|
debug!("visit_struct_field: st={:?}", self.st);
|
||||||
SawStructField.hash(self.st); visit::walk_struct_field(self, s)
|
SawStructField.hash(self.st); visit::walk_struct_field(self, s)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_path(&mut self, path: &'a Path, _: ast::NodeId) {
|
fn visit_path(&mut self, path: &'tcx Path, _: ast::NodeId) {
|
||||||
debug!("visit_path: st={:?}", self.st);
|
debug!("visit_path: st={:?}", self.st);
|
||||||
SawPath.hash(self.st); visit::walk_path(self, path)
|
SawPath.hash(self.st); visit::walk_path(self, path)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_block(&mut self, b: &'a Block) {
|
fn visit_block(&mut self, b: &'tcx Block) {
|
||||||
debug!("visit_block: st={:?}", self.st);
|
debug!("visit_block: st={:?}", self.st);
|
||||||
SawBlock.hash(self.st); visit::walk_block(self, b)
|
SawBlock.hash(self.st); visit::walk_block(self, b)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_pat(&mut self, p: &'a Pat) {
|
fn visit_pat(&mut self, p: &'tcx Pat) {
|
||||||
debug!("visit_pat: st={:?}", self.st);
|
debug!("visit_pat: st={:?}", self.st);
|
||||||
SawPat.hash(self.st); visit::walk_pat(self, p)
|
SawPat.hash(self.st); visit::walk_pat(self, p)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_local(&mut self, l: &'a Local) {
|
fn visit_local(&mut self, l: &'tcx Local) {
|
||||||
debug!("visit_local: st={:?}", self.st);
|
debug!("visit_local: st={:?}", self.st);
|
||||||
SawLocal.hash(self.st); visit::walk_local(self, l)
|
SawLocal.hash(self.st); visit::walk_local(self, l)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_arm(&mut self, a: &'a Arm) {
|
fn visit_arm(&mut self, a: &'tcx Arm) {
|
||||||
debug!("visit_arm: st={:?}", self.st);
|
debug!("visit_arm: st={:?}", self.st);
|
||||||
SawArm.hash(self.st); visit::walk_arm(self, a)
|
SawArm.hash(self.st); visit::walk_arm(self, a)
|
||||||
}
|
}
|
||||||
|
@ -361,7 +366,7 @@ pub enum DefHash {
|
||||||
SawErr,
|
SawErr,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx> StrictVersionHashVisitor<'a, 'tcx> {
|
impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
|
||||||
fn hash_resolve(&mut self, id: ast::NodeId) {
|
fn hash_resolve(&mut self, id: ast::NodeId) {
|
||||||
// Because whether or not a given id has an entry is dependent
|
// Because whether or not a given id has an entry is dependent
|
||||||
// solely on expr variant etc, we don't need to hash whether
|
// solely on expr variant etc, we don't need to hash whether
|
||||||
|
@ -369,20 +374,29 @@ impl<'a, 'tcx> StrictVersionHashVisitor<'a, 'tcx> {
|
||||||
// variant it is above when we visit the HIR).
|
// variant it is above when we visit the HIR).
|
||||||
|
|
||||||
if let Some(def) = self.tcx.def_map.borrow().get(&id) {
|
if let Some(def) = self.tcx.def_map.borrow().get(&id) {
|
||||||
|
debug!("hash_resolve: id={:?} def={:?} st={:?}", id, def, self.st);
|
||||||
self.hash_partial_def(def);
|
self.hash_partial_def(def);
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(traits) = self.tcx.trait_map.get(&id) {
|
if let Some(traits) = self.tcx.trait_map.get(&id) {
|
||||||
|
debug!("hash_resolve: id={:?} traits={:?} st={:?}", id, traits, self.st);
|
||||||
traits.len().hash(self.st);
|
traits.len().hash(self.st);
|
||||||
for candidate in traits {
|
|
||||||
self.hash_def_id(candidate.def_id);
|
// The ordering of the candidates is not fixed. So we hash
|
||||||
}
|
// the def-ids and then sort them and hash the collection.
|
||||||
|
let mut candidates: Vec<_> =
|
||||||
|
traits.iter()
|
||||||
|
.map(|&TraitCandidate { def_id, import_id: _ }| {
|
||||||
|
self.compute_def_id_hash(def_id)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
candidates.sort();
|
||||||
|
candidates.hash(self.st);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn hash_def_id(&mut self, def_id: DefId) {
|
fn hash_def_id(&mut self, def_id: DefId) {
|
||||||
let def_path = self.tcx.def_path(def_id);
|
self.compute_def_id_hash(def_id).hash(self.st);
|
||||||
self.hash_def_path(&def_path);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn hash_partial_def(&mut self, def: &PathResolution) {
|
fn hash_partial_def(&mut self, def: &PathResolution) {
|
||||||
|
|
|
@ -38,7 +38,8 @@ mod calculate_svh;
|
||||||
mod persist;
|
mod persist;
|
||||||
|
|
||||||
pub use assert_dep_graph::assert_dep_graph;
|
pub use assert_dep_graph::assert_dep_graph;
|
||||||
pub use calculate_svh::SvhCalculate;
|
pub use calculate_svh::compute_incremental_hashes_map;
|
||||||
|
pub use calculate_svh::IncrementalHashesMap;
|
||||||
pub use persist::load_dep_graph;
|
pub use persist::load_dep_graph;
|
||||||
pub use persist::save_dep_graph;
|
pub use persist::save_dep_graph;
|
||||||
pub use persist::save_trans_partition;
|
pub use persist::save_trans_partition;
|
||||||
|
|
|
@ -133,6 +133,7 @@ impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
|
||||||
debug!("assert_dirty({:?})", dep_node);
|
debug!("assert_dirty({:?})", dep_node);
|
||||||
|
|
||||||
match dep_node {
|
match dep_node {
|
||||||
|
DepNode::Krate |
|
||||||
DepNode::Hir(_) => {
|
DepNode::Hir(_) => {
|
||||||
// HIR nodes are inputs, so if we are asserting that the HIR node is
|
// HIR nodes are inputs, so if we are asserting that the HIR node is
|
||||||
// dirty, we check the dirty input set.
|
// dirty, we check the dirty input set.
|
||||||
|
@ -161,6 +162,7 @@ impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
|
||||||
debug!("assert_clean({:?})", dep_node);
|
debug!("assert_clean({:?})", dep_node);
|
||||||
|
|
||||||
match dep_node {
|
match dep_node {
|
||||||
|
DepNode::Krate |
|
||||||
DepNode::Hir(_) => {
|
DepNode::Hir(_) => {
|
||||||
// For HIR nodes, check the inputs.
|
// For HIR nodes, check the inputs.
|
||||||
if self.dirty_inputs.contains(&dep_node) {
|
if self.dirty_inputs.contains(&dep_node) {
|
||||||
|
|
|
@ -8,7 +8,6 @@
|
||||||
// option. This file may not be copied, modified, or distributed
|
// option. This file may not be copied, modified, or distributed
|
||||||
// except according to those terms.
|
// except according to those terms.
|
||||||
|
|
||||||
use calculate_svh::SvhCalculate;
|
|
||||||
use rbml::Error;
|
use rbml::Error;
|
||||||
use rbml::opaque::Decoder;
|
use rbml::opaque::Decoder;
|
||||||
use rustc::dep_graph::DepNode;
|
use rustc::dep_graph::DepNode;
|
||||||
|
@ -21,19 +20,24 @@ use std::io::{ErrorKind, Read};
|
||||||
use std::fs::File;
|
use std::fs::File;
|
||||||
use syntax::ast;
|
use syntax::ast;
|
||||||
|
|
||||||
|
use IncrementalHashesMap;
|
||||||
use super::data::*;
|
use super::data::*;
|
||||||
use super::util::*;
|
use super::util::*;
|
||||||
|
|
||||||
pub struct HashContext<'a, 'tcx: 'a> {
|
pub struct HashContext<'a, 'tcx: 'a> {
|
||||||
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
|
incremental_hashes_map: &'a IncrementalHashesMap,
|
||||||
item_metadata_hashes: FnvHashMap<DefId, u64>,
|
item_metadata_hashes: FnvHashMap<DefId, u64>,
|
||||||
crate_hashes: FnvHashMap<ast::CrateNum, Svh>,
|
crate_hashes: FnvHashMap<ast::CrateNum, Svh>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx> HashContext<'a, 'tcx> {
|
impl<'a, 'tcx> HashContext<'a, 'tcx> {
|
||||||
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
|
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
|
incremental_hashes_map: &'a IncrementalHashesMap)
|
||||||
|
-> Self {
|
||||||
HashContext {
|
HashContext {
|
||||||
tcx: tcx,
|
tcx: tcx,
|
||||||
|
incremental_hashes_map: incremental_hashes_map,
|
||||||
item_metadata_hashes: FnvHashMap(),
|
item_metadata_hashes: FnvHashMap(),
|
||||||
crate_hashes: FnvHashMap(),
|
crate_hashes: FnvHashMap(),
|
||||||
}
|
}
|
||||||
|
@ -41,17 +45,32 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
|
||||||
|
|
||||||
pub fn is_hashable(dep_node: &DepNode<DefId>) -> bool {
|
pub fn is_hashable(dep_node: &DepNode<DefId>) -> bool {
|
||||||
match *dep_node {
|
match *dep_node {
|
||||||
|
DepNode::Krate |
|
||||||
DepNode::Hir(_) => true,
|
DepNode::Hir(_) => true,
|
||||||
DepNode::MetaData(def_id) => !def_id.is_local(),
|
DepNode::MetaData(def_id) => !def_id.is_local(),
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn hash(&mut self, dep_node: &DepNode<DefId>) -> Option<(DefId, u64)> {
|
pub fn hash(&mut self, dep_node: &DepNode<DefId>) -> Option<u64> {
|
||||||
match *dep_node {
|
match *dep_node {
|
||||||
|
DepNode::Krate => {
|
||||||
|
Some(self.incremental_hashes_map[dep_node])
|
||||||
|
}
|
||||||
|
|
||||||
// HIR nodes (which always come from our crate) are an input:
|
// HIR nodes (which always come from our crate) are an input:
|
||||||
DepNode::Hir(def_id) => {
|
DepNode::Hir(def_id) => {
|
||||||
Some((def_id, self.hir_hash(def_id)))
|
assert!(def_id.is_local(),
|
||||||
|
"cannot hash HIR for non-local def-id {:?} => {:?}",
|
||||||
|
def_id,
|
||||||
|
self.tcx.item_path_str(def_id));
|
||||||
|
|
||||||
|
assert!(!self.tcx.map.is_inlined_def_id(def_id),
|
||||||
|
"cannot hash HIR for inlined def-id {:?} => {:?}",
|
||||||
|
def_id,
|
||||||
|
self.tcx.item_path_str(def_id));
|
||||||
|
|
||||||
|
Some(self.incremental_hashes_map[dep_node])
|
||||||
}
|
}
|
||||||
|
|
||||||
// MetaData from other crates is an *input* to us.
|
// MetaData from other crates is an *input* to us.
|
||||||
|
@ -59,7 +78,7 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
|
||||||
// don't hash them, but we do compute a hash for them and
|
// don't hash them, but we do compute a hash for them and
|
||||||
// save it for others to use.
|
// save it for others to use.
|
||||||
DepNode::MetaData(def_id) if !def_id.is_local() => {
|
DepNode::MetaData(def_id) if !def_id.is_local() => {
|
||||||
Some((def_id, self.metadata_hash(def_id)))
|
Some(self.metadata_hash(def_id))
|
||||||
}
|
}
|
||||||
|
|
||||||
_ => {
|
_ => {
|
||||||
|
@ -72,21 +91,6 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn hir_hash(&mut self, def_id: DefId) -> u64 {
|
|
||||||
assert!(def_id.is_local(),
|
|
||||||
"cannot hash HIR for non-local def-id {:?} => {:?}",
|
|
||||||
def_id,
|
|
||||||
self.tcx.item_path_str(def_id));
|
|
||||||
|
|
||||||
assert!(!self.tcx.map.is_inlined_def_id(def_id),
|
|
||||||
"cannot hash HIR for inlined def-id {:?} => {:?}",
|
|
||||||
def_id,
|
|
||||||
self.tcx.item_path_str(def_id));
|
|
||||||
|
|
||||||
// FIXME(#32753) -- should we use a distinct hash here
|
|
||||||
self.tcx.calculate_item_hash(def_id)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn metadata_hash(&mut self, def_id: DefId) -> u64 {
|
fn metadata_hash(&mut self, def_id: DefId) -> u64 {
|
||||||
debug!("metadata_hash(def_id={:?})", def_id);
|
debug!("metadata_hash(def_id={:?})", def_id);
|
||||||
|
|
||||||
|
|
|
@ -22,6 +22,7 @@ use std::io::Read;
|
||||||
use std::fs::{self, File};
|
use std::fs::{self, File};
|
||||||
use std::path::{Path};
|
use std::path::{Path};
|
||||||
|
|
||||||
|
use IncrementalHashesMap;
|
||||||
use super::data::*;
|
use super::data::*;
|
||||||
use super::directory::*;
|
use super::directory::*;
|
||||||
use super::dirty_clean;
|
use super::dirty_clean;
|
||||||
|
@ -38,16 +39,18 @@ type CleanEdges = Vec<(DepNode<DefId>, DepNode<DefId>)>;
|
||||||
/// early in compilation, before we've really done any work, but
|
/// early in compilation, before we've really done any work, but
|
||||||
/// actually it doesn't matter all that much.) See `README.md` for
|
/// actually it doesn't matter all that much.) See `README.md` for
|
||||||
/// more general overview.
|
/// more general overview.
|
||||||
pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
|
pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
|
incremental_hashes_map: &IncrementalHashesMap) {
|
||||||
if tcx.sess.opts.incremental.is_none() {
|
if tcx.sess.opts.incremental.is_none() {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
let _ignore = tcx.dep_graph.in_ignore();
|
let _ignore = tcx.dep_graph.in_ignore();
|
||||||
load_dep_graph_if_exists(tcx);
|
load_dep_graph_if_exists(tcx, incremental_hashes_map);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
|
fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
|
incremental_hashes_map: &IncrementalHashesMap) {
|
||||||
let dep_graph_path = dep_graph_path(tcx).unwrap();
|
let dep_graph_path = dep_graph_path(tcx).unwrap();
|
||||||
let dep_graph_data = match load_data(tcx.sess, &dep_graph_path) {
|
let dep_graph_data = match load_data(tcx.sess, &dep_graph_path) {
|
||||||
Some(p) => p,
|
Some(p) => p,
|
||||||
|
@ -60,7 +63,7 @@ fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
|
||||||
None => return // no file
|
None => return // no file
|
||||||
};
|
};
|
||||||
|
|
||||||
match decode_dep_graph(tcx, &dep_graph_data, &work_products_data) {
|
match decode_dep_graph(tcx, incremental_hashes_map, &dep_graph_data, &work_products_data) {
|
||||||
Ok(dirty_nodes) => dirty_nodes,
|
Ok(dirty_nodes) => dirty_nodes,
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
tcx.sess.warn(
|
tcx.sess.warn(
|
||||||
|
@ -97,6 +100,7 @@ fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
|
||||||
/// Decode the dep graph and load the edges/nodes that are still clean
|
/// Decode the dep graph and load the edges/nodes that are still clean
|
||||||
/// into `tcx.dep_graph`.
|
/// into `tcx.dep_graph`.
|
||||||
pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
|
incremental_hashes_map: &IncrementalHashesMap,
|
||||||
dep_graph_data: &[u8],
|
dep_graph_data: &[u8],
|
||||||
work_products_data: &[u8])
|
work_products_data: &[u8])
|
||||||
-> Result<(), Error>
|
-> Result<(), Error>
|
||||||
|
@ -133,7 +137,10 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
// reason for this is that this way we can include nodes that have
|
// reason for this is that this way we can include nodes that have
|
||||||
// been removed (which no longer have a `DefId` in the current
|
// been removed (which no longer have a `DefId` in the current
|
||||||
// compilation).
|
// compilation).
|
||||||
let dirty_raw_source_nodes = dirty_nodes(tcx, &serialized_dep_graph.hashes, &retraced);
|
let dirty_raw_source_nodes = dirty_nodes(tcx,
|
||||||
|
incremental_hashes_map,
|
||||||
|
&serialized_dep_graph.hashes,
|
||||||
|
&retraced);
|
||||||
|
|
||||||
// Create a list of (raw-source-node ->
|
// Create a list of (raw-source-node ->
|
||||||
// retracted-target-node) edges. In the process of retracing the
|
// retracted-target-node) edges. In the process of retracing the
|
||||||
|
@ -206,15 +213,16 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
/// Computes which of the original set of def-ids are dirty. Stored in
|
/// Computes which of the original set of def-ids are dirty. Stored in
|
||||||
/// a bit vector where the index is the DefPathIndex.
|
/// a bit vector where the index is the DefPathIndex.
|
||||||
fn dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
fn dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
hashes: &[SerializedHash],
|
incremental_hashes_map: &IncrementalHashesMap,
|
||||||
|
serialized_hashes: &[SerializedHash],
|
||||||
retraced: &RetracedDefIdDirectory)
|
retraced: &RetracedDefIdDirectory)
|
||||||
-> DirtyNodes {
|
-> DirtyNodes {
|
||||||
let mut hcx = HashContext::new(tcx);
|
let mut hcx = HashContext::new(tcx, incremental_hashes_map);
|
||||||
let mut dirty_nodes = FnvHashSet();
|
let mut dirty_nodes = FnvHashSet();
|
||||||
|
|
||||||
for hash in hashes {
|
for hash in serialized_hashes {
|
||||||
if let Some(dep_node) = retraced.map(&hash.dep_node) {
|
if let Some(dep_node) = retraced.map(&hash.dep_node) {
|
||||||
let (_, current_hash) = hcx.hash(&dep_node).unwrap();
|
let current_hash = hcx.hash(&dep_node).unwrap();
|
||||||
if current_hash == hash.hash {
|
if current_hash == hash.hash {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
|
@ -62,7 +62,7 @@ impl<'q> Predecessors<'q> {
|
||||||
let mut hashes = FnvHashMap();
|
let mut hashes = FnvHashMap();
|
||||||
for input in inputs.values().flat_map(|v| v.iter().cloned()) {
|
for input in inputs.values().flat_map(|v| v.iter().cloned()) {
|
||||||
hashes.entry(input)
|
hashes.entry(input)
|
||||||
.or_insert_with(|| hcx.hash(input).unwrap().1);
|
.or_insert_with(|| hcx.hash(input).unwrap());
|
||||||
}
|
}
|
||||||
|
|
||||||
Predecessors {
|
Predecessors {
|
||||||
|
|
|
@ -21,20 +21,22 @@ use std::io::{self, Cursor, Write};
|
||||||
use std::fs::{self, File};
|
use std::fs::{self, File};
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use IncrementalHashesMap;
|
||||||
use super::data::*;
|
use super::data::*;
|
||||||
use super::directory::*;
|
use super::directory::*;
|
||||||
use super::hash::*;
|
use super::hash::*;
|
||||||
use super::preds::*;
|
use super::preds::*;
|
||||||
use super::util::*;
|
use super::util::*;
|
||||||
|
|
||||||
pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
|
pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
|
incremental_hashes_map: &IncrementalHashesMap) {
|
||||||
debug!("save_dep_graph()");
|
debug!("save_dep_graph()");
|
||||||
let _ignore = tcx.dep_graph.in_ignore();
|
let _ignore = tcx.dep_graph.in_ignore();
|
||||||
let sess = tcx.sess;
|
let sess = tcx.sess;
|
||||||
if sess.opts.incremental.is_none() {
|
if sess.opts.incremental.is_none() {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
let mut hcx = HashContext::new(tcx);
|
let mut hcx = HashContext::new(tcx, incremental_hashes_map);
|
||||||
let mut builder = DefIdDirectoryBuilder::new(tcx);
|
let mut builder = DefIdDirectoryBuilder::new(tcx);
|
||||||
let query = tcx.dep_graph.query();
|
let query = tcx.dep_graph.query();
|
||||||
let preds = Predecessors::new(&query, &mut hcx);
|
let preds = Predecessors::new(&query, &mut hcx);
|
||||||
|
|
|
@ -26,10 +26,10 @@ use CrateTranslation;
|
||||||
use util::common::time;
|
use util::common::time;
|
||||||
use util::fs::fix_windows_verbatim_for_gcc;
|
use util::fs::fix_windows_verbatim_for_gcc;
|
||||||
use rustc::dep_graph::DepNode;
|
use rustc::dep_graph::DepNode;
|
||||||
use rustc::ty::TyCtxt;
|
use rustc::hir::svh::Svh;
|
||||||
use rustc_back::tempdir::TempDir;
|
use rustc_back::tempdir::TempDir;
|
||||||
|
use rustc_incremental::IncrementalHashesMap;
|
||||||
|
|
||||||
use rustc_incremental::SvhCalculate;
|
|
||||||
use std::ascii;
|
use std::ascii;
|
||||||
use std::char;
|
use std::char;
|
||||||
use std::env;
|
use std::env;
|
||||||
|
@ -125,12 +125,12 @@ pub fn find_crate_name(sess: Option<&Session>,
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn build_link_meta<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
pub fn build_link_meta(incremental_hashes_map: &IncrementalHashesMap,
|
||||||
name: &str)
|
name: &str)
|
||||||
-> LinkMeta {
|
-> LinkMeta {
|
||||||
let r = LinkMeta {
|
let r = LinkMeta {
|
||||||
crate_name: name.to_owned(),
|
crate_name: name.to_owned(),
|
||||||
crate_hash: tcx.calculate_krate_hash(),
|
crate_hash: Svh::new(incremental_hashes_map[&DepNode::Krate]),
|
||||||
};
|
};
|
||||||
info!("{:?}", r);
|
info!("{:?}", r);
|
||||||
return r;
|
return r;
|
||||||
|
|
|
@ -48,6 +48,7 @@ use rustc::hir::map as hir_map;
|
||||||
use rustc::util::common::time;
|
use rustc::util::common::time;
|
||||||
use rustc::mir::mir_map::MirMap;
|
use rustc::mir::mir_map::MirMap;
|
||||||
use rustc_data_structures::graph::OUTGOING;
|
use rustc_data_structures::graph::OUTGOING;
|
||||||
|
use rustc_incremental::IncrementalHashesMap;
|
||||||
use session::config::{self, NoDebugInfo, FullDebugInfo};
|
use session::config::{self, NoDebugInfo, FullDebugInfo};
|
||||||
use session::Session;
|
use session::Session;
|
||||||
use _match;
|
use _match;
|
||||||
|
@ -2481,7 +2482,8 @@ pub fn filter_reachable_ids(tcx: TyCtxt, reachable: NodeSet) -> NodeSet {
|
||||||
|
|
||||||
pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
mir_map: &MirMap<'tcx>,
|
mir_map: &MirMap<'tcx>,
|
||||||
analysis: ty::CrateAnalysis)
|
analysis: ty::CrateAnalysis,
|
||||||
|
incremental_hashes_map: &IncrementalHashesMap)
|
||||||
-> CrateTranslation {
|
-> CrateTranslation {
|
||||||
let _task = tcx.dep_graph.in_task(DepNode::TransCrate);
|
let _task = tcx.dep_graph.in_task(DepNode::TransCrate);
|
||||||
|
|
||||||
|
@ -2506,7 +2508,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
tcx.sess.opts.debug_assertions
|
tcx.sess.opts.debug_assertions
|
||||||
};
|
};
|
||||||
|
|
||||||
let link_meta = link::build_link_meta(tcx, name);
|
let link_meta = link::build_link_meta(incremental_hashes_map, name);
|
||||||
|
|
||||||
let shared_ccx = SharedCrateContext::new(tcx,
|
let shared_ccx = SharedCrateContext::new(tcx,
|
||||||
&mir_map,
|
&mir_map,
|
||||||
|
|
|
@ -159,7 +159,7 @@ pub fn run_core(search_paths: SearchPaths,
|
||||||
resolutions,
|
resolutions,
|
||||||
&arenas,
|
&arenas,
|
||||||
&name,
|
&name,
|
||||||
|tcx, _, analysis, result| {
|
|tcx, _, analysis, _, result| {
|
||||||
if let Err(_) = result {
|
if let Err(_) = result {
|
||||||
sess.fatal("Compilation failed, aborting rustdoc");
|
sess.fatal("Compilation failed, aborting rustdoc");
|
||||||
}
|
}
|
||||||
|
|
39
src/test/incremental/crate_hash_reorder.rs
Normal file
39
src/test/incremental/crate_hash_reorder.rs
Normal file
|
@ -0,0 +1,39 @@
|
||||||
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
|
||||||
|
// file at the top-level directory of this distribution and at
|
||||||
|
// http://rust-lang.org/COPYRIGHT.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||||
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||||
|
// option. This file may not be copied, modified, or distributed
|
||||||
|
// except according to those terms.
|
||||||
|
|
||||||
|
// Test that the crate hash is not affected by reordering items.
|
||||||
|
|
||||||
|
// revisions:rpass1 rpass2 rpass3
|
||||||
|
// compile-flags: -Z query-dep-graph
|
||||||
|
|
||||||
|
#![feature(rustc_attrs)]
|
||||||
|
|
||||||
|
// Check that reordering otherwise identical items is not considered a
|
||||||
|
// change at all.
|
||||||
|
#[rustc_clean(label="Krate", cfg="rpass2")]
|
||||||
|
|
||||||
|
// But removing an item, naturally, is.
|
||||||
|
#[rustc_dirty(label="Krate", cfg="rpass3")]
|
||||||
|
|
||||||
|
#[cfg(rpass1)]
|
||||||
|
pub struct X {
|
||||||
|
pub x: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Y {
|
||||||
|
pub x: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(rpass2)]
|
||||||
|
pub struct X {
|
||||||
|
pub x: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn main() { }
|
21
src/test/incremental/issue-35593.rs
Normal file
21
src/test/incremental/issue-35593.rs
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
|
||||||
|
// file at the top-level directory of this distribution and at
|
||||||
|
// http://rust-lang.org/COPYRIGHT.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||||
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||||
|
// option. This file may not be copied, modified, or distributed
|
||||||
|
// except according to those terms.
|
||||||
|
|
||||||
|
// Regression test for #35593. Check that we can reuse this trivially
|
||||||
|
// equal example.
|
||||||
|
|
||||||
|
// revisions:rpass1 rpass2
|
||||||
|
|
||||||
|
#![feature(rustc_attrs)]
|
||||||
|
#![rustc_partition_reused(module="issue_35593", cfg="rpass2")]
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
println!("hello world");
|
||||||
|
}
|
|
@ -17,10 +17,6 @@
|
||||||
// Here the only thing which changes is the string constant in `x`.
|
// Here the only thing which changes is the string constant in `x`.
|
||||||
// Therefore, the compiler deduces (correctly) that typeck is not
|
// Therefore, the compiler deduces (correctly) that typeck is not
|
||||||
// needed even for callers of `x`.
|
// needed even for callers of `x`.
|
||||||
//
|
|
||||||
// It is not entirely clear why `TransCrateItem` invalidates `y` and
|
|
||||||
// `z`, actually, I think it's because of the structure of
|
|
||||||
// trans. -nmatsakis
|
|
||||||
|
|
||||||
fn main() { }
|
fn main() { }
|
||||||
|
|
||||||
|
@ -41,10 +37,8 @@ mod x {
|
||||||
mod y {
|
mod y {
|
||||||
use x;
|
use x;
|
||||||
|
|
||||||
// FIXME(#35078) -- when body of `x` changes, we treat it as
|
#[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
|
||||||
// though signature changed.
|
#[rustc_clean(label="TransCrateItem", cfg="rpass2")]
|
||||||
#[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
|
|
||||||
#[rustc_dirty(label="TransCrateItem", cfg="rpass2")]
|
|
||||||
pub fn y() {
|
pub fn y() {
|
||||||
x::x();
|
x::x();
|
||||||
}
|
}
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue