incr.comp.: Hash more pieces of crate metadata to detect changes there.

parent: 70198a0a44
commit: 6a5e2a5a9e

32 changed files with 1531 additions and 882 deletions
@@ -51,6 +51,9 @@ pub enum DepNode<D: Clone + Debug> {
     // in an extern crate.
     MetaData(D),
 
+    // Represents some piece of metadata global to its crate.
+    GlobalMetaData(D, GlobalMetaDataKind),
+
     // Represents some artifact that we save to disk. Note that these
     // do not have a def-id as part of their identifier.
     WorkProduct(Arc<WorkProductId>),

@@ -79,7 +82,6 @@ pub enum DepNode<D: Clone + Debug> {
     MirKeys,
     LateLintCheck,
     TransCrateItem(D),
-    TransInlinedItem(D),
     TransWriteMetadata,
     CrateVariances,
 

@@ -157,6 +159,7 @@ pub enum DepNode<D: Clone + Debug> {
     DefSpan(D),
     Stability(D),
     Deprecation(D),
+    FileMap(D, Arc<String>),
 }
 
 impl<D: Clone + Debug> DepNode<D> {

@@ -234,7 +237,6 @@ impl<D: Clone + Debug> DepNode<D> {
             RegionMaps(ref d) => op(d).map(RegionMaps),
             RvalueCheck(ref d) => op(d).map(RvalueCheck),
             TransCrateItem(ref d) => op(d).map(TransCrateItem),
-            TransInlinedItem(ref d) => op(d).map(TransInlinedItem),
             AssociatedItems(ref d) => op(d).map(AssociatedItems),
             ItemSignature(ref d) => op(d).map(ItemSignature),
             ItemVariances(ref d) => op(d).map(ItemVariances),

@@ -271,6 +273,8 @@ impl<D: Clone + Debug> DepNode<D> {
             DefSpan(ref d) => op(d).map(DefSpan),
             Stability(ref d) => op(d).map(Stability),
             Deprecation(ref d) => op(d).map(Deprecation),
+            GlobalMetaData(ref d, kind) => op(d).map(|d| GlobalMetaData(d, kind)),
+            FileMap(ref d, ref file_name) => op(d).map(|d| FileMap(d, file_name.clone())),
         }
     }
 }

@@ -282,3 +286,16 @@ impl<D: Clone + Debug> DepNode<D> {
 /// them even in the absence of a tcx.)
 #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
 pub struct WorkProductId(pub String);
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
+pub enum GlobalMetaDataKind {
+    Krate,
+    CrateDeps,
+    DylibDependencyFormats,
+    LangItems,
+    LangItemsMissing,
+    NativeLibraries,
+    CodeMap,
+    Impls,
+    ExportedSymbols,
+}
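Note: the new `GlobalMetaData(D, GlobalMetaDataKind)` and `FileMap(D, Arc<String>)` variants give crate-global metadata tables and individual source files their own dependency-graph nodes, so a change there no longer has to be folded into the per-item `MetaData(D)` nodes. A standalone sketch of the shape of these keys (simplified stand-in types, not the rustc definitions above):

// Standalone sketch, not the rustc types: what the new dep-node variants key on.
// Crate-global tables are keyed by the owning crate plus a GlobalMetaDataKind;
// source files are keyed by the owning crate plus the file name.
#[derive(Clone, Debug, PartialEq)]
enum GlobalMetaDataKind { Krate, CrateDeps, LangItems, NativeLibraries, ExportedSymbols }

#[derive(Clone, Debug, PartialEq)]
enum DepNode {
    MetaData(u32),                           // per-item, def-id simplified to u32
    GlobalMetaData(u32, GlobalMetaDataKind), // one node per crate-global table
    FileMap(u32, String),                    // one node per source file
}

fn main() {
    // A change to an upstream crate's dependency list now dirties only the
    // readers of this node, not everything that touched that crate's metadata.
    let node = DepNode::GlobalMetaData(0, GlobalMetaDataKind::CrateDeps);
    println!("{:?}", node);
}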
@@ -22,6 +22,7 @@ mod thread;
 pub use self::dep_tracking_map::{DepTrackingMap, DepTrackingMapConfig};
 pub use self::dep_node::DepNode;
 pub use self::dep_node::WorkProductId;
+pub use self::dep_node::GlobalMetaDataKind;
 pub use self::graph::DepGraph;
 pub use self::graph::WorkProduct;
 pub use self::query::DepGraphQuery;

@@ -66,3 +66,7 @@ impl Decodable for Svh {
             .map(Svh::new)
     }
 }
+
+impl_stable_hash_for!(struct Svh {
+    hash
+});
@@ -8,10 +8,14 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ty::TyCtxt;
+use dep_graph::{DepGraph, DepNode};
+use hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX};
+use rustc_data_structures::bitvec::BitVector;
 use std::rc::Rc;
+use std::sync::Arc;
 use syntax::codemap::CodeMap;
 use syntax_pos::{BytePos, FileMap};
+use ty::TyCtxt;
 
 #[derive(Clone)]
 struct CacheEntry {

@@ -20,30 +24,37 @@ struct CacheEntry {
     line_start: BytePos,
     line_end: BytePos,
     file: Rc<FileMap>,
+    file_index: usize,
 }
 
 pub struct CachingCodemapView<'tcx> {
     codemap: &'tcx CodeMap,
     line_cache: [CacheEntry; 3],
     time_stamp: usize,
+    dep_graph: DepGraph,
+    dep_tracking_reads: BitVector,
 }
 
 impl<'tcx> CachingCodemapView<'tcx> {
     pub fn new<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> CachingCodemapView<'tcx> {
         let codemap = tcx.sess.codemap();
-        let first_file = codemap.files.borrow()[0].clone();
+        let files = codemap.files_untracked();
+        let first_file = files[0].clone();
         let entry = CacheEntry {
             time_stamp: 0,
             line_number: 0,
             line_start: BytePos(0),
             line_end: BytePos(0),
             file: first_file,
+            file_index: 0,
         };
 
         CachingCodemapView {
+            dep_graph: tcx.dep_graph.clone(),
             codemap: codemap,
             line_cache: [entry.clone(), entry.clone(), entry.clone()],
             time_stamp: 0,
+            dep_tracking_reads: BitVector::new(files.len()),
         }
     }
 

@@ -56,6 +67,10 @@ impl<'tcx> CachingCodemapView<'tcx> {
         for cache_entry in self.line_cache.iter_mut() {
             if pos >= cache_entry.line_start && pos < cache_entry.line_end {
                 cache_entry.time_stamp = self.time_stamp;
+                if self.dep_tracking_reads.insert(cache_entry.file_index) {
+                    self.dep_graph.read(dep_node(cache_entry));
+                }
+
                 return Some((cache_entry.file.clone(),
                              cache_entry.line_number,
                              pos - cache_entry.line_start));

@@ -75,7 +90,7 @@ impl<'tcx> CachingCodemapView<'tcx> {
         // If the entry doesn't point to the correct file, fix it up
         if pos < cache_entry.file.start_pos || pos >= cache_entry.file.end_pos {
             let file_valid;
-            let files = self.codemap.files.borrow();
+            let files = self.codemap.files_untracked();
 
             if files.len() > 0 {
                 let file_index = self.codemap.lookup_filemap_idx(pos);

@@ -83,6 +98,7 @@ impl<'tcx> CachingCodemapView<'tcx> {
 
                 if pos >= file.start_pos && pos < file.end_pos {
                     cache_entry.file = file;
+                    cache_entry.file_index = file_index;
                     file_valid = true;
                 } else {
                     file_valid = false;

@@ -104,8 +120,21 @@ impl<'tcx> CachingCodemapView<'tcx> {
         cache_entry.line_end = line_bounds.1;
         cache_entry.time_stamp = self.time_stamp;
 
+        if self.dep_tracking_reads.insert(cache_entry.file_index) {
+            self.dep_graph.read(dep_node(cache_entry));
+        }
+
         return Some((cache_entry.file.clone(),
                      cache_entry.line_number,
                      pos - cache_entry.line_start));
     }
 }
+
+fn dep_node(cache_entry: &CacheEntry) -> DepNode<DefId> {
+    let def_id = DefId {
+        krate: CrateNum::from_u32(cache_entry.file.crate_of_origin),
+        index: CRATE_DEF_INDEX,
+    };
+    let name = Arc::new(cache_entry.file.name.clone());
+    DepNode::FileMap(def_id, name)
+}
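Note: the caching view registers a dep-graph read of a file's `DepNode::FileMap` node the first time a position inside that file is resolved; the `BitVector` keeps the read from being re-registered on every span lookup. A minimal standalone model of that deduplication (illustrative types, not the rustc API):

// Record a dependency-graph read only the first time a given file index is touched.
struct ReadDeduper {
    seen: Vec<bool>,   // one flag per file in the codemap
    reads: Vec<usize>, // stands in for dep_graph.read(...)
}

impl ReadDeduper {
    fn touch(&mut self, file_index: usize) {
        if !self.seen[file_index] {
            self.seen[file_index] = true;
            self.reads.push(file_index); // would be dep_graph.read(dep_node) in rustc
        }
    }
}

fn main() {
    let mut d = ReadDeduper { seen: vec![false; 3], reads: Vec::new() };
    d.touch(1);
    d.touch(1); // second lookup in the same file: no new read recorded
    d.touch(2);
    assert_eq!(d.reads, vec![1, 2]);
}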
src/librustc/ich/impls_cstore.rs (new file, 40 lines)
@@ -0,0 +1,40 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This module contains `HashStable` implementations for various data types
+//! from rustc::middle::cstore in no particular order.
+
+use middle;
+
+impl_stable_hash_for!(enum middle::cstore::DepKind {
+    UnexportedMacrosOnly,
+    MacrosOnly,
+    Implicit,
+    Explicit
+});
+
+impl_stable_hash_for!(enum middle::cstore::NativeLibraryKind {
+    NativeStatic,
+    NativeStaticNobundle,
+    NativeFramework,
+    NativeUnknown
+});
+
+impl_stable_hash_for!(struct middle::cstore::NativeLibrary {
+    kind,
+    name,
+    cfg,
+    foreign_items
+});
+
+impl_stable_hash_for!(enum middle::cstore::LinkagePreference {
+    RequireDynamic,
+    RequireStatic
+});
@@ -1120,3 +1120,11 @@ impl_stable_hash_for!(struct hir::def::Export {
     def,
     span
 });
+
+impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ::middle::lang_items::LangItem {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          _: &mut StableHashingContext<'a, 'tcx>,
+                                          hasher: &mut StableHasher<W>) {
+        ::std::hash::Hash::hash(self, hasher);
+    }
+}
@@ -19,7 +19,9 @@ use std::mem;
 use syntax::ast;
 use syntax::parse::token;
 use syntax::tokenstream;
-use syntax_pos::Span;
+use syntax_pos::{Span, FileMap};
 
+use hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX};
+
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
                                            StableHasherResult};

@@ -299,3 +301,79 @@ fn hash_token<'a, 'tcx, W: StableHasherResult>(token: &token::Token,
         token::Token::Shebang(val) => val.hash_stable(hcx, hasher),
     }
 }
+
+impl_stable_hash_for_spanned!(::syntax::ast::NestedMetaItemKind);
+
+impl_stable_hash_for!(enum ::syntax::ast::NestedMetaItemKind {
+    MetaItem(meta_item),
+    Literal(lit)
+});
+
+impl_stable_hash_for!(struct ::syntax::ast::MetaItem {
+    name,
+    node,
+    span
+});
+
+impl_stable_hash_for!(enum ::syntax::ast::MetaItemKind {
+    Word,
+    List(nested_items),
+    NameValue(lit)
+});
+
+impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for FileMap {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'a, 'tcx>,
+                                          hasher: &mut StableHasher<W>) {
+        let FileMap {
+            ref name,
+            name_was_remapped,
+            crate_of_origin,
+            // Do not hash the source as it is not encoded
+            src: _,
+            start_pos,
+            end_pos: _,
+            ref lines,
+            ref multibyte_chars,
+        } = *self;
+
+        name.hash_stable(hcx, hasher);
+        name_was_remapped.hash_stable(hcx, hasher);
+
+        DefId {
+            krate: CrateNum::from_u32(crate_of_origin),
+            index: CRATE_DEF_INDEX,
+        }.hash_stable(hcx, hasher);
+
+        // We only hash the relative position within this filemap
+        let lines = lines.borrow();
+        lines.len().hash_stable(hcx, hasher);
+        for &line in lines.iter() {
+            stable_byte_pos(line, start_pos).hash_stable(hcx, hasher);
+        }
+
+        // We only hash the relative position within this filemap
+        let multibyte_chars = multibyte_chars.borrow();
+        multibyte_chars.len().hash_stable(hcx, hasher);
+        for &char_pos in multibyte_chars.iter() {
+            stable_multibyte_char(char_pos, start_pos).hash_stable(hcx, hasher);
+        }
+    }
+}
+
+fn stable_byte_pos(pos: ::syntax_pos::BytePos,
+                   filemap_start: ::syntax_pos::BytePos)
+                   -> u32 {
+    pos.0 - filemap_start.0
+}
+
+fn stable_multibyte_char(mbc: ::syntax_pos::MultiByteChar,
+                         filemap_start: ::syntax_pos::BytePos)
+                         -> (u32, u32) {
+    let ::syntax_pos::MultiByteChar {
+        pos,
+        bytes,
+    } = mbc;
+
+    (pos.0 - filemap_start.0, bytes as u32)
+}
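Note: the `HashStable` impl for `FileMap` hashes line starts and multibyte-char offsets relative to the file's own `start_pos`, so the fingerprint does not change when the file merely shifts within the codemap's global byte-position space (for example because an earlier file grew). A small self-contained illustration of why relative positions are the stable choice:

// Standalone illustration, not rustc types: relative positions are identical
// for the same file contents regardless of where the file sits in the codemap.
fn relative_lines(file_start: u32, line_starts: &[u32]) -> Vec<u32> {
    line_starts.iter().map(|&p| p - file_start).collect()
}

fn main() {
    // Same file, loaded at two different global offsets:
    let a = relative_lines(0, &[0, 10, 25]);
    let b = relative_lines(500, &[500, 510, 525]);
    assert_eq!(a, b); // identical relative positions -> identical hash input
}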
@@ -19,6 +19,7 @@ mod caching_codemap_view;
 mod hcx;
 
 mod impls_const_math;
+mod impls_cstore;
 mod impls_hir;
 mod impls_mir;
 mod impls_ty;
@@ -23,6 +23,7 @@
 // probably get a better home if someone can find one.
 
 use hir::def;
+use dep_graph::DepNode;
 use hir::def_id::{CrateNum, DefId, DefIndex};
 use hir::map as hir_map;
 use hir::map::definitions::{Definitions, DefKey, DisambiguatedDefPathData};

@@ -161,7 +162,16 @@ pub struct ExternCrate {
 
 pub struct EncodedMetadata {
     pub raw_data: Vec<u8>,
-    pub hashes: Vec<EncodedMetadataHash>,
+    pub hashes: EncodedMetadataHashes,
+}
+
+impl EncodedMetadata {
+    pub fn new() -> EncodedMetadata {
+        EncodedMetadata {
+            raw_data: Vec::new(),
+            hashes: EncodedMetadataHashes::new(),
+        }
+    }
 }
 
 /// The hash for some metadata that (when saving) will be exported

@@ -173,6 +183,24 @@ pub struct EncodedMetadataHash {
     pub hash: ich::Fingerprint,
 }
 
+/// The hash for some metadata that (when saving) will be exported
+/// from this crate, or which (when importing) was exported by an
+/// upstream crate.
+#[derive(Debug, RustcEncodable, RustcDecodable, Clone)]
+pub struct EncodedMetadataHashes {
+    pub entry_hashes: Vec<EncodedMetadataHash>,
+    pub global_hashes: Vec<(DepNode<()>, ich::Fingerprint)>,
+}
+
+impl EncodedMetadataHashes {
+    pub fn new() -> EncodedMetadataHashes {
+        EncodedMetadataHashes {
+            entry_hashes: Vec::new(),
+            global_hashes: Vec::new(),
+        }
+    }
+}
+
 /// A store of Rust crates, through with their metadata
 /// can be accessed.
 pub trait CrateStore {
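Note: `EncodedMetadataHashes` splits what used to be a single `Vec<EncodedMetadataHash>` into per-item `entry_hashes` and crate-global `global_hashes` keyed by a `DepNode<()>`. A simplified standalone model of that two-table bookkeeping (stand-in types, not the rustc definitions):

// Simplified model of the two hash tables carried in crate metadata.
use std::collections::HashMap;

type Fingerprint = u64;

#[derive(Hash, PartialEq, Eq, Clone, Debug)]
enum GlobalKind { Krate, CrateDeps, LangItems, NativeLibraries }

struct MetadataHashes {
    entry_hashes: HashMap<u32, Fingerprint>,         // per-item, keyed by def-index
    global_hashes: HashMap<GlobalKind, Fingerprint>, // crate-global tables
}

impl MetadataHashes {
    // Dirty-checking sketch: an item is dirty if its own fingerprint changed;
    // a consumer of a global table is dirty if that table's fingerprint changed.
    fn is_dirty_item(&self, old: &Self, def_index: u32) -> bool {
        self.entry_hashes.get(&def_index) != old.entry_hashes.get(&def_index)
    }
    fn is_dirty_global(&self, old: &Self, kind: &GlobalKind) -> bool {
        self.global_hashes.get(kind) != old.global_hashes.get(kind)
    }
}

fn main() {
    let mut old = MetadataHashes { entry_hashes: HashMap::new(), global_hashes: HashMap::new() };
    old.global_hashes.insert(GlobalKind::CrateDeps, 1);
    let mut new = MetadataHashes { entry_hashes: HashMap::new(), global_hashes: HashMap::new() };
    new.global_hashes.insert(GlobalKind::CrateDeps, 2);
    assert!(new.is_dirty_global(&old, &GlobalKind::CrateDeps));
    assert!(!new.is_dirty_item(&old, 0));
}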
@@ -11,8 +11,8 @@
 pub use self::code_stats::{CodeStats, DataTypeKind, FieldInfo};
 pub use self::code_stats::{SizeKind, TypeSizeInfo, VariantInfo};
 
-use dep_graph::DepGraph;
-use hir::def_id::{CrateNum, DefIndex};
+use dep_graph::{DepGraph, DepNode};
+use hir::def_id::{DefId, CrateNum, DefIndex, CRATE_DEF_INDEX};
 use lint;
 use middle::cstore::CrateStore;
 use middle::dependency_format;

@@ -32,7 +32,7 @@ use syntax::parse::ParseSess;
 use syntax::symbol::Symbol;
 use syntax::{ast, codemap};
 use syntax::feature_gate::AttributeType;
-use syntax_pos::{Span, MultiSpan};
+use syntax_pos::{Span, MultiSpan, FileMap};
 
 use rustc_back::{LinkerFlavor, PanicStrategy};
 use rustc_back::target::Target;

@@ -48,6 +48,7 @@ use std::io::Write;
 use std::rc::Rc;
 use std::fmt;
 use std::time::Duration;
+use std::sync::Arc;
 use libc::c_int;
 
 mod code_stats;

@@ -627,6 +628,22 @@ pub fn build_session_(sopts: config::Options,
         }
     };
     let target_cfg = config::build_target_config(&sopts, &span_diagnostic);
+
+    // Hook up the codemap with a callback that allows it to register FileMap
+    // accesses with the dependency graph.
+    let cm_depgraph = dep_graph.clone();
+    let codemap_dep_tracking_callback = Box::new(move |filemap: &FileMap| {
+        let def_id = DefId {
+            krate: CrateNum::from_u32(filemap.crate_of_origin),
+            index: CRATE_DEF_INDEX,
+        };
+        let name = Arc::new(filemap.name.clone());
+        let dep_node = DepNode::FileMap(def_id, name);
+
+        cm_depgraph.read(dep_node);
+    });
+    codemap.set_dep_tracking_callback(codemap_dep_tracking_callback);
+
     let p_s = parse::ParseSess::with_span_handler(span_diagnostic, codemap);
     let default_sysroot = match sopts.maybe_sysroot {
         Some(_) => None,
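Note: the session installs a dep-tracking callback on the codemap so that tracked `FileMap` accesses are reported as reads of the matching `DepNode::FileMap`, while `files_untracked()` remains available for callers that must not create edges. A miniature sketch of that callback pattern (illustrative names only, not rustc's API):

// The owner of the data invokes an injected closure on every tracked access,
// so the tracking policy lives outside the data structure itself.
struct FileTable {
    names: Vec<String>,
    on_access: Option<Box<dyn Fn(&str)>>,
}

impl FileTable {
    // Tracked access: report the read before handing out the data.
    fn file(&self, i: usize) -> &str {
        if let Some(ref cb) = self.on_access {
            cb(&self.names[i]); // e.g. dep_graph.read(DepNode::FileMap(..)) in rustc
        }
        &self.names[i]
    }
    // Untracked access for callers that must not create dependency edges.
    fn files_untracked(&self) -> &[String] {
        &self.names
    }
}

fn main() {
    let table = FileTable {
        names: vec!["lib.rs".into(), "foo.rs".into()],
        on_access: Some(Box::new(|n| println!("read dep-node for {}", n))),
    };
    let _tracked = table.file(0);       // fires the callback
    let _raw = table.files_untracked(); // silent
}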
@@ -283,6 +283,16 @@ impl<CTX> HashStable<CTX> for str {
     }
 }
 
+
+impl<CTX> HashStable<CTX> for String {
+    #[inline]
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut CTX,
+                                          hasher: &mut StableHasher<W>) {
+        (&self[..]).hash_stable(hcx, hasher);
+    }
+}
+
 impl<CTX> HashStable<CTX> for bool {
     #[inline]
     fn hash_stable<W: StableHasherResult>(&self,
@@ -1155,8 +1155,7 @@ fn write_out_deps(sess: &Session, outputs: &OutputFilenames, crate_name: &str) {
     // Build a list of files used to compile the output and
     // write Makefile-compatible dependency rules
     let files: Vec<String> = sess.codemap()
-                                 .files
-                                 .borrow()
+                                 .files()
                                  .iter()
                                  .filter(|fmap| fmap.is_real_file())
                                  .filter(|fmap| !fmap.is_imported())
@@ -29,9 +29,10 @@
 
 use std::cell::RefCell;
 use std::hash::Hash;
+use std::sync::Arc;
 use rustc::dep_graph::DepNode;
 use rustc::hir;
-use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
+use rustc::hir::def_id::{LOCAL_CRATE, CRATE_DEF_INDEX, DefId};
 use rustc::hir::itemlikevisit::ItemLikeVisitor;
 use rustc::ich::{Fingerprint, StableHashingContext};
 use rustc::ty::TyCtxt;

@@ -60,6 +61,10 @@ impl IncrementalHashesMap {
         }
     }
 
+    pub fn get(&self, k: &DepNode<DefId>) -> Option<&Fingerprint> {
+        self.hashes.get(k)
+    }
+
     pub fn insert(&mut self, k: DepNode<DefId>, v: Fingerprint) -> Option<Fingerprint> {
         self.hashes.insert(k, v)
     }

@@ -140,14 +145,34 @@ impl<'a, 'tcx: 'a> ComputeItemHashesVisitor<'a, 'tcx> {
         let hcx = &mut self.hcx;
         let mut item_hashes: Vec<_> =
             self.hashes.iter()
-                       .map(|(item_dep_node, &item_hash)| {
-                           // convert from a DepNode<DefId> tp a
-                           // DepNode<u64> where the u64 is the
-                           // hash of the def-id's def-path:
-                           let item_dep_node =
-                               item_dep_node.map_def(|&did| Some(hcx.def_path_hash(did)))
-                                            .unwrap();
-                           (item_dep_node, item_hash)
+                       .filter_map(|(item_dep_node, &item_hash)| {
+                           // This `match` determines what kinds of nodes
+                           // go into the SVH:
+                           match *item_dep_node {
+                               DepNode::Hir(_) |
+                               DepNode::HirBody(_) => {
+                                   // We want to incoporate these into the
+                                   // SVH.
+                               }
+                               DepNode::FileMap(..) => {
+                                   // These don't make a semantic
+                                   // difference, filter them out.
+                                   return None
+                               }
+                               ref other => {
+                                   bug!("Found unexpected DepNode during \
+                                         SVH computation: {:?}",
+                                        other)
+                               }
+                           }
+
+                           // Convert from a DepNode<DefId> to a
+                           // DepNode<u64> where the u64 is the hash of
+                           // the def-id's def-path:
+                           let item_dep_node =
+                               item_dep_node.map_def(|&did| Some(hcx.def_path_hash(did)))
+                                            .unwrap();
+                           Some((item_dep_node, item_hash))
                        })
                        .collect();
         item_hashes.sort_unstable(); // avoid artificial dependencies on item ordering

@@ -229,6 +254,24 @@ pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
             visitor.compute_and_store_ich_for_item_like(DepNode::Hir(def_id), false, macro_def);
             visitor.compute_and_store_ich_for_item_like(DepNode::HirBody(def_id), true, macro_def);
         }
+
+        for filemap in tcx.sess
+                          .codemap()
+                          .files_untracked()
+                          .iter()
+                          .filter(|fm| !fm.is_imported()) {
+            assert_eq!(LOCAL_CRATE.as_u32(), filemap.crate_of_origin);
+            let def_id = DefId {
+                krate: LOCAL_CRATE,
+                index: CRATE_DEF_INDEX,
+            };
+            let name = Arc::new(filemap.name.clone());
+            let dep_node = DepNode::FileMap(def_id, name);
+            let mut hasher = IchHasher::new();
+            filemap.hash_stable(&mut visitor.hcx, &mut hasher);
+            let fingerprint = hasher.finish();
+            visitor.hashes.insert(dep_node, fingerprint);
+        }
     });
 
     tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64);
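Note: the SVH computation now matches on the dep-node kind explicitly: `Hir`/`HirBody` fingerprints feed the SVH, while the newly recorded `FileMap` fingerprints are filtered out because source positions carry no semantic difference. A standalone sketch of that filtering rule (illustrative enum and function, not the rustc types):

enum NodeKind { Hir, HirBody, FileMap }

fn svh_inputs(hashes: Vec<(NodeKind, u64)>) -> Vec<u64> {
    hashes.into_iter()
          .filter_map(|(kind, hash)| match kind {
              NodeKind::Hir | NodeKind::HirBody => Some(hash),
              NodeKind::FileMap => None, // tracked for reuse, excluded from the SVH
          })
          .collect()
}

fn main() {
    let inputs = svh_inputs(vec![(NodeKind::Hir, 1), (NodeKind::FileMap, 2), (NodeKind::HirBody, 3)]);
    assert_eq!(inputs, vec![1, 3]);
}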
@@ -99,7 +99,11 @@ pub struct SerializedMetadataHashes {
     /// where `X` refers to some item in this crate. That `X` will be
     /// a `DefPathIndex` that gets retracted to the current `DefId`
     /// (matching the one found in this structure).
-    pub hashes: Vec<EncodedMetadataHash>,
+    pub entry_hashes: Vec<EncodedMetadataHash>,
+
+    /// This map contains fingerprints that are not specific to some DefId but
+    /// describe something global to the whole crate.
+    pub global_hashes: Vec<(DepNode<()>, Fingerprint)>,
 
     /// For each DefIndex (as it occurs in SerializedMetadataHash), this
     /// map stores the DefPathIndex (as it occurs in DefIdDirectory), so
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use rustc::dep_graph::DepNode;
-use rustc::hir::def_id::{CrateNum, DefId};
+use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE, CRATE_DEF_INDEX};
 use rustc::hir::svh::Svh;
 use rustc::ich::Fingerprint;
 use rustc::ty::TyCtxt;

@@ -23,11 +23,15 @@ use super::data::*;
 use super::fs::*;
 use super::file_format;
 
+use std::hash::Hash;
+use std::fmt::Debug;
+
 pub struct HashContext<'a, 'tcx: 'a> {
     pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
     incremental_hashes_map: &'a IncrementalHashesMap,
     item_metadata_hashes: FxHashMap<DefId, Fingerprint>,
     crate_hashes: FxHashMap<CrateNum, Svh>,
+    global_metadata_hashes: FxHashMap<DepNode<DefId>, Fingerprint>,
 }
 
 impl<'a, 'tcx> HashContext<'a, 'tcx> {

@@ -39,6 +43,7 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
             incremental_hashes_map: incremental_hashes_map,
             item_metadata_hashes: FxHashMap(),
             crate_hashes: FxHashMap(),
+            global_metadata_hashes: FxHashMap(),
         }
     }
 

@@ -46,9 +51,11 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
         match *dep_node {
             DepNode::Krate |
             DepNode::Hir(_) |
-            DepNode::HirBody(_) =>
+            DepNode::HirBody(_) |
+            DepNode::FileMap(..) =>
                 true,
-            DepNode::MetaData(def_id) => !def_id.is_local(),
+            DepNode::MetaData(def_id) |
+            DepNode::GlobalMetaData(def_id, _) => !def_id.is_local(),
             _ => false,
         }
     }

@@ -60,7 +67,8 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
             }
 
             // HIR nodes (which always come from our crate) are an input:
-            DepNode::Hir(def_id) | DepNode::HirBody(def_id) => {
+            DepNode::Hir(def_id) |
+            DepNode::HirBody(def_id) => {
                 assert!(def_id.is_local(),
                         "cannot hash HIR for non-local def-id {:?} => {:?}",
                         def_id,

@@ -69,12 +77,30 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
                 Some(self.incremental_hashes_map[dep_node])
             }
 
+            DepNode::FileMap(def_id, ref name) => {
+                if def_id.is_local() {
+                    Some(self.incremental_hashes_map[dep_node])
+                } else {
+                    Some(self.metadata_hash(DepNode::FileMap(def_id, name.clone()),
+                                            def_id.krate,
+                                            |this| &mut this.global_metadata_hashes))
+                }
+            }
+
             // MetaData from other crates is an *input* to us.
             // MetaData nodes from *our* crates are an *output*; we
             // don't hash them, but we do compute a hash for them and
             // save it for others to use.
             DepNode::MetaData(def_id) if !def_id.is_local() => {
-                Some(self.metadata_hash(def_id))
+                Some(self.metadata_hash(def_id,
+                                        def_id.krate,
+                                        |this| &mut this.item_metadata_hashes))
+            }
+
+            DepNode::GlobalMetaData(def_id, kind) => {
+                Some(self.metadata_hash(DepNode::GlobalMetaData(def_id, kind),
+                                        def_id.krate,
+                                        |this| &mut this.global_metadata_hashes))
             }
 
             _ => {

@@ -87,33 +113,37 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
         }
     }
 
-    fn metadata_hash(&mut self, def_id: DefId) -> Fingerprint {
-        debug!("metadata_hash(def_id={:?})", def_id);
+    fn metadata_hash<K, C>(&mut self,
+                           key: K,
+                           cnum: CrateNum,
+                           cache: C)
+                           -> Fingerprint
+        where K: Hash + Eq + Debug,
+              C: Fn(&mut Self) -> &mut FxHashMap<K, Fingerprint>,
+    {
+        debug!("metadata_hash(key={:?})", key);
 
-        assert!(!def_id.is_local());
+        debug_assert!(cnum != LOCAL_CRATE);
         loop {
             // check whether we have a result cached for this def-id
-            if let Some(&hash) = self.item_metadata_hashes.get(&def_id) {
-                debug!("metadata_hash: def_id={:?} hash={:?}", def_id, hash);
+            if let Some(&hash) = cache(self).get(&key) {
                 return hash;
             }
 
             // check whether we did not find detailed metadata for this
             // krate; in that case, we just use the krate's overall hash
-            if let Some(&svh) = self.crate_hashes.get(&def_id.krate) {
-                debug!("metadata_hash: def_id={:?} crate_hash={:?}", def_id, svh);
+            if let Some(&svh) = self.crate_hashes.get(&cnum) {
 
                 // micro-"optimization": avoid a cache miss if we ask
                 // for metadata from this particular def-id again.
                 let fingerprint = svh_to_fingerprint(svh);
-                self.item_metadata_hashes.insert(def_id, fingerprint);
+                cache(self).insert(key, fingerprint);
 
                 return fingerprint;
             }
 
             // otherwise, load the data and repeat.
-            self.load_data(def_id.krate);
-            assert!(self.crate_hashes.contains_key(&def_id.krate));
+            self.load_data(cnum);
+            assert!(self.crate_hashes.contains_key(&cnum));
         }
     }
 

@@ -191,7 +221,7 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
         }
 
         let serialized_hashes = SerializedMetadataHashes::decode(&mut decoder)?;
-        for serialized_hash in serialized_hashes.hashes {
+        for serialized_hash in serialized_hashes.entry_hashes {
             // the hashes are stored with just a def-index, which is
             // always relative to the old crate; convert that to use
             // our internal crate number

@@ -202,6 +232,24 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
             debug!("load_from_data: def_id={:?} hash={}", def_id, serialized_hash.hash);
             assert!(old.is_none(), "already have hash for {:?}", def_id);
         }
+
+        for (dep_node, fingerprint) in serialized_hashes.global_hashes {
+            // Here we need to remap the CrateNum in the DepNode.
+            let def_id = DefId { krate: cnum, index: CRATE_DEF_INDEX };
+            let dep_node = match dep_node {
+                DepNode::GlobalMetaData(_, kind) => DepNode::GlobalMetaData(def_id, kind),
+                DepNode::FileMap(_, name) => DepNode::FileMap(def_id, name),
+                other => {
+                    bug!("unexpected DepNode variant: {:?}", other)
+                }
+            };
+
+            // record the hash for this dep-node
+            debug!("load_from_data: def_node={:?} hash={}", dep_node, fingerprint);
+            let old = self.global_metadata_hashes.insert(dep_node.clone(), fingerprint);
+            assert!(old.is_none(), "already have hash for {:?}", dep_node);
+        }
+
         Ok(())
     }
 }
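Note: `metadata_hash` is generalized over the cache key so that the same fallback logic (use the per-entry hash if the upstream crate recorded one, otherwise fall back to its SVH) serves both per-item and global metadata nodes; callers pass a closure selecting which cache field to use. A self-contained sketch of that closure-as-cache-selector pattern (simplified stand-in types):

use std::collections::HashMap;

struct Ctx {
    item_hashes: HashMap<u32, u64>,
    global_hashes: HashMap<String, u64>,
}

// Return the cached hash for `key`, computing and caching `compute` otherwise.
fn cached_or<K, C>(ctx: &mut Ctx, key: K, compute: u64, cache: C) -> u64
    where K: std::hash::Hash + Eq,
          C: Fn(&mut Ctx) -> &mut HashMap<K, u64>,
{
    if let Some(&h) = cache(ctx).get(&key) {
        return h;
    }
    cache(ctx).insert(key, compute);
    compute
}

fn main() {
    let mut ctx = Ctx { item_hashes: HashMap::new(), global_hashes: HashMap::new() };
    let a = cached_or(&mut ctx, 7u32, 111, |c| &mut c.item_hashes);
    let b = cached_or(&mut ctx, "CrateDeps".to_string(), 222, |c| &mut c.global_hashes);
    assert_eq!((a, b), (111, 222));
}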
@@ -240,35 +240,40 @@ fn initial_dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let mut hcx = HashContext::new(tcx, incremental_hashes_map);
     let mut dirty_nodes = FxHashMap();
+
+    let print_removed_message = |dep_node: &DepNode<_>| {
+        if tcx.sess.opts.debugging_opts.incremental_dump_hash {
+            println!("node {:?} is dirty as it was removed", dep_node);
+        }
+
+        debug!("initial_dirty_nodes: {:?} is dirty as it was removed", dep_node);
+    };
+
     for hash in serialized_hashes {
         if let Some(dep_node) = retraced.map(&hash.dep_node) {
-            let current_hash = hcx.hash(&dep_node).unwrap();
+            if let Some(current_hash) = hcx.hash(&dep_node) {
             if current_hash == hash.hash {
                 debug!("initial_dirty_nodes: {:?} is clean (hash={:?})",
                        dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
                        current_hash);
                 continue;
             }
 
             if tcx.sess.opts.debugging_opts.incremental_dump_hash {
                 println!("node {:?} is dirty as hash is {:?} was {:?}",
                          dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
                          current_hash,
                          hash.hash);
             }
 
             debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}",
                    dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
                    current_hash,
                    hash.hash);
+            } else {
+                print_removed_message(&hash.dep_node);
+            }
         } else {
-            if tcx.sess.opts.debugging_opts.incremental_dump_hash {
-                println!("node {:?} is dirty as it was removed",
-                         hash.dep_node);
-            }
-
-            debug!("initial_dirty_nodes: {:?} is dirty as it was removed",
-                   hash.dep_node);
+            print_removed_message(&hash.dep_node);
         }
 
         dirty_nodes.insert(hash.dep_node.clone(), hash.dep_node.clone());

@@ -382,8 +387,8 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,
 
     debug!("load_prev_metadata_hashes() - Mapping DefIds");
 
-    assert_eq!(serialized_hashes.index_map.len(), serialized_hashes.hashes.len());
-    for serialized_hash in serialized_hashes.hashes {
+    assert_eq!(serialized_hashes.index_map.len(), serialized_hashes.entry_hashes.len());
+    for serialized_hash in serialized_hashes.entry_hashes {
         let def_path_index = serialized_hashes.index_map[&serialized_hash.def_index];
         if let Some(def_id) = retraced.def_id(def_path_index) {
             let old = output.insert(def_id, serialized_hash.hash);
@@ -12,7 +12,7 @@ use rustc::dep_graph::DepNode;
 use rustc::hir::def_id::DefId;
 use rustc::hir::svh::Svh;
 use rustc::ich::Fingerprint;
-use rustc::middle::cstore::EncodedMetadataHash;
+use rustc::middle::cstore::EncodedMetadataHashes;
 use rustc::session::Session;
 use rustc::ty::TyCtxt;
 use rustc_data_structures::fx::FxHashMap;

@@ -34,7 +34,7 @@ use super::work_product;
 
 pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                 incremental_hashes_map: &IncrementalHashesMap,
-                                metadata_hashes: &[EncodedMetadataHash],
+                                metadata_hashes: &EncodedMetadataHashes,
                                 svh: Svh) {
     debug!("save_dep_graph()");
     let _ignore = tcx.dep_graph.in_ignore();

@@ -240,18 +240,19 @@ pub fn encode_dep_graph(preds: &Predecessors,
 
 pub fn encode_metadata_hashes(tcx: TyCtxt,
                               svh: Svh,
-                              metadata_hashes: &[EncodedMetadataHash],
+                              metadata_hashes: &EncodedMetadataHashes,
                               builder: &mut DefIdDirectoryBuilder,
                               current_metadata_hashes: &mut FxHashMap<DefId, Fingerprint>,
                               encoder: &mut Encoder)
                               -> io::Result<()> {
     let mut serialized_hashes = SerializedMetadataHashes {
-        hashes: metadata_hashes.to_vec(),
+        entry_hashes: metadata_hashes.entry_hashes.to_vec(),
+        global_hashes: metadata_hashes.global_hashes.to_vec(),
         index_map: FxHashMap()
     };
 
     if tcx.sess.opts.debugging_opts.query_dep_graph {
-        for serialized_hash in &serialized_hashes.hashes {
+        for serialized_hash in &serialized_hashes.entry_hashes {
             let def_id = DefId::local(serialized_hash.def_index);
 
             // Store entry in the index_map
@@ -10,7 +10,7 @@
 
 use rustc::hir::intravisit::{Visitor, NestedVisitorMap};
 
-use index_builder::EntryBuilder;
+use isolated_encoder::IsolatedEncoder;
 use schema::*;
 
 use rustc::hir;

@@ -31,7 +31,7 @@ impl_stable_hash_for!(struct Ast<'tcx> {
     rvalue_promotable_to_static
 });
 
-impl<'a, 'b, 'tcx> EntryBuilder<'a, 'b, 'tcx> {
+impl<'a, 'b, 'tcx> IsolatedEncoder<'a, 'b, 'tcx> {
     pub fn encode_body(&mut self, body_id: hir::BodyId) -> Lazy<Ast<'tcx>> {
         let body = self.tcx.hir.body(body_id);
         let lazy_body = self.lazy(body);
@@ -12,9 +12,10 @@
 
 use cstore::{self, CStore, CrateSource, MetadataBlob};
 use locator::{self, CratePaths};
-use schema::CrateRoot;
+use schema::{CrateRoot, Tracked};
 
-use rustc::hir::def_id::{CrateNum, DefIndex};
+use rustc::dep_graph::{DepNode, GlobalMetaDataKind};
+use rustc::hir::def_id::{DefId, CrateNum, DefIndex, CRATE_DEF_INDEX};
 use rustc::hir::svh::Svh;
 use rustc::middle::cstore::DepKind;
 use rustc::session::Session;

@@ -311,7 +312,8 @@ impl<'a> CrateLoader<'a> {
             crate_root.def_path_table.decode(&metadata)
         });
 
-        let exported_symbols = crate_root.exported_symbols.decode(&metadata).collect();
+        let exported_symbols = crate_root.exported_symbols
+                                         .map(|x| x.decode(&metadata).collect());
 
         let mut cmeta = cstore::CrateMetadata {
             name: name,

@@ -333,16 +335,27 @@ impl<'a> CrateLoader<'a> {
                 rlib: rlib,
                 rmeta: rmeta,
             },
-            dllimport_foreign_items: FxHashSet(),
+            // Initialize this with an empty set. The field is populated below
+            // after we were able to deserialize its contents.
+            dllimport_foreign_items: Tracked::new(FxHashSet()),
         };
 
-        let dllimports: Vec<_> = cmeta.get_native_libraries().iter()
-            .filter(|lib| relevant_lib(self.sess, lib) &&
-                          lib.kind == cstore::NativeLibraryKind::NativeUnknown)
-            .flat_map(|lib| &lib.foreign_items)
-            .map(|id| *id)
+        let dllimports: Tracked<FxHashSet<_>> = cmeta
+            .root
+            .native_libraries
+            .map(|native_libraries| {
+                let native_libraries: Vec<_> = native_libraries.decode(&cmeta)
                     .collect();
-        cmeta.dllimport_foreign_items.extend(dllimports);
+                native_libraries
+                    .iter()
+                    .filter(|lib| relevant_lib(self.sess, lib) &&
+                                  lib.kind == cstore::NativeLibraryKind::NativeUnknown)
+                    .flat_map(|lib| lib.foreign_items.iter())
+                    .map(|id| *id)
+                    .collect()
+            });
+
+        cmeta.dllimport_foreign_items = dllimports;
 
         let cmeta = Rc::new(cmeta);
         self.cstore.set_crate_data(cnum, cmeta.clone());

@@ -493,10 +506,16 @@ impl<'a> CrateLoader<'a> {
             return cstore::CrateNumMap::new();
         }
 
+        let dep_node = DepNode::GlobalMetaData(DefId { krate, index: CRATE_DEF_INDEX },
+                                               GlobalMetaDataKind::CrateDeps);
+
        // The map from crate numbers in the crate we're resolving to local crate numbers.
        // We map 0 and all other holes in the map to our parent crate. The "additional"
        // self-dependencies should be harmless.
-        ::std::iter::once(krate).chain(crate_root.crate_deps.decode(metadata).map(|dep| {
+        ::std::iter::once(krate).chain(crate_root.crate_deps
+                                                 .get(&self.sess.dep_graph, dep_node)
+                                                 .decode(metadata)
+                                                 .map(|dep| {
             debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
             if dep.kind == DepKind::UnexportedMacrosOnly {
                 return krate;

@@ -654,7 +673,9 @@ impl<'a> CrateLoader<'a> {
 
     /// Look for a plugin registrar. Returns library path, crate
     /// SVH and DefIndex of the registrar function.
-    pub fn find_plugin_registrar(&mut self, span: Span, name: &str)
+    pub fn find_plugin_registrar(&mut self,
+                                 span: Span,
+                                 name: &str)
                                  -> Option<(PathBuf, Symbol, DefIndex)> {
         let ekrate = self.read_extension_crate(span, &ExternCrateInfo {
             name: Symbol::intern(name),

@@ -740,13 +761,17 @@ impl<'a> CrateLoader<'a> {
         let mut runtime_found = false;
         let mut needs_panic_runtime = attr::contains_name(&krate.attrs,
                                                           "needs_panic_runtime");
+
+        let dep_graph = &self.sess.dep_graph;
+
         self.cstore.iter_crate_data(|cnum, data| {
-            needs_panic_runtime = needs_panic_runtime || data.needs_panic_runtime();
-            if data.is_panic_runtime() {
+            needs_panic_runtime = needs_panic_runtime ||
+                                  data.needs_panic_runtime(dep_graph);
+            if data.is_panic_runtime(dep_graph) {
                 // Inject a dependency from all #![needs_panic_runtime] to this
                 // #![panic_runtime] crate.
                 self.inject_dependency_if(cnum, "a panic runtime",
-                                          &|data| data.needs_panic_runtime());
+                                          &|data| data.needs_panic_runtime(dep_graph));
                 runtime_found = runtime_found || data.dep_kind.get() == DepKind::Explicit;
             }
         });

@@ -782,11 +807,11 @@ impl<'a> CrateLoader<'a> {
 
         // Sanity check the loaded crate to ensure it is indeed a panic runtime
         // and the panic strategy is indeed what we thought it was.
-        if !data.is_panic_runtime() {
+        if !data.is_panic_runtime(dep_graph) {
             self.sess.err(&format!("the crate `{}` is not a panic runtime",
                                    name));
         }
-        if data.panic_strategy() != desired_strategy {
+        if data.panic_strategy(dep_graph) != desired_strategy {
             self.sess.err(&format!("the crate `{}` does not have the panic \
                                     strategy `{}`",
                                    name, desired_strategy.desc()));

@@ -794,7 +819,7 @@ impl<'a> CrateLoader<'a> {
 
         self.sess.injected_panic_runtime.set(Some(cnum));
         self.inject_dependency_if(cnum, "a panic runtime",
-                                  &|data| data.needs_panic_runtime());
+                                  &|data| data.needs_panic_runtime(dep_graph));
     }
 
     fn inject_sanitizer_runtime(&mut self) {

@@ -862,7 +887,7 @@ impl<'a> CrateLoader<'a> {
                                   PathKind::Crate, dep_kind);
 
             // Sanity check the loaded crate to ensure it is indeed a sanitizer runtime
-            if !data.is_sanitizer_runtime() {
+            if !data.is_sanitizer_runtime(&self.sess.dep_graph) {
                 self.sess.err(&format!("the crate `{}` is not a sanitizer runtime",
                                        name));
             }

@@ -878,12 +903,13 @@ impl<'a> CrateLoader<'a> {
         // also bail out as we don't need to implicitly inject one.
         let mut needs_allocator = false;
         let mut found_required_allocator = false;
+        let dep_graph = &self.sess.dep_graph;
         self.cstore.iter_crate_data(|cnum, data| {
-            needs_allocator = needs_allocator || data.needs_allocator();
-            if data.is_allocator() {
+            needs_allocator = needs_allocator || data.needs_allocator(dep_graph);
+            if data.is_allocator(dep_graph) {
                 info!("{} required by rlib and is an allocator", data.name());
                 self.inject_dependency_if(cnum, "an allocator",
-                                          &|data| data.needs_allocator());
+                                          &|data| data.needs_allocator(dep_graph));
                 found_required_allocator = found_required_allocator ||
                     data.dep_kind.get() == DepKind::Explicit;
             }

@@ -937,14 +963,14 @@ impl<'a> CrateLoader<'a> {
 
         // Sanity check the crate we loaded to ensure that it is indeed an
         // allocator.
-        if !data.is_allocator() {
+        if !data.is_allocator(dep_graph) {
             self.sess.err(&format!("the allocator crate `{}` is not tagged \
                                     with #![allocator]", data.name()));
         }
 
         self.sess.injected_allocator.set(Some(cnum));
         self.inject_dependency_if(cnum, "an allocator",
-                                  &|data| data.needs_allocator());
+                                  &|data| data.needs_allocator(dep_graph));
     }
 
     fn inject_dependency_if(&self,
@ -12,9 +12,9 @@
 // crates and libraries

 use locator;
-use schema;
+use schema::{self, Tracked};

-use rustc::dep_graph::DepGraph;
+use rustc::dep_graph::{DepGraph, DepNode, GlobalMetaDataKind};
 use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, CrateNum, DefIndex, DefId};
 use rustc::hir::map::definitions::DefPathTable;
 use rustc::hir::svh::Svh;

@ -83,14 +83,14 @@ pub struct CrateMetadata {
     /// compilation support.
     pub def_path_table: DefPathTable,

-    pub exported_symbols: FxHashSet<DefIndex>,
+    pub exported_symbols: Tracked<FxHashSet<DefIndex>>,

     pub dep_kind: Cell<DepKind>,
     pub source: CrateSource,

     pub proc_macros: Option<Vec<(ast::Name, Rc<SyntaxExtension>)>>,
     // Foreign items imported from a dylib (Windows only)
-    pub dllimport_foreign_items: FxHashSet<DefIndex>,
+    pub dllimport_foreign_items: Tracked<FxHashSet<DefIndex>>,
 }

 pub struct CStore {

@ -269,8 +269,8 @@ impl CrateMetadata {
         self.root.disambiguator
     }

-    pub fn is_staged_api(&self) -> bool {
-        for attr in self.get_item_attrs(CRATE_DEF_INDEX).iter() {
+    pub fn is_staged_api(&self, dep_graph: &DepGraph) -> bool {
+        for attr in self.get_item_attrs(CRATE_DEF_INDEX, dep_graph).iter() {
             if attr.path == "stable" || attr.path == "unstable" {
                 return true;
             }

@ -278,42 +278,51 @@ impl CrateMetadata {
         false
     }

-    pub fn is_allocator(&self) -> bool {
-        let attrs = self.get_item_attrs(CRATE_DEF_INDEX);
+    pub fn is_allocator(&self, dep_graph: &DepGraph) -> bool {
+        let attrs = self.get_item_attrs(CRATE_DEF_INDEX, dep_graph);
         attr::contains_name(&attrs, "allocator")
     }

-    pub fn needs_allocator(&self) -> bool {
-        let attrs = self.get_item_attrs(CRATE_DEF_INDEX);
+    pub fn needs_allocator(&self, dep_graph: &DepGraph) -> bool {
+        let attrs = self.get_item_attrs(CRATE_DEF_INDEX, dep_graph);
         attr::contains_name(&attrs, "needs_allocator")
     }

-    pub fn is_panic_runtime(&self) -> bool {
-        let attrs = self.get_item_attrs(CRATE_DEF_INDEX);
+    pub fn is_panic_runtime(&self, dep_graph: &DepGraph) -> bool {
+        let attrs = self.get_item_attrs(CRATE_DEF_INDEX, dep_graph);
         attr::contains_name(&attrs, "panic_runtime")
     }

-    pub fn needs_panic_runtime(&self) -> bool {
-        let attrs = self.get_item_attrs(CRATE_DEF_INDEX);
+    pub fn needs_panic_runtime(&self, dep_graph: &DepGraph) -> bool {
+        let attrs = self.get_item_attrs(CRATE_DEF_INDEX, dep_graph);
         attr::contains_name(&attrs, "needs_panic_runtime")
     }

-    pub fn is_compiler_builtins(&self) -> bool {
-        let attrs = self.get_item_attrs(CRATE_DEF_INDEX);
+    pub fn is_compiler_builtins(&self, dep_graph: &DepGraph) -> bool {
+        let attrs = self.get_item_attrs(CRATE_DEF_INDEX, dep_graph);
         attr::contains_name(&attrs, "compiler_builtins")
     }

-    pub fn is_sanitizer_runtime(&self) -> bool {
-        let attrs = self.get_item_attrs(CRATE_DEF_INDEX);
+    pub fn is_sanitizer_runtime(&self, dep_graph: &DepGraph) -> bool {
+        let attrs = self.get_item_attrs(CRATE_DEF_INDEX, dep_graph);
         attr::contains_name(&attrs, "sanitizer_runtime")
     }

-    pub fn is_no_builtins(&self) -> bool {
-        let attrs = self.get_item_attrs(CRATE_DEF_INDEX);
+    pub fn is_no_builtins(&self, dep_graph: &DepGraph) -> bool {
+        let attrs = self.get_item_attrs(CRATE_DEF_INDEX, dep_graph);
         attr::contains_name(&attrs, "no_builtins")
     }

-    pub fn panic_strategy(&self) -> PanicStrategy {
-        self.root.panic_strategy.clone()
+    pub fn panic_strategy(&self, dep_graph: &DepGraph) -> PanicStrategy {
+        let def_id = DefId {
+            krate: self.cnum,
+            index: CRATE_DEF_INDEX,
+        };
+        let dep_node = DepNode::GlobalMetaData(def_id, GlobalMetaDataKind::Krate);
+
+        self.root
+            .panic_strategy
+            .get(dep_graph, dep_node)
+            .clone()
     }
 }
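Editorial note: the accessors above all follow the same pattern now — they take a `&DepGraph` and route every read through a dep node before touching the underlying value (see `panic_strategy`, which goes through `.get(dep_graph, dep_node)`). A minimal, self-contained sketch of that pattern with stand-in types (this is a model, not the actual rustc API):

    use std::cell::RefCell;
    use std::collections::HashSet;

    // Stand-ins for DepNode / DepGraph; the real types live in librustc.
    #[derive(Clone, PartialEq, Eq, Hash, Debug)]
    struct Node(&'static str);

    #[derive(Default)]
    struct Graph { reads: RefCell<HashSet<Node>> }
    impl Graph {
        fn read(&self, node: Node) { self.reads.borrow_mut().insert(node); }
    }

    // Mirrors the idea of schema::Tracked<T>: the value is only reachable
    // through `get`, which registers the read before handing out a reference.
    struct Tracked<T> { state: T }
    impl<T> Tracked<T> {
        fn get(&self, graph: &Graph, node: Node) -> &T {
            graph.read(node);
            &self.state
        }
    }

    fn main() {
        let graph = Graph::default();
        let panic_strategy = Tracked { state: "unwind" };
        // Reading the field records a dependency on the crate-global node.
        assert_eq!(*panic_strategy.get(&graph, Node("GlobalMetaData(Krate)")), "unwind");
        assert!(graph.reads.borrow().contains(&Node("GlobalMetaData(Krate)")));
    }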
@ -24,7 +24,7 @@ use rustc::ty::{self, TyCtxt};
 use rustc::ty::maps::Providers;
 use rustc::hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE};

-use rustc::dep_graph::DepNode;
+use rustc::dep_graph::{DepNode, GlobalMetaDataKind};
 use rustc::hir::map::{DefKey, DefPath, DisambiguatedDefPathData};
 use rustc::util::nodemap::{NodeSet, DefIdMap};
 use rustc_back::PanicStrategy;

@ -147,8 +147,8 @@ impl CrateStore for cstore::CStore {

     fn item_attrs(&self, def_id: DefId) -> Rc<[ast::Attribute]>
     {
-        self.dep_graph.read(DepNode::MetaData(def_id));
-        self.get_crate_data(def_id.krate).get_item_attrs(def_id.index)
+        self.get_crate_data(def_id.krate)
+            .get_item_attrs(def_id.index, &self.dep_graph)
     }

     fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name>

@ -168,7 +168,7 @@ impl CrateStore for cstore::CStore {
         }
         let mut result = vec![];
         self.iter_crate_data(|_, cdata| {
-            cdata.get_implementations_for_trait(filter, &mut result)
+            cdata.get_implementations_for_trait(filter, &self.dep_graph, &mut result)
         });
         result
     }

@ -216,70 +216,82 @@ impl CrateStore for cstore::CStore {
     }

     fn is_exported_symbol(&self, def_id: DefId) -> bool {
-        self.get_crate_data(def_id.krate).exported_symbols.contains(&def_id.index)
+        let data = self.get_crate_data(def_id.krate);
+        let dep_node = data.metadata_dep_node(GlobalMetaDataKind::ExportedSymbols);
+        data.exported_symbols
+            .get(&self.dep_graph, dep_node)
+            .contains(&def_id.index)
     }

     fn is_dllimport_foreign_item(&self, def_id: DefId) -> bool {
         if def_id.krate == LOCAL_CRATE {
             self.dllimport_foreign_items.borrow().contains(&def_id.index)
         } else {
-            self.get_crate_data(def_id.krate).is_dllimport_foreign_item(def_id.index)
+            self.get_crate_data(def_id.krate)
+                .is_dllimport_foreign_item(def_id.index, &self.dep_graph)
         }
     }

     fn dylib_dependency_formats(&self, cnum: CrateNum)
                                 -> Vec<(CrateNum, LinkagePreference)>
     {
-        self.get_crate_data(cnum).get_dylib_dependency_formats()
+        self.get_crate_data(cnum).get_dylib_dependency_formats(&self.dep_graph)
     }

     fn dep_kind(&self, cnum: CrateNum) -> DepKind
     {
-        self.get_crate_data(cnum).dep_kind.get()
+        let data = self.get_crate_data(cnum);
+        let dep_node = data.metadata_dep_node(GlobalMetaDataKind::CrateDeps);
+        self.dep_graph.read(dep_node);
+        data.dep_kind.get()
     }

     fn export_macros(&self, cnum: CrateNum) {
-        if self.get_crate_data(cnum).dep_kind.get() == DepKind::UnexportedMacrosOnly {
-            self.get_crate_data(cnum).dep_kind.set(DepKind::MacrosOnly)
+        let data = self.get_crate_data(cnum);
+        let dep_node = data.metadata_dep_node(GlobalMetaDataKind::CrateDeps);
+
+        self.dep_graph.read(dep_node);
+        if data.dep_kind.get() == DepKind::UnexportedMacrosOnly {
+            data.dep_kind.set(DepKind::MacrosOnly)
         }
     }

     fn lang_items(&self, cnum: CrateNum) -> Vec<(DefIndex, usize)>
     {
-        self.get_crate_data(cnum).get_lang_items()
+        self.get_crate_data(cnum).get_lang_items(&self.dep_graph)
     }

     fn missing_lang_items(&self, cnum: CrateNum)
                           -> Vec<lang_items::LangItem>
     {
-        self.get_crate_data(cnum).get_missing_lang_items()
+        self.get_crate_data(cnum).get_missing_lang_items(&self.dep_graph)
     }

     fn is_staged_api(&self, cnum: CrateNum) -> bool
     {
-        self.get_crate_data(cnum).is_staged_api()
+        self.get_crate_data(cnum).is_staged_api(&self.dep_graph)
     }

     fn is_allocator(&self, cnum: CrateNum) -> bool
     {
-        self.get_crate_data(cnum).is_allocator()
+        self.get_crate_data(cnum).is_allocator(&self.dep_graph)
     }

     fn is_panic_runtime(&self, cnum: CrateNum) -> bool
     {
-        self.get_crate_data(cnum).is_panic_runtime()
+        self.get_crate_data(cnum).is_panic_runtime(&self.dep_graph)
     }

     fn is_compiler_builtins(&self, cnum: CrateNum) -> bool {
-        self.get_crate_data(cnum).is_compiler_builtins()
+        self.get_crate_data(cnum).is_compiler_builtins(&self.dep_graph)
     }

     fn is_sanitizer_runtime(&self, cnum: CrateNum) -> bool {
-        self.get_crate_data(cnum).is_sanitizer_runtime()
+        self.get_crate_data(cnum).is_sanitizer_runtime(&self.dep_graph)
     }

     fn panic_strategy(&self, cnum: CrateNum) -> PanicStrategy {
-        self.get_crate_data(cnum).panic_strategy()
+        self.get_crate_data(cnum).panic_strategy(&self.dep_graph)
     }

     fn crate_name(&self, cnum: CrateNum) -> Symbol
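Editorial note: `dep_kind` and `export_macros` above show the second discipline used by this commit. The state stays in a `Cell` (presumably because `export_macros` mutates it in place), so instead of a `Tracked` wrapper the query registers the read explicitly with `dep_graph.read(dep_node)` before touching the cell. A standalone model of that discipline (stand-in types, not rustc code):

    use std::cell::Cell;

    struct Graph;
    impl Graph {
        fn read(&self, node: &str) { println!("registered read of {}", node); }
    }

    struct CrateData { dep_kind: Cell<u8> }

    fn dep_kind(graph: &Graph, data: &CrateData) -> u8 {
        // Register the dependency first, then access the interior-mutable value.
        graph.read("GlobalMetaData(CrateDeps)");
        data.dep_kind.get()
    }

    fn main() {
        let data = CrateData { dep_kind: Cell::new(1) };
        assert_eq!(dep_kind(&Graph, &data), 1);
    }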
@ -325,16 +337,16 @@ impl CrateStore for cstore::CStore {

     fn native_libraries(&self, cnum: CrateNum) -> Vec<NativeLibrary>
     {
-        self.get_crate_data(cnum).get_native_libraries()
+        self.get_crate_data(cnum).get_native_libraries(&self.dep_graph)
     }

     fn exported_symbols(&self, cnum: CrateNum) -> Vec<DefId>
     {
-        self.get_crate_data(cnum).get_exported_symbols()
+        self.get_crate_data(cnum).get_exported_symbols(&self.dep_graph)
     }

     fn is_no_builtins(&self, cnum: CrateNum) -> bool {
-        self.get_crate_data(cnum).is_no_builtins()
+        self.get_crate_data(cnum).is_no_builtins(&self.dep_graph)
     }

     fn retrace_path(&self,

@ -401,7 +413,7 @@ impl CrateStore for cstore::CStore {
         let body = filemap_to_stream(&sess.parse_sess, filemap);

         // Mark the attrs as used
-        let attrs = data.get_item_attrs(id.index);
+        let attrs = data.get_item_attrs(id.index, &self.dep_graph);
         for attr in attrs.iter() {
             attr::mark_used(attr);
         }

@ -483,7 +495,7 @@ impl CrateStore for cstore::CStore {
                        reachable: &NodeSet)
                        -> EncodedMetadata
     {
-        encoder::encode_metadata(tcx, self, link_meta, reachable)
+        encoder::encode_metadata(tcx, link_meta, reachable)
     }

     fn metadata_encoding_version(&self) -> &[u8]
@ -13,6 +13,7 @@
 use cstore::{self, CrateMetadata, MetadataBlob, NativeLibrary};
 use schema::*;

+use rustc::dep_graph::{DepGraph, DepNode, GlobalMetaDataKind};
 use rustc::hir::map::{DefKey, DefPath, DefPathData};
 use rustc::hir;

@ -404,10 +405,14 @@ impl<'a, 'tcx> MetadataBlob {
         Lazy::with_position(pos).decode(self)
     }

-    pub fn list_crate_metadata(&self, out: &mut io::Write) -> io::Result<()> {
+    pub fn list_crate_metadata(&self,
+                               out: &mut io::Write) -> io::Result<()> {
         write!(out, "=External Dependencies=\n")?;
         let root = self.get_root();
-        for (i, dep) in root.crate_deps.decode(self).enumerate() {
+        for (i, dep) in root.crate_deps
+                            .get_untracked()
+                            .decode(self)
+                            .enumerate() {
             write!(out, "{} {}-{}\n", i + 1, dep.name, dep.hash)?;
         }
         write!(out, "\n")?;

@ -653,8 +658,13 @@ impl<'a, 'tcx> CrateMetadata {
     }

     /// Iterates over the language items in the given crate.
-    pub fn get_lang_items(&self) -> Vec<(DefIndex, usize)> {
-        self.root.lang_items.decode(self).collect()
+    pub fn get_lang_items(&self, dep_graph: &DepGraph) -> Vec<(DefIndex, usize)> {
+        let dep_node = self.metadata_dep_node(GlobalMetaDataKind::LangItems);
+        self.root
+            .lang_items
+            .get(dep_graph, dep_node)
+            .decode(self)
+            .collect()
     }

     /// Iterates over each child of the given item.

@ -853,13 +863,17 @@ impl<'a, 'tcx> CrateMetadata {
         }
     }

-    pub fn get_item_attrs(&self, node_id: DefIndex) -> Rc<[ast::Attribute]> {
+    pub fn get_item_attrs(&self,
+                          node_id: DefIndex,
+                          dep_graph: &DepGraph) -> Rc<[ast::Attribute]> {
         let (node_as, node_index) =
             (node_id.address_space().index(), node_id.as_array_index());
         if self.is_proc_macro(node_id) {
             return Rc::new([]);
         }

+        dep_graph.read(DepNode::MetaData(self.local_def_id(node_id)));
+
         if let Some(&Some(ref val)) =
             self.attribute_cache.borrow()[node_as].get(node_index) {
             return val.clone();

@ -924,7 +938,10 @@ impl<'a, 'tcx> CrateMetadata {
             .collect()
     }

-    pub fn get_implementations_for_trait(&self, filter: Option<DefId>, result: &mut Vec<DefId>) {
+    pub fn get_implementations_for_trait(&self,
+                                         filter: Option<DefId>,
+                                         dep_graph: &DepGraph,
+                                         result: &mut Vec<DefId>) {
         // Do a reverse lookup beforehand to avoid touching the crate_num
         // hash map in the loop below.
         let filter = match filter.map(|def_id| self.reverse_translate_def_id(def_id)) {

@ -935,7 +952,8 @@ impl<'a, 'tcx> CrateMetadata {
         };

         // FIXME(eddyb) Make this O(1) instead of O(n).
-        for trait_impls in self.root.impls.decode(self) {
+        let dep_node = self.metadata_dep_node(GlobalMetaDataKind::Impls);
+        for trait_impls in self.root.impls.get(dep_graph, dep_node).decode(self) {
             if filter.is_some() && filter != Some(trait_impls.trait_id) {
                 continue;
             }

@ -958,13 +976,29 @@ impl<'a, 'tcx> CrateMetadata {
     }


-    pub fn get_native_libraries(&self) -> Vec<NativeLibrary> {
-        self.root.native_libraries.decode(self).collect()
+    pub fn get_native_libraries(&self,
+                                dep_graph: &DepGraph)
+                                -> Vec<NativeLibrary> {
+        let dep_node = self.metadata_dep_node(GlobalMetaDataKind::NativeLibraries);
+        self.root
+            .native_libraries
+            .get(dep_graph, dep_node)
+            .decode(self)
+            .collect()
     }

-    pub fn get_dylib_dependency_formats(&self) -> Vec<(CrateNum, LinkagePreference)> {
+    pub fn get_dylib_dependency_formats(&self,
+                                        dep_graph: &DepGraph)
+                                        -> Vec<(CrateNum, LinkagePreference)> {
+        let def_id = DefId {
+            krate: self.cnum,
+            index: CRATE_DEF_INDEX,
+        };
+        let dep_node = DepNode::GlobalMetaData(def_id,
+                                               GlobalMetaDataKind::DylibDependencyFormats);
         self.root
             .dylib_dependency_formats
+            .get(dep_graph, dep_node)
             .decode(self)
             .enumerate()
             .flat_map(|(i, link)| {

@ -974,8 +1008,13 @@ impl<'a, 'tcx> CrateMetadata {
             .collect()
     }

-    pub fn get_missing_lang_items(&self) -> Vec<lang_items::LangItem> {
-        self.root.lang_items_missing.decode(self).collect()
+    pub fn get_missing_lang_items(&self, dep_graph: &DepGraph) -> Vec<lang_items::LangItem> {
+        let dep_node = self.metadata_dep_node(GlobalMetaDataKind::LangItemsMissing);
+        self.root
+            .lang_items_missing
+            .get(dep_graph, dep_node)
+            .decode(self)
+            .collect()
     }

     pub fn get_fn_arg_names(&self, id: DefIndex) -> Vec<ast::Name> {

@ -988,8 +1027,13 @@ impl<'a, 'tcx> CrateMetadata {
         arg_names.decode(self).collect()
     }

-    pub fn get_exported_symbols(&self) -> Vec<DefId> {
-        self.exported_symbols.iter().map(|&index| self.local_def_id(index)).collect()
+    pub fn get_exported_symbols(&self, dep_graph: &DepGraph) -> Vec<DefId> {
+        let dep_node = self.metadata_dep_node(GlobalMetaDataKind::ExportedSymbols);
+        self.exported_symbols
+            .get(dep_graph, dep_node)
+            .iter()
+            .map(|&index| self.local_def_id(index))
+            .collect()
     }

     pub fn get_macro(&self, id: DefIndex) -> (ast::Name, MacroDef) {

@ -1018,8 +1062,11 @@ impl<'a, 'tcx> CrateMetadata {
         }
     }

-    pub fn is_dllimport_foreign_item(&self, id: DefIndex) -> bool {
-        self.dllimport_foreign_items.contains(&id)
+    pub fn is_dllimport_foreign_item(&self, id: DefIndex, dep_graph: &DepGraph) -> bool {
+        let dep_node = self.metadata_dep_node(GlobalMetaDataKind::NativeLibraries);
+        self.dllimport_foreign_items
+            .get(dep_graph, dep_node)
+            .contains(&id)
     }

     pub fn is_default_impl(&self, impl_id: DefIndex) -> bool {

@ -1097,121 +1144,62 @@ impl<'a, 'tcx> CrateMetadata {
         let external_codemap = self.root.codemap.decode(self);

         let imported_filemaps = external_codemap.map(|filemap_to_import| {
-            // Try to find an existing FileMap that can be reused for the filemap to
-            // be imported. A FileMap is reusable if it is exactly the same, just
-            // positioned at a different offset within the codemap.
-            let reusable_filemap = {
-                local_codemap.files
-                             .borrow()
-                             .iter()
-                             .find(|fm| are_equal_modulo_startpos(&fm, &filemap_to_import))
-                             .map(|rc| rc.clone())
-            };
-
-            match reusable_filemap {
-                Some(fm) => {
-                    debug!("CrateMetaData::imported_filemaps reuse \
-                            filemap {:?} original (start_pos {:?} end_pos {:?}) \
-                            translated (start_pos {:?} end_pos {:?})",
-                           filemap_to_import.name,
-                           filemap_to_import.start_pos, filemap_to_import.end_pos,
-                           fm.start_pos, fm.end_pos);
-
-                    cstore::ImportedFileMap {
-                        original_start_pos: filemap_to_import.start_pos,
-                        original_end_pos: filemap_to_import.end_pos,
-                        translated_filemap: fm,
-                    }
-                }
-                None => {
-                    // We can't reuse an existing FileMap, so allocate a new one
-                    // containing the information we need.
-                    let syntax_pos::FileMap { name,
-                                              name_was_remapped,
-                                              start_pos,
-                                              end_pos,
-                                              lines,
-                                              multibyte_chars,
-                                              .. } = filemap_to_import;
-
-                    let source_length = (end_pos - start_pos).to_usize();
-
-                    // Translate line-start positions and multibyte character
-                    // position into frame of reference local to file.
-                    // `CodeMap::new_imported_filemap()` will then translate those
-                    // coordinates to their new global frame of reference when the
-                    // offset of the FileMap is known.
-                    let mut lines = lines.into_inner();
-                    for pos in &mut lines {
-                        *pos = *pos - start_pos;
-                    }
-                    let mut multibyte_chars = multibyte_chars.into_inner();
-                    for mbc in &mut multibyte_chars {
-                        mbc.pos = mbc.pos - start_pos;
-                    }
-
-                    let local_version = local_codemap.new_imported_filemap(name,
-                                                                           name_was_remapped,
-                                                                           source_length,
-                                                                           lines,
-                                                                           multibyte_chars);
-                    debug!("CrateMetaData::imported_filemaps alloc \
-                            filemap {:?} original (start_pos {:?} end_pos {:?}) \
-                            translated (start_pos {:?} end_pos {:?})",
-                           local_version.name, start_pos, end_pos,
-                           local_version.start_pos, local_version.end_pos);
-
-                    cstore::ImportedFileMap {
-                        original_start_pos: start_pos,
-                        original_end_pos: end_pos,
-                        translated_filemap: local_version,
-                    }
-                }
-            }
-        })
-        .collect();
+            // We can't reuse an existing FileMap, so allocate a new one
+            // containing the information we need.
+            let syntax_pos::FileMap { name,
+                                      name_was_remapped,
+                                      start_pos,
+                                      end_pos,
+                                      lines,
+                                      multibyte_chars,
+                                      .. } = filemap_to_import;
+
+            let source_length = (end_pos - start_pos).to_usize();
+
+            // Translate line-start positions and multibyte character
+            // position into frame of reference local to file.
+            // `CodeMap::new_imported_filemap()` will then translate those
+            // coordinates to their new global frame of reference when the
+            // offset of the FileMap is known.
+            let mut lines = lines.into_inner();
+            for pos in &mut lines {
+                *pos = *pos - start_pos;
+            }
+            let mut multibyte_chars = multibyte_chars.into_inner();
+            for mbc in &mut multibyte_chars {
+                mbc.pos = mbc.pos - start_pos;
+            }
+
+            let local_version = local_codemap.new_imported_filemap(name,
+                                                                   name_was_remapped,
+                                                                   self.cnum.as_u32(),
+                                                                   source_length,
+                                                                   lines,
+                                                                   multibyte_chars);
+            debug!("CrateMetaData::imported_filemaps alloc \
+                    filemap {:?} original (start_pos {:?} end_pos {:?}) \
+                    translated (start_pos {:?} end_pos {:?})",
+                   local_version.name, start_pos, end_pos,
+                   local_version.start_pos, local_version.end_pos);
+
+            cstore::ImportedFileMap {
+                original_start_pos: start_pos,
+                original_end_pos: end_pos,
+                translated_filemap: local_version,
+            }
+        }).collect();

         // This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref.
         *self.codemap_import_info.borrow_mut() = imported_filemaps;
         self.codemap_import_info.borrow()
     }
-}
-
-fn are_equal_modulo_startpos(fm1: &syntax_pos::FileMap, fm2: &syntax_pos::FileMap) -> bool {
-    if fm1.byte_length() != fm2.byte_length() {
-        return false;
-    }
-
-    if fm1.name != fm2.name {
-        return false;
-    }
-
-    let lines1 = fm1.lines.borrow();
-    let lines2 = fm2.lines.borrow();
-
-    if lines1.len() != lines2.len() {
-        return false;
-    }
-
-    for (&line1, &line2) in lines1.iter().zip(lines2.iter()) {
-        if (line1 - fm1.start_pos) != (line2 - fm2.start_pos) {
-            return false;
-        }
-    }
-
-    let multibytes1 = fm1.multibyte_chars.borrow();
-    let multibytes2 = fm2.multibyte_chars.borrow();
-
-    if multibytes1.len() != multibytes2.len() {
-        return false;
-    }
-
-    for (mb1, mb2) in multibytes1.iter().zip(multibytes2.iter()) {
-        if (mb1.bytes != mb2.bytes) || ((mb1.pos - fm1.start_pos) != (mb2.pos - fm2.start_pos)) {
-            return false;
-        }
-    }
-
-    true
-}
+
+    pub fn metadata_dep_node(&self, kind: GlobalMetaDataKind) -> DepNode<DefId> {
+        let def_id = DefId {
+            krate: self.cnum,
+            index: CRATE_DEF_INDEX,
+        };
+
+        DepNode::GlobalMetaData(def_id, kind)
+    }
+}
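Editorial note: the decoder methods above all funnel through the new `metadata_dep_node` helper, which keys every crate-global table off the foreign crate's `CRATE_DEF_INDEX` and distinguishes the tables by `GlobalMetaDataKind`, giving one dep node per (crate, metadata section). A standalone model of that keying scheme (stand-in types, not the rustc definitions):

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct DefId { krate: u32, index: u32 }
    const CRATE_DEF_INDEX: u32 = 0;

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum Kind { LangItems, Impls, NativeLibraries }

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum DepNode { GlobalMetaData(DefId, Kind) }

    fn metadata_dep_node(cnum: u32, kind: Kind) -> DepNode {
        // Same DefId for every section of one crate; the kind tells the
        // sections apart, so readers of different sections get different nodes.
        DepNode::GlobalMetaData(DefId { krate: cnum, index: CRATE_DEF_INDEX }, kind)
    }

    fn main() {
        assert_ne!(metadata_dep_node(3, Kind::Impls),
                   metadata_dep_node(3, Kind::LangItems));
        assert_eq!(metadata_dep_node(3, Kind::NativeLibraries),
                   metadata_dep_node(3, Kind::NativeLibraries));
    }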
File diff suppressed because it is too large.
@ -58,20 +58,16 @@
 use encoder::EncodeContext;
 use index::Index;
 use schema::*;
+use isolated_encoder::IsolatedEncoder;

 use rustc::hir;
 use rustc::hir::def_id::DefId;
-use rustc::ich::{StableHashingContext, Fingerprint};
 use rustc::middle::cstore::EncodedMetadataHash;
 use rustc::ty::TyCtxt;
 use syntax::ast;

 use std::ops::{Deref, DerefMut};

-use rustc_data_structures::accumulate_vec::AccumulateVec;
-use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
-use rustc_serialize::Encodable;
-
 /// Builder that can encode new items, adding them into the index.
 /// Item encoding cannot be nested.
 pub struct IndexBuilder<'a, 'b: 'a, 'tcx: 'b> {

@ -119,7 +115,7 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
     /// content system.
     pub fn record<'x, DATA>(&'x mut self,
                             id: DefId,
-                            op: fn(&mut EntryBuilder<'x, 'b, 'tcx>, DATA) -> Entry<'tcx>,
+                            op: fn(&mut IsolatedEncoder<'x, 'b, 'tcx>, DATA) -> Entry<'tcx>,
                             data: DATA)
         where DATA: DepGraphRead
     {

@ -132,29 +128,19 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
         // unclear whether that would be a win since hashing is cheap enough.
         let _task = tcx.dep_graph.in_ignore();

-        let compute_ich = (tcx.sess.opts.debugging_opts.query_dep_graph ||
-                           tcx.sess.opts.debugging_opts.incremental_cc) &&
-                           tcx.sess.opts.build_dep_graph();
-
         let ecx: &'x mut EncodeContext<'b, 'tcx> = &mut *self.ecx;
-        let mut entry_builder = EntryBuilder {
-            tcx: tcx,
-            ecx: ecx,
-            hcx: if compute_ich {
-                Some((StableHashingContext::new(tcx), StableHasher::new()))
-            } else {
-                None
-            }
-        };
-
+        let mut entry_builder = IsolatedEncoder::new(ecx);
         let entry = op(&mut entry_builder, data);
+        let entry = entry_builder.lazy(&entry);

-        if let Some((ref mut hcx, ref mut hasher)) = entry_builder.hcx {
-            entry.hash_stable(hcx, hasher);
+        let (fingerprint, ecx) = entry_builder.finish();
+        if let Some(hash) = fingerprint {
+            ecx.metadata_hashes.entry_hashes.push(EncodedMetadataHash {
+                def_index: id.index,
+                hash: hash,
+            });
         }

-        let entry = entry_builder.ecx.lazy(&entry);
-
-        entry_builder.finish(id);
-
         self.items.record(id, entry);
     }
@ -257,91 +243,3 @@ impl<T> DepGraphRead for FromId<T> {
         tcx.hir.read(self.0);
     }
 }
-
-pub struct EntryBuilder<'a, 'b: 'a, 'tcx: 'b> {
-    pub tcx: TyCtxt<'b, 'tcx, 'tcx>,
-    ecx: &'a mut EncodeContext<'b, 'tcx>,
-    hcx: Option<(StableHashingContext<'b, 'tcx>, StableHasher<Fingerprint>)>,
-}
-
-impl<'a, 'b: 'a, 'tcx: 'b> EntryBuilder<'a, 'b, 'tcx> {
-
-    pub fn finish(self, def_id: DefId) {
-        if let Some((_, hasher)) = self.hcx {
-            let hash = hasher.finish();
-            self.ecx.metadata_hashes.push(EncodedMetadataHash {
-                def_index: def_id.index,
-                hash: hash,
-            });
-        }
-    }
-
-    pub fn lazy<T>(&mut self, value: &T) -> Lazy<T>
-        where T: Encodable + HashStable<StableHashingContext<'b, 'tcx>>
-    {
-        if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
-            value.hash_stable(hcx, hasher);
-            debug!("metadata-hash: {:?}", hasher);
-        }
-        self.ecx.lazy(value)
-    }
-
-    pub fn lazy_seq<I, T>(&mut self, iter: I) -> LazySeq<T>
-        where I: IntoIterator<Item = T>,
-              T: Encodable + HashStable<StableHashingContext<'b, 'tcx>>
-    {
-        if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
-            let iter = iter.into_iter();
-            let (lower_bound, upper_bound) = iter.size_hint();
-
-            if upper_bound == Some(lower_bound) {
-                lower_bound.hash_stable(hcx, hasher);
-                let mut num_items_hashed = 0;
-                let ret = self.ecx.lazy_seq(iter.inspect(|item| {
-                    item.hash_stable(hcx, hasher);
-                    num_items_hashed += 1;
-                }));
-
-                // Sometimes items in a sequence are filtered out without being
-                // hashed (e.g. for &[ast::Attribute]) and this code path cannot
-                // handle that correctly, so we want to make sure we didn't hit
-                // it by accident.
-                if lower_bound != num_items_hashed {
-                    bug!("Hashed a different number of items ({}) than expected ({})",
-                         num_items_hashed,
-                         lower_bound);
-                }
-                debug!("metadata-hash: {:?}", hasher);
-                ret
-            } else {
-                // Collect into a vec so we know the length of the sequence
-                let items: AccumulateVec<[T; 32]> = iter.collect();
-                items.hash_stable(hcx, hasher);
-                debug!("metadata-hash: {:?}", hasher);
-                self.ecx.lazy_seq(items)
-            }
-        } else {
-            self.ecx.lazy_seq(iter)
-        }
-    }
-
-    pub fn lazy_seq_from_slice<T>(&mut self, slice: &[T]) -> LazySeq<T>
-        where T: Encodable + HashStable<StableHashingContext<'b, 'tcx>>
-    {
-        if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
-            slice.hash_stable(hcx, hasher);
-            debug!("metadata-hash: {:?}", hasher);
-        }
-        self.ecx.lazy_seq_ref(slice.iter())
-    }
-
-    pub fn lazy_seq_ref_from_slice<T>(&mut self, slice: &[&T]) -> LazySeq<T>
-        where T: Encodable + HashStable<StableHashingContext<'b, 'tcx>>
-    {
-        if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
-            slice.hash_stable(hcx, hasher);
-            debug!("metadata-hash: {:?}", hasher);
-        }
-        self.ecx.lazy_seq_ref(slice.iter().map(|x| *x))
-    }
-}

src/librustc_metadata/isolated_encoder.rs (new file, 160 lines)
@ -0,0 +1,160 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use encoder::EncodeContext;
+use schema::{Lazy, LazySeq};
+
+use rustc::ich::{StableHashingContext, Fingerprint};
+use rustc::ty::TyCtxt;
+
+use rustc_data_structures::accumulate_vec::AccumulateVec;
+use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
+use rustc_serialize::Encodable;
+
+/// The IsolatedEncoder provides facilities to write to crate metadata while
+/// making sure that anything going through it is also fed into an ICH hasher.
+pub struct IsolatedEncoder<'a, 'b: 'a, 'tcx: 'b> {
+    pub tcx: TyCtxt<'b, 'tcx, 'tcx>,
+    ecx: &'a mut EncodeContext<'b, 'tcx>,
+    hcx: Option<(StableHashingContext<'b, 'tcx>, StableHasher<Fingerprint>)>,
+}
+
+impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> {
+
+    pub fn new(ecx: &'a mut EncodeContext<'b, 'tcx>) -> Self {
+        let tcx = ecx.tcx;
+        let compute_ich = ecx.compute_ich;
+        IsolatedEncoder {
+            tcx: tcx,
+            ecx: ecx,
+            hcx: if compute_ich {
+                Some((StableHashingContext::new(tcx), StableHasher::new()))
+            } else {
+                None
+            }
+        }
+    }
+
+    pub fn finish(self) -> (Option<Fingerprint>, &'a mut EncodeContext<'b, 'tcx>) {
+        if let Some((_, hasher)) = self.hcx {
+            (Some(hasher.finish()), self.ecx)
+        } else {
+            (None, self.ecx)
+        }
+    }
+
+    pub fn lazy<T>(&mut self, value: &T) -> Lazy<T>
+        where T: Encodable + HashStable<StableHashingContext<'b, 'tcx>>
+    {
+        if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
+            value.hash_stable(hcx, hasher);
+            debug!("metadata-hash: {:?}", hasher);
+        }
+        self.ecx.lazy(value)
+    }
+
+    pub fn lazy_seq<I, T>(&mut self, iter: I) -> LazySeq<T>
+        where I: IntoIterator<Item = T>,
+              T: Encodable + HashStable<StableHashingContext<'b, 'tcx>>
+    {
+        if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
+            let iter = iter.into_iter();
+            let (lower_bound, upper_bound) = iter.size_hint();
+
+            if upper_bound == Some(lower_bound) {
+                lower_bound.hash_stable(hcx, hasher);
+                let mut num_items_hashed = 0;
+                let ret = self.ecx.lazy_seq(iter.inspect(|item| {
+                    item.hash_stable(hcx, hasher);
+                    num_items_hashed += 1;
+                }));
+
+                // Sometimes items in a sequence are filtered out without being
+                // hashed (e.g. for &[ast::Attribute]) and this code path cannot
+                // handle that correctly, so we want to make sure we didn't hit
+                // it by accident.
+                if lower_bound != num_items_hashed {
+                    bug!("Hashed a different number of items ({}) than expected ({})",
+                         num_items_hashed,
+                         lower_bound);
+                }
+                debug!("metadata-hash: {:?}", hasher);
+                ret
+            } else {
+                // Collect into a vec so we know the length of the sequence
+                let items: AccumulateVec<[T; 32]> = iter.collect();
+                items.hash_stable(hcx, hasher);
+                debug!("metadata-hash: {:?}", hasher);
+                self.ecx.lazy_seq(items)
+            }
+        } else {
+            self.ecx.lazy_seq(iter)
+        }
+    }
+
+    pub fn lazy_seq_ref<'x, I, T>(&mut self, iter: I) -> LazySeq<T>
+        where I: IntoIterator<Item = &'x T>,
+              T: 'x + Encodable + HashStable<StableHashingContext<'b, 'tcx>>
+    {
+        if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
+            let iter = iter.into_iter();
+            let (lower_bound, upper_bound) = iter.size_hint();
+
+            if upper_bound == Some(lower_bound) {
+                lower_bound.hash_stable(hcx, hasher);
+                let mut num_items_hashed = 0;
+                let ret = self.ecx.lazy_seq_ref(iter.inspect(|item| {
+                    item.hash_stable(hcx, hasher);
+                    num_items_hashed += 1;
+                }));
+
+                // Sometimes items in a sequence are filtered out without being
+                // hashed (e.g. for &[ast::Attribute]) and this code path cannot
+                // handle that correctly, so we want to make sure we didn't hit
+                // it by accident.
+                if lower_bound != num_items_hashed {
+                    bug!("Hashed a different number of items ({}) than expected ({})",
+                         num_items_hashed,
+                         lower_bound);
+                }
+                debug!("metadata-hash: {:?}", hasher);
+                ret
+            } else {
+                // Collect into a vec so we know the length of the sequence
+                let items: AccumulateVec<[&'x T; 32]> = iter.collect();
+                items.hash_stable(hcx, hasher);
+                debug!("metadata-hash: {:?}", hasher);
+                self.ecx.lazy_seq_ref(items.iter().map(|x| *x))
+            }
+        } else {
+            self.ecx.lazy_seq_ref(iter)
+        }
+    }
+
+    pub fn lazy_seq_from_slice<T>(&mut self, slice: &[T]) -> LazySeq<T>
+        where T: Encodable + HashStable<StableHashingContext<'b, 'tcx>>
+    {
+        if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
+            slice.hash_stable(hcx, hasher);
+            debug!("metadata-hash: {:?}", hasher);
+        }
+        self.ecx.lazy_seq_ref(slice.iter())
+    }
+
+    pub fn lazy_seq_ref_from_slice<T>(&mut self, slice: &[&T]) -> LazySeq<T>
+        where T: Encodable + HashStable<StableHashingContext<'b, 'tcx>>
+    {
+        if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
+            slice.hash_stable(hcx, hasher);
+            debug!("metadata-hash: {:?}", hasher);
+        }
+        self.ecx.lazy_seq_ref(slice.iter().map(|x| *x))
+    }
+}
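Editorial note: the new IsolatedEncoder pairs every write to metadata with an update of the ICH hasher, so `finish()` can hand back a fingerprint for exactly the data that was encoded. A reduced, self-contained model of that flow, using the standard library's DefaultHasher in place of the stable hasher (a sketch, not the rustc implementation):

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    // Stand-in for EncodeContext: just collects encoded bytes.
    #[derive(Default)]
    struct Encoder { out: Vec<u8>, hasher: Option<DefaultHasher> }

    impl Encoder {
        // Mirrors IsolatedEncoder::lazy: everything written is also hashed.
        fn emit<T: Hash + AsRef<[u8]>>(&mut self, value: &T) {
            if let Some(ref mut h) = self.hasher {
                value.hash(h);
            }
            self.out.extend_from_slice(value.as_ref());
        }

        // Mirrors IsolatedEncoder::finish: return the fingerprint, if hashing was on.
        fn finish(self) -> (Option<u64>, Vec<u8>) {
            (self.hasher.map(|h| h.finish()), self.out)
        }
    }

    fn main() {
        let mut enc = Encoder { hasher: Some(DefaultHasher::new()), ..Default::default() };
        enc.emit(&"some entry");
        let (fingerprint, _bytes) = enc.finish();
        assert!(fingerprint.is_some());
    }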
@ -57,6 +57,7 @@ mod index;
 mod encoder;
 mod decoder;
 mod cstore_impl;
+mod isolated_encoder;
 mod schema;

 pub mod creader;
@ -13,7 +13,7 @@ use index;

 use rustc::hir;
 use rustc::hir::def::{self, CtorKind};
-use rustc::hir::def_id::{DefIndex, DefId};
+use rustc::hir::def_id::{DefIndex, DefId, CrateNum};
 use rustc::ich::StableHashingContext;
 use rustc::middle::cstore::{DepKind, LinkagePreference, NativeLibrary};
 use rustc::middle::lang_items;

@ -32,6 +32,8 @@ use std::mem;
 use rustc_data_structures::stable_hasher::{StableHasher, HashStable,
                                            StableHasherResult};

+use rustc::dep_graph::{DepGraph, DepNode};
+
 pub fn rustc_version() -> String {
     format!("rustc {}",
             option_env!("CFG_VERSION").unwrap_or("unknown version"))

@ -186,25 +188,59 @@ pub enum LazyState {
     Previous(usize),
 }

+/// A `Tracked<T>` wraps a value so that one can only access it when specifying
+/// the `DepNode` for that value. This makes it harder to forget registering
+/// reads.
+#[derive(RustcEncodable, RustcDecodable)]
+pub struct Tracked<T> {
+    state: T,
+}
+
+impl<T> Tracked<T> {
+    pub fn new(state: T) -> Tracked<T> {
+        Tracked {
+            state: state,
+        }
+    }
+
+    pub fn get(&self, dep_graph: &DepGraph, dep_node: DepNode<DefId>) -> &T {
+        dep_graph.read(dep_node);
+        &self.state
+    }
+
+    pub fn get_untracked(&self) -> &T {
+        &self.state
+    }
+
+    pub fn map<F, R>(&self, f: F) -> Tracked<R>
+        where F: FnOnce(&T) -> R
+    {
+        Tracked {
+            state: f(&self.state),
+        }
+    }
+}
+
 #[derive(RustcEncodable, RustcDecodable)]
 pub struct CrateRoot {
     pub name: Symbol,
     pub triple: String,
     pub hash: hir::svh::Svh,
     pub disambiguator: Symbol,
-    pub panic_strategy: PanicStrategy,
+    pub panic_strategy: Tracked<PanicStrategy>,
     pub plugin_registrar_fn: Option<DefIndex>,
     pub macro_derive_registrar: Option<DefIndex>,

-    pub crate_deps: LazySeq<CrateDep>,
-    pub dylib_dependency_formats: LazySeq<Option<LinkagePreference>>,
-    pub lang_items: LazySeq<(DefIndex, usize)>,
-    pub lang_items_missing: LazySeq<lang_items::LangItem>,
-    pub native_libraries: LazySeq<NativeLibrary>,
+    pub crate_deps: Tracked<LazySeq<CrateDep>>,
+    pub dylib_dependency_formats: Tracked<LazySeq<Option<LinkagePreference>>>,
+    pub lang_items: Tracked<LazySeq<(DefIndex, usize)>>,
+    pub lang_items_missing: Tracked<LazySeq<lang_items::LangItem>>,
+    pub native_libraries: Tracked<LazySeq<NativeLibrary>>,
     pub codemap: LazySeq<syntax_pos::FileMap>,
     pub def_path_table: Lazy<hir::map::definitions::DefPathTable>,
-    pub impls: LazySeq<TraitImpls>,
-    pub exported_symbols: LazySeq<DefIndex>,
+    pub impls: Tracked<LazySeq<TraitImpls>>,
+    pub exported_symbols: Tracked<LazySeq<DefIndex>>,
     pub index: LazySeq<index::Index>,
 }

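Editorial note: `Tracked<T>` is the schema-level building block behind the accessor changes earlier in this commit. Besides `get`, the two operations worth noting are `get_untracked` (an explicit opt-out, used above by `list_crate_metadata`) and `map`, which transforms the payload without ever exposing it untracked. A small standalone usage sketch (the wrapper is redefined here only so the snippet runs on its own; `get` is omitted since it is shown in the earlier sketch):

    struct Tracked<T> { state: T }
    impl<T> Tracked<T> {
        fn new(state: T) -> Tracked<T> { Tracked { state: state } }
        fn get_untracked(&self) -> &T { &self.state }
        fn map<F, R>(&self, f: F) -> Tracked<R> where F: FnOnce(&T) -> R {
            Tracked { state: f(&self.state) }
        }
    }

    fn main() {
        // Wrap a table once; `map` lets later passes transform the payload
        // while it stays behind the tracked wrapper.
        let lang_items = Tracked::new(vec![1u32, 2, 3]);
        let count = lang_items.map(|v| v.len());
        // Diagnostic paths opt out explicitly and visibly.
        assert_eq!(*count.get_untracked(), 3);
    }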
@ -215,12 +251,35 @@ pub struct CrateDep {
     pub kind: DepKind,
 }

+impl_stable_hash_for!(struct CrateDep {
+    name,
+    hash,
+    kind
+});
+
 #[derive(RustcEncodable, RustcDecodable)]
 pub struct TraitImpls {
     pub trait_id: (u32, DefIndex),
     pub impls: LazySeq<DefIndex>,
 }

+impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for TraitImpls {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'a, 'tcx>,
+                                          hasher: &mut StableHasher<W>) {
+        let TraitImpls {
+            trait_id: (krate, def_index),
+            ref impls,
+        } = *self;
+
+        DefId {
+            krate: CrateNum::from_u32(krate),
+            index: def_index
+        }.hash_stable(hcx, hasher);
+        impls.hash_stable(hcx, hasher);
+    }
+}
+
 #[derive(RustcEncodable, RustcDecodable)]
 pub struct Entry<'tcx> {
     pub kind: EntryKind<'tcx>,
@ -754,10 +754,7 @@ fn write_metadata<'a, 'gcx>(tcx: TyCtxt<'a, 'gcx, 'gcx>,
     }).max().unwrap();

     if kind == MetadataKind::None {
-        return (metadata_llcx, metadata_llmod, EncodedMetadata {
-            raw_data: vec![],
-            hashes: vec![],
-        });
+        return (metadata_llcx, metadata_llmod, EncodedMetadata::new());
     }

     let cstore = &tcx.sess.cstore;
@ -21,8 +21,8 @@ pub use syntax_pos::*;
 pub use syntax_pos::hygiene::{ExpnFormat, ExpnInfo, NameAndSpan};
 pub use self::ExpnFormat::*;

-use std::cell::RefCell;
-use std::path::{Path,PathBuf};
+use std::cell::{RefCell, Ref};
+use std::path::{Path, PathBuf};
 use std::rc::Rc;

 use std::env;

@ -103,11 +103,18 @@ impl FileLoader for RealFileLoader {
 //

 pub struct CodeMap {
-    pub files: RefCell<Vec<Rc<FileMap>>>,
+    // The `files` field should not be visible outside of libsyntax so that we
+    // can do proper dependency tracking.
+    pub(super) files: RefCell<Vec<Rc<FileMap>>>,
     file_loader: Box<FileLoader>,
     // This is used to apply the file path remapping as specified via
     // -Zremap-path-prefix to all FileMaps allocated within this CodeMap.
     path_mapping: FilePathMapping,
+    // The CodeMap will invoke this callback whenever a specific FileMap is
+    // accessed. The callback starts out as a no-op but when the dependency
+    // graph becomes available later during the compilation process, it will
+    // be replaced with something that notifies the dep-tracking system.
+    dep_tracking_callback: RefCell<Box<Fn(&FileMap)>>,
 }

 impl CodeMap {

@ -116,6 +123,7 @@ impl CodeMap {
             files: RefCell::new(Vec::new()),
             file_loader: Box::new(RealFileLoader),
             path_mapping: path_mapping,
+            dep_tracking_callback: RefCell::new(Box::new(|_| {})),
         }
     }

@ -126,6 +134,7 @@ impl CodeMap {
             files: RefCell::new(Vec::new()),
             file_loader: file_loader,
             path_mapping: path_mapping,
+            dep_tracking_callback: RefCell::new(Box::new(|_| {})),
         }
     }

@ -133,6 +142,10 @@ impl CodeMap {
         &self.path_mapping
     }

+    pub fn set_dep_tracking_callback(&self, cb: Box<Fn(&FileMap)>) {
+        *self.dep_tracking_callback.borrow_mut() = cb;
+    }
+
     pub fn file_exists(&self, path: &Path) -> bool {
         self.file_loader.file_exists(path)
     }

@ -142,6 +155,19 @@ impl CodeMap {
         Ok(self.new_filemap(path.to_str().unwrap().to_string(), src))
     }

+    pub fn files(&self) -> Ref<Vec<Rc<FileMap>>> {
+        let files = self.files.borrow();
+        for file in files.iter() {
+            (self.dep_tracking_callback.borrow())(file);
+        }
+        files
+    }
+
+    /// Only use this if you do your own dependency tracking!
+    pub fn files_untracked(&self) -> Ref<Vec<Rc<FileMap>>> {
+        self.files.borrow()
+    }
+
     fn next_start_pos(&self) -> usize {
         let files = self.files.borrow();
         match files.last() {
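Editorial note: `CodeMap` now funnels every FileMap access through `dep_tracking_callback`, which starts as a no-op and is swapped in once the dependency graph exists (`set_dep_tracking_callback`). A standalone model of the mechanism — simplified stand-in types, not the libsyntax code, and the recording target is hypothetical:

    use std::cell::RefCell;
    use std::rc::Rc;

    struct FileMap { name: String }
    struct CodeMap {
        files: RefCell<Vec<Rc<FileMap>>>,
        dep_tracking_callback: RefCell<Box<dyn Fn(&FileMap)>>,
    }

    impl CodeMap {
        fn set_dep_tracking_callback(&self, cb: Box<dyn Fn(&FileMap)>) {
            *self.dep_tracking_callback.borrow_mut() = cb;
        }
        fn files(&self) -> usize {
            // Every access pings the callback first, like the real files()/count_lines().
            for f in self.files.borrow().iter() {
                (self.dep_tracking_callback.borrow())(f);
            }
            self.files.borrow().len()
        }
    }

    fn main() {
        let cm = CodeMap {
            files: RefCell::new(vec![Rc::new(FileMap { name: "lib.rs".into() })]),
            dep_tracking_callback: RefCell::new(Box::new(|_| {})), // no-op at first
        };
        let reads = Rc::new(RefCell::new(Vec::new()));
        let sink = reads.clone();
        // Once a dep-graph exists, the driver installs a real callback.
        cm.set_dep_tracking_callback(Box::new(move |fm| sink.borrow_mut().push(fm.name.clone())));
        cm.files();
        assert_eq!(reads.borrow().len(), 1);
    }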
@ -170,6 +196,7 @@ impl CodeMap {
         let filemap = Rc::new(FileMap {
             name: filename,
             name_was_remapped: was_remapped,
+            crate_of_origin: 0,
             src: Some(Rc::new(src)),
             start_pos: Pos::from_usize(start_pos),
             end_pos: Pos::from_usize(end_pos),

@ -204,6 +231,7 @@ impl CodeMap {
     pub fn new_imported_filemap(&self,
                                 filename: FileName,
                                 name_was_remapped: bool,
+                                crate_of_origin: u32,
                                 source_len: usize,
                                 mut file_local_lines: Vec<BytePos>,
                                 mut file_local_multibyte_chars: Vec<MultiByteChar>)

@ -225,6 +253,7 @@ impl CodeMap {
         let filemap = Rc::new(FileMap {
             name: filename,
             name_was_remapped: name_was_remapped,
+            crate_of_origin: crate_of_origin,
             src: None,
             start_pos: start_pos,
             end_pos: end_pos,

@ -282,6 +311,8 @@ impl CodeMap {
         let files = self.files.borrow();
         let f = (*files)[idx].clone();

+        (self.dep_tracking_callback.borrow())(&f);
+
         match f.lookup_line(pos) {
             Some(line) => Ok(FileMapAndLine { fm: f, line: line }),
             None => Err(f)

@ -471,6 +502,7 @@ impl CodeMap {
     pub fn get_filemap(&self, filename: &str) -> Option<Rc<FileMap>> {
         for fm in self.files.borrow().iter() {
             if filename == fm.name {
+                (self.dep_tracking_callback.borrow())(&fm);
                 return Some(fm.clone());
             }
         }

@ -481,6 +513,7 @@ impl CodeMap {
     pub fn lookup_byte_offset(&self, bpos: BytePos) -> FileMapAndBytePos {
         let idx = self.lookup_filemap_idx(bpos);
         let fm = (*self.files.borrow())[idx].clone();
+        (self.dep_tracking_callback.borrow())(&fm);
         let offset = bpos - fm.start_pos;
         FileMapAndBytePos {fm: fm, pos: offset}
     }

@ -491,6 +524,8 @@ impl CodeMap {
         let files = self.files.borrow();
         let map = &(*files)[idx];

+        (self.dep_tracking_callback.borrow())(map);
+
         // The number of extra bytes due to multibyte chars in the FileMap
         let mut total_extra_bytes = 0;

@ -536,7 +571,7 @@ impl CodeMap {
     }

     pub fn count_lines(&self) -> usize {
-        self.files.borrow().iter().fold(0, |a, f| a + f.count_lines())
+        self.files().iter().fold(0, |a, f| a + f.count_lines())
     }
 }
@ -377,6 +377,8 @@ pub struct FileMap {
     pub name: FileName,
     /// True if the `name` field above has been modified by -Zremap-path-prefix
     pub name_was_remapped: bool,
+    /// Indicates which crate this FileMap was imported from.
+    pub crate_of_origin: u32,
     /// The complete source code
     pub src: Option<Rc<String>>,
     /// The start position of this source in the CodeMap

@ -491,6 +493,8 @@ impl Decodable for FileMap {
         Ok(FileMap {
             name: name,
             name_was_remapped: name_was_remapped,
+            // `crate_of_origin` has to be set by the importer.
+            crate_of_origin: 0xEFFF_FFFF,
             start_pos: start_pos,
             end_pos: end_pos,
             src: None,
@ -0,0 +1,24 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+
+// aux-build:extern_crate.rs
+//[rpass1] compile-flags: -g
+//[rpass2] compile-flags: -g
+//[rpass3] compile-flags: -g -Zremap-path-prefix-from={{src-base}} -Zremap-path-prefix-to=/the/src
+
+#![feature(rustc_attrs)]
+#![crate_type="rlib"]
+
+#[inline(always)]
+pub fn inline_fn() {
+    println!("test");
+}

src/test/incremental/remapped_paths_cc/main.rs (new file, 42 lines)
@ -0,0 +1,42 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// revisions:rpass1 rpass2 rpass3
+// compile-flags: -Z query-dep-graph -g
+// aux-build:extern_crate.rs
+
+
+// This test case makes sure that we detect if paths emitted into debuginfo
+// are changed, even when the change happens in an external crate.
+
+#![feature(rustc_attrs)]
+
+#![rustc_partition_reused(module="main", cfg="rpass2")]
+#![rustc_partition_reused(module="main-some_mod", cfg="rpass2")]
+#![rustc_partition_reused(module="main", cfg="rpass3")]
+#![rustc_partition_translated(module="main-some_mod", cfg="rpass3")]
+
+extern crate extern_crate;
+
+#[rustc_clean(label="TransCrateItem", cfg="rpass2")]
+#[rustc_clean(label="TransCrateItem", cfg="rpass3")]
+fn main() {
+    some_mod::some_fn();
+}
+
+mod some_mod {
+    use extern_crate;
+
+    #[rustc_clean(label="TransCrateItem", cfg="rpass2")]
+    #[rustc_dirty(label="TransCrateItem", cfg="rpass3")]
+    pub fn some_fn() {
+        extern_crate::inline_fn();
+    }
+}