Auto merge of #100209 - cjgillot:source-file-index, r=estebank
Lazily decode SourceFile from metadata Currently, source files from foreign crates are decoded up-front from metadata. Spans from those crates were matched with the corresponding source using binary search among those files. This PR changes the strategy by matching spans to files during encoding. This allows decoding source files on-demand, instead of up-front. The on-disk format for spans becomes: `<tag> <position from start of file> <length> <file index> <crate (if foreign file)>`.
This commit is contained in:
commit
468887ef91
8 changed files with 199 additions and 219 deletions
|
@ -99,7 +99,7 @@ pub(crate) struct CrateMetadata {
|
||||||
/// Proc macro descriptions for this crate, if it's a proc macro crate.
|
/// Proc macro descriptions for this crate, if it's a proc macro crate.
|
||||||
raw_proc_macros: Option<&'static [ProcMacro]>,
|
raw_proc_macros: Option<&'static [ProcMacro]>,
|
||||||
/// Source maps for code from the crate.
|
/// Source maps for code from the crate.
|
||||||
source_map_import_info: OnceCell<Vec<ImportedSourceFile>>,
|
source_map_import_info: Lock<Vec<Option<ImportedSourceFile>>>,
|
||||||
/// For every definition in this crate, maps its `DefPathHash` to its `DefIndex`.
|
/// For every definition in this crate, maps its `DefPathHash` to its `DefIndex`.
|
||||||
def_path_hash_map: DefPathHashMapRef<'static>,
|
def_path_hash_map: DefPathHashMapRef<'static>,
|
||||||
/// Likewise for ExpnHash.
|
/// Likewise for ExpnHash.
|
||||||
|
@ -143,7 +143,8 @@ pub(crate) struct CrateMetadata {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Holds information about a rustc_span::SourceFile imported from another crate.
|
/// Holds information about a rustc_span::SourceFile imported from another crate.
|
||||||
/// See `imported_source_files()` for more information.
|
/// See `imported_source_file()` for more information.
|
||||||
|
#[derive(Clone)]
|
||||||
struct ImportedSourceFile {
|
struct ImportedSourceFile {
|
||||||
/// This SourceFile's byte-offset within the source_map of its original crate
|
/// This SourceFile's byte-offset within the source_map of its original crate
|
||||||
original_start_pos: rustc_span::BytePos,
|
original_start_pos: rustc_span::BytePos,
|
||||||
|
@ -160,9 +161,6 @@ pub(super) struct DecodeContext<'a, 'tcx> {
|
||||||
sess: Option<&'tcx Session>,
|
sess: Option<&'tcx Session>,
|
||||||
tcx: Option<TyCtxt<'tcx>>,
|
tcx: Option<TyCtxt<'tcx>>,
|
||||||
|
|
||||||
// Cache the last used source_file for translating spans as an optimization.
|
|
||||||
last_source_file_index: usize,
|
|
||||||
|
|
||||||
lazy_state: LazyState,
|
lazy_state: LazyState,
|
||||||
|
|
||||||
// Used for decoding interpret::AllocIds in a cached & thread-safe manner.
|
// Used for decoding interpret::AllocIds in a cached & thread-safe manner.
|
||||||
|
@ -191,7 +189,6 @@ pub(super) trait Metadata<'a, 'tcx>: Copy {
|
||||||
blob: self.blob(),
|
blob: self.blob(),
|
||||||
sess: self.sess().or(tcx.map(|tcx| tcx.sess)),
|
sess: self.sess().or(tcx.map(|tcx| tcx.sess)),
|
||||||
tcx,
|
tcx,
|
||||||
last_source_file_index: 0,
|
|
||||||
lazy_state: LazyState::NoNode,
|
lazy_state: LazyState::NoNode,
|
||||||
alloc_decoding_session: self
|
alloc_decoding_session: self
|
||||||
.cdata()
|
.cdata()
|
||||||
|
@ -527,6 +524,9 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for Span {
|
||||||
bug!("Cannot decode Span without Session.")
|
bug!("Cannot decode Span without Session.")
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Index of the file in the corresponding crate's list of encoded files.
|
||||||
|
let metadata_index = u32::decode(decoder);
|
||||||
|
|
||||||
// There are two possibilities here:
|
// There are two possibilities here:
|
||||||
// 1. This is a 'local span', which is located inside a `SourceFile`
|
// 1. This is a 'local span', which is located inside a `SourceFile`
|
||||||
// that came from this crate. In this case, we use the source map data
|
// that came from this crate. In this case, we use the source map data
|
||||||
|
@ -553,10 +553,10 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for Span {
|
||||||
// to be based on the *foreign* crate (e.g. crate C), not the crate
|
// to be based on the *foreign* crate (e.g. crate C), not the crate
|
||||||
// we are writing metadata for (e.g. crate B). This allows us to
|
// we are writing metadata for (e.g. crate B). This allows us to
|
||||||
// treat the 'local' and 'foreign' cases almost identically during deserialization:
|
// treat the 'local' and 'foreign' cases almost identically during deserialization:
|
||||||
// we can call `imported_source_files` for the proper crate, and binary search
|
// we can call `imported_source_file` for the proper crate, and binary search
|
||||||
// through the returned slice using our span.
|
// through the returned slice using our span.
|
||||||
let imported_source_files = if tag == TAG_VALID_SPAN_LOCAL {
|
let source_file = if tag == TAG_VALID_SPAN_LOCAL {
|
||||||
decoder.cdata().imported_source_files(sess)
|
decoder.cdata().imported_source_file(metadata_index, sess)
|
||||||
} else {
|
} else {
|
||||||
// When we encode a proc-macro crate, all `Span`s should be encoded
|
// When we encode a proc-macro crate, all `Span`s should be encoded
|
||||||
// with `TAG_VALID_SPAN_LOCAL`
|
// with `TAG_VALID_SPAN_LOCAL`
|
||||||
|
@ -577,60 +577,30 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for Span {
|
||||||
cnum
|
cnum
|
||||||
);
|
);
|
||||||
|
|
||||||
// Decoding 'foreign' spans should be rare enough that it's
|
|
||||||
// not worth it to maintain a per-CrateNum cache for `last_source_file_index`.
|
|
||||||
// We just set it to 0, to ensure that we don't try to access something out
|
|
||||||
// of bounds for our initial 'guess'
|
|
||||||
decoder.last_source_file_index = 0;
|
|
||||||
|
|
||||||
let foreign_data = decoder.cdata().cstore.get_crate_data(cnum);
|
let foreign_data = decoder.cdata().cstore.get_crate_data(cnum);
|
||||||
foreign_data.imported_source_files(sess)
|
foreign_data.imported_source_file(metadata_index, sess)
|
||||||
};
|
};
|
||||||
|
|
||||||
let source_file = {
|
// Make sure our span is well-formed.
|
||||||
// Optimize for the case that most spans within a translated item
|
|
||||||
// originate from the same source_file.
|
|
||||||
let last_source_file = &imported_source_files[decoder.last_source_file_index];
|
|
||||||
|
|
||||||
if lo >= last_source_file.original_start_pos && lo <= last_source_file.original_end_pos
|
|
||||||
{
|
|
||||||
last_source_file
|
|
||||||
} else {
|
|
||||||
let index = imported_source_files
|
|
||||||
.binary_search_by_key(&lo, |source_file| source_file.original_start_pos)
|
|
||||||
.unwrap_or_else(|index| index - 1);
|
|
||||||
|
|
||||||
// Don't try to cache the index for foreign spans,
|
|
||||||
// as this would require a map from CrateNums to indices
|
|
||||||
if tag == TAG_VALID_SPAN_LOCAL {
|
|
||||||
decoder.last_source_file_index = index;
|
|
||||||
}
|
|
||||||
&imported_source_files[index]
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Make sure our binary search above is correct.
|
|
||||||
debug_assert!(
|
debug_assert!(
|
||||||
lo >= source_file.original_start_pos && lo <= source_file.original_end_pos,
|
lo + source_file.original_start_pos <= source_file.original_end_pos,
|
||||||
"Bad binary search: lo={:?} source_file.original_start_pos={:?} source_file.original_end_pos={:?}",
|
"Malformed encoded span: lo={:?} source_file.original_start_pos={:?} source_file.original_end_pos={:?}",
|
||||||
lo,
|
lo,
|
||||||
source_file.original_start_pos,
|
source_file.original_start_pos,
|
||||||
source_file.original_end_pos
|
source_file.original_end_pos
|
||||||
);
|
);
|
||||||
|
|
||||||
// Make sure we correctly filtered out invalid spans during encoding
|
// Make sure we correctly filtered out invalid spans during encoding.
|
||||||
debug_assert!(
|
debug_assert!(
|
||||||
hi >= source_file.original_start_pos && hi <= source_file.original_end_pos,
|
hi + source_file.original_start_pos <= source_file.original_end_pos,
|
||||||
"Bad binary search: hi={:?} source_file.original_start_pos={:?} source_file.original_end_pos={:?}",
|
"Malformed encoded span: hi={:?} source_file.original_start_pos={:?} source_file.original_end_pos={:?}",
|
||||||
hi,
|
hi,
|
||||||
source_file.original_start_pos,
|
source_file.original_start_pos,
|
||||||
source_file.original_end_pos
|
source_file.original_end_pos
|
||||||
);
|
);
|
||||||
|
|
||||||
let lo =
|
let lo = lo + source_file.translated_source_file.start_pos;
|
||||||
(lo + source_file.translated_source_file.start_pos) - source_file.original_start_pos;
|
let hi = hi + source_file.translated_source_file.start_pos;
|
||||||
let hi =
|
|
||||||
(hi + source_file.translated_source_file.start_pos) - source_file.original_start_pos;
|
|
||||||
|
|
||||||
// Do not try to decode parent for foreign spans.
|
// Do not try to decode parent for foreign spans.
|
||||||
Span::new(lo, hi, ctxt, None)
|
Span::new(lo, hi, ctxt, None)
|
||||||
|
@ -1482,7 +1452,7 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
|
||||||
///
|
///
|
||||||
/// Proc macro crates don't currently export spans, so this function does not have
|
/// Proc macro crates don't currently export spans, so this function does not have
|
||||||
/// to work for them.
|
/// to work for them.
|
||||||
fn imported_source_files(self, sess: &Session) -> &'a [ImportedSourceFile] {
|
fn imported_source_file(self, source_file_index: u32, sess: &Session) -> ImportedSourceFile {
|
||||||
fn filter<'a>(sess: &Session, path: Option<&'a Path>) -> Option<&'a Path> {
|
fn filter<'a>(sess: &Session, path: Option<&'a Path>) -> Option<&'a Path> {
|
||||||
path.filter(|_| {
|
path.filter(|_| {
|
||||||
// Only spend time on further checks if we have what to translate *to*.
|
// Only spend time on further checks if we have what to translate *to*.
|
||||||
|
@ -1570,90 +1540,96 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
self.cdata.source_map_import_info.get_or_init(|| {
|
let mut import_info = self.cdata.source_map_import_info.lock();
|
||||||
let external_source_map = self.root.source_map.decode(self);
|
for _ in import_info.len()..=(source_file_index as usize) {
|
||||||
|
import_info.push(None);
|
||||||
|
}
|
||||||
|
import_info[source_file_index as usize]
|
||||||
|
.get_or_insert_with(|| {
|
||||||
|
let source_file_to_import = self
|
||||||
|
.root
|
||||||
|
.source_map
|
||||||
|
.get(self, source_file_index)
|
||||||
|
.expect("missing source file")
|
||||||
|
.decode(self);
|
||||||
|
|
||||||
external_source_map
|
// We can't reuse an existing SourceFile, so allocate a new one
|
||||||
.map(|source_file_to_import| {
|
// containing the information we need.
|
||||||
// We can't reuse an existing SourceFile, so allocate a new one
|
let rustc_span::SourceFile {
|
||||||
// containing the information we need.
|
mut name,
|
||||||
let rustc_span::SourceFile {
|
src_hash,
|
||||||
mut name,
|
start_pos,
|
||||||
src_hash,
|
end_pos,
|
||||||
start_pos,
|
lines,
|
||||||
end_pos,
|
multibyte_chars,
|
||||||
lines,
|
non_narrow_chars,
|
||||||
multibyte_chars,
|
normalized_pos,
|
||||||
non_narrow_chars,
|
name_hash,
|
||||||
normalized_pos,
|
..
|
||||||
name_hash,
|
} = source_file_to_import;
|
||||||
..
|
|
||||||
} = source_file_to_import;
|
|
||||||
|
|
||||||
// If this file is under $sysroot/lib/rustlib/src/ but has not been remapped
|
// If this file is under $sysroot/lib/rustlib/src/ but has not been remapped
|
||||||
// during rust bootstrapping by `remap-debuginfo = true`, and the user
|
// during rust bootstrapping by `remap-debuginfo = true`, and the user
|
||||||
// wish to simulate that behaviour by -Z simulate-remapped-rust-src-base,
|
// wish to simulate that behaviour by -Z simulate-remapped-rust-src-base,
|
||||||
// then we change `name` to a similar state as if the rust was bootstrapped
|
// then we change `name` to a similar state as if the rust was bootstrapped
|
||||||
// with `remap-debuginfo = true`.
|
// with `remap-debuginfo = true`.
|
||||||
// This is useful for testing so that tests about the effects of
|
// This is useful for testing so that tests about the effects of
|
||||||
// `try_to_translate_virtual_to_real` don't have to worry about how the
|
// `try_to_translate_virtual_to_real` don't have to worry about how the
|
||||||
// compiler is bootstrapped.
|
// compiler is bootstrapped.
|
||||||
if let Some(virtual_dir) =
|
if let Some(virtual_dir) = &sess.opts.unstable_opts.simulate_remapped_rust_src_base
|
||||||
&sess.opts.unstable_opts.simulate_remapped_rust_src_base
|
{
|
||||||
{
|
if let Some(real_dir) = &sess.opts.real_rust_source_base_dir {
|
||||||
if let Some(real_dir) = &sess.opts.real_rust_source_base_dir {
|
if let rustc_span::FileName::Real(ref mut old_name) = name {
|
||||||
if let rustc_span::FileName::Real(ref mut old_name) = name {
|
if let rustc_span::RealFileName::LocalPath(local) = old_name {
|
||||||
if let rustc_span::RealFileName::LocalPath(local) = old_name {
|
if let Ok(rest) = local.strip_prefix(real_dir) {
|
||||||
if let Ok(rest) = local.strip_prefix(real_dir) {
|
*old_name = rustc_span::RealFileName::Remapped {
|
||||||
*old_name = rustc_span::RealFileName::Remapped {
|
local_path: None,
|
||||||
local_path: None,
|
virtual_name: virtual_dir.join(rest),
|
||||||
virtual_name: virtual_dir.join(rest),
|
};
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// If this file's path has been remapped to `/rustc/$hash`,
|
// If this file's path has been remapped to `/rustc/$hash`,
|
||||||
// we might be able to reverse that (also see comments above,
|
// we might be able to reverse that (also see comments above,
|
||||||
// on `try_to_translate_virtual_to_real`).
|
// on `try_to_translate_virtual_to_real`).
|
||||||
try_to_translate_virtual_to_real(&mut name);
|
try_to_translate_virtual_to_real(&mut name);
|
||||||
|
|
||||||
let source_length = (end_pos - start_pos).to_usize();
|
let source_length = (end_pos - start_pos).to_usize();
|
||||||
|
|
||||||
let local_version = sess.source_map().new_imported_source_file(
|
let local_version = sess.source_map().new_imported_source_file(
|
||||||
name,
|
name,
|
||||||
src_hash,
|
src_hash,
|
||||||
name_hash,
|
name_hash,
|
||||||
source_length,
|
source_length,
|
||||||
self.cnum,
|
self.cnum,
|
||||||
lines,
|
lines,
|
||||||
multibyte_chars,
|
multibyte_chars,
|
||||||
non_narrow_chars,
|
non_narrow_chars,
|
||||||
normalized_pos,
|
normalized_pos,
|
||||||
start_pos,
|
start_pos,
|
||||||
end_pos,
|
source_file_index,
|
||||||
);
|
);
|
||||||
debug!(
|
debug!(
|
||||||
"CrateMetaData::imported_source_files alloc \
|
"CrateMetaData::imported_source_files alloc \
|
||||||
source_file {:?} original (start_pos {:?} end_pos {:?}) \
|
source_file {:?} original (start_pos {:?} end_pos {:?}) \
|
||||||
translated (start_pos {:?} end_pos {:?})",
|
translated (start_pos {:?} end_pos {:?})",
|
||||||
local_version.name,
|
local_version.name,
|
||||||
start_pos,
|
start_pos,
|
||||||
end_pos,
|
end_pos,
|
||||||
local_version.start_pos,
|
local_version.start_pos,
|
||||||
local_version.end_pos
|
local_version.end_pos
|
||||||
);
|
);
|
||||||
|
|
||||||
ImportedSourceFile {
|
ImportedSourceFile {
|
||||||
original_start_pos: start_pos,
|
original_start_pos: start_pos,
|
||||||
original_end_pos: end_pos,
|
original_end_pos: end_pos,
|
||||||
translated_source_file: local_version,
|
translated_source_file: local_version,
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.collect()
|
.clone()
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_generator_diagnostic_data(
|
fn get_generator_diagnostic_data(
|
||||||
|
@ -1716,7 +1692,7 @@ impl CrateMetadata {
|
||||||
trait_impls,
|
trait_impls,
|
||||||
incoherent_impls: Default::default(),
|
incoherent_impls: Default::default(),
|
||||||
raw_proc_macros,
|
raw_proc_macros,
|
||||||
source_map_import_info: OnceCell::new(),
|
source_map_import_info: Lock::new(Vec::new()),
|
||||||
def_path_hash_map,
|
def_path_hash_map,
|
||||||
expn_hash_map: Default::default(),
|
expn_hash_map: Default::default(),
|
||||||
alloc_decoding_state,
|
alloc_decoding_state,
|
||||||
|
|
|
@ -676,6 +676,9 @@ impl CrateStore for CStore {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn import_source_files(&self, sess: &Session, cnum: CrateNum) {
|
fn import_source_files(&self, sess: &Session, cnum: CrateNum) {
|
||||||
self.get_crate_data(cnum).imported_source_files(sess);
|
let cdata = self.get_crate_data(cnum);
|
||||||
|
for file_index in 0..cdata.root.source_map.size() {
|
||||||
|
cdata.imported_source_file(file_index as u32, sess);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -17,7 +17,6 @@ use rustc_hir::definitions::DefPathData;
|
||||||
use rustc_hir::intravisit::{self, Visitor};
|
use rustc_hir::intravisit::{self, Visitor};
|
||||||
use rustc_hir::lang_items;
|
use rustc_hir::lang_items;
|
||||||
use rustc_hir::{AnonConst, GenericParamKind};
|
use rustc_hir::{AnonConst, GenericParamKind};
|
||||||
use rustc_index::bit_set::GrowableBitSet;
|
|
||||||
use rustc_middle::hir::nested_filter;
|
use rustc_middle::hir::nested_filter;
|
||||||
use rustc_middle::middle::dependency_format::Linkage;
|
use rustc_middle::middle::dependency_format::Linkage;
|
||||||
use rustc_middle::middle::exported_symbols::{
|
use rustc_middle::middle::exported_symbols::{
|
||||||
|
@ -67,13 +66,10 @@ pub(super) struct EncodeContext<'a, 'tcx> {
|
||||||
// The indices (into the `SourceMap`'s `MonotonicVec`)
|
// The indices (into the `SourceMap`'s `MonotonicVec`)
|
||||||
// of all of the `SourceFiles` that we need to serialize.
|
// of all of the `SourceFiles` that we need to serialize.
|
||||||
// When we serialize a `Span`, we insert the index of its
|
// When we serialize a `Span`, we insert the index of its
|
||||||
// `SourceFile` into the `GrowableBitSet`.
|
// `SourceFile` into the `FxIndexSet`.
|
||||||
//
|
// The order inside the `FxIndexSet` is used as on-disk
|
||||||
// This needs to be a `GrowableBitSet` and not a
|
// order of `SourceFiles`, and encoded inside `Span`s.
|
||||||
// regular `BitSet` because we may actually import new `SourceFiles`
|
required_source_files: Option<FxIndexSet<usize>>,
|
||||||
// during metadata encoding, due to executing a query
|
|
||||||
// with a result containing a foreign `Span`.
|
|
||||||
required_source_files: Option<GrowableBitSet<usize>>,
|
|
||||||
is_proc_macro: bool,
|
is_proc_macro: bool,
|
||||||
hygiene_ctxt: &'a HygieneEncodeContext,
|
hygiene_ctxt: &'a HygieneEncodeContext,
|
||||||
symbol_table: FxHashMap<Symbol, usize>,
|
symbol_table: FxHashMap<Symbol, usize>,
|
||||||
|
@ -240,17 +236,15 @@ impl<'a, 'tcx> Encodable<EncodeContext<'a, 'tcx>> for Span {
|
||||||
s.source_file_cache =
|
s.source_file_cache =
|
||||||
(source_map.files()[source_file_index].clone(), source_file_index);
|
(source_map.files()[source_file_index].clone(), source_file_index);
|
||||||
}
|
}
|
||||||
|
let (ref source_file, source_file_index) = s.source_file_cache;
|
||||||
|
debug_assert!(source_file.contains(span.lo));
|
||||||
|
|
||||||
if !s.source_file_cache.0.contains(span.hi) {
|
if !source_file.contains(span.hi) {
|
||||||
// Unfortunately, macro expansion still sometimes generates Spans
|
// Unfortunately, macro expansion still sometimes generates Spans
|
||||||
// that are malformed in this way.
|
// that are malformed in this way.
|
||||||
return TAG_PARTIAL_SPAN.encode(s);
|
return TAG_PARTIAL_SPAN.encode(s);
|
||||||
}
|
}
|
||||||
|
|
||||||
let source_files = s.required_source_files.as_mut().expect("Already encoded SourceMap!");
|
|
||||||
// Record the fact that we need to encode the data for this `SourceFile`
|
|
||||||
source_files.insert(s.source_file_cache.1);
|
|
||||||
|
|
||||||
// There are two possible cases here:
|
// There are two possible cases here:
|
||||||
// 1. This span comes from a 'foreign' crate - e.g. some crate upstream of the
|
// 1. This span comes from a 'foreign' crate - e.g. some crate upstream of the
|
||||||
// crate we are writing metadata for. When the metadata for *this* crate gets
|
// crate we are writing metadata for. When the metadata for *this* crate gets
|
||||||
|
@ -267,7 +261,7 @@ impl<'a, 'tcx> Encodable<EncodeContext<'a, 'tcx>> for Span {
|
||||||
// if we're a proc-macro crate.
|
// if we're a proc-macro crate.
|
||||||
// This allows us to avoid loading the dependencies of proc-macro crates: all of
|
// This allows us to avoid loading the dependencies of proc-macro crates: all of
|
||||||
// the information we need to decode `Span`s is stored in the proc-macro crate.
|
// the information we need to decode `Span`s is stored in the proc-macro crate.
|
||||||
let (tag, lo, hi) = if s.source_file_cache.0.is_imported() && !s.is_proc_macro {
|
let (tag, metadata_index) = if source_file.is_imported() && !s.is_proc_macro {
|
||||||
// To simplify deserialization, we 'rebase' this span onto the crate it originally came from
|
// To simplify deserialization, we 'rebase' this span onto the crate it originally came from
|
||||||
// (the crate that 'owns' the file it references. These rebased 'lo' and 'hi' values
|
// (the crate that 'owns' the file it references. These rebased 'lo' and 'hi' values
|
||||||
// are relative to the source map information for the 'foreign' crate whose CrateNum
|
// are relative to the source map information for the 'foreign' crate whose CrateNum
|
||||||
|
@ -277,29 +271,41 @@ impl<'a, 'tcx> Encodable<EncodeContext<'a, 'tcx>> for Span {
|
||||||
//
|
//
|
||||||
// All of this logic ensures that the final result of deserialization is a 'normal'
|
// All of this logic ensures that the final result of deserialization is a 'normal'
|
||||||
// Span that can be used without any additional trouble.
|
// Span that can be used without any additional trouble.
|
||||||
let external_start_pos = {
|
let metadata_index = {
|
||||||
// Introduce a new scope so that we drop the 'lock()' temporary
|
// Introduce a new scope so that we drop the 'lock()' temporary
|
||||||
match &*s.source_file_cache.0.external_src.lock() {
|
match &*source_file.external_src.lock() {
|
||||||
ExternalSource::Foreign { original_start_pos, .. } => *original_start_pos,
|
ExternalSource::Foreign { metadata_index, .. } => *metadata_index,
|
||||||
src => panic!("Unexpected external source {:?}", src),
|
src => panic!("Unexpected external source {:?}", src),
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
let lo = (span.lo - s.source_file_cache.0.start_pos) + external_start_pos;
|
|
||||||
let hi = (span.hi - s.source_file_cache.0.start_pos) + external_start_pos;
|
|
||||||
|
|
||||||
(TAG_VALID_SPAN_FOREIGN, lo, hi)
|
(TAG_VALID_SPAN_FOREIGN, metadata_index)
|
||||||
} else {
|
} else {
|
||||||
(TAG_VALID_SPAN_LOCAL, span.lo, span.hi)
|
// Record the fact that we need to encode the data for this `SourceFile`
|
||||||
|
let source_files =
|
||||||
|
s.required_source_files.as_mut().expect("Already encoded SourceMap!");
|
||||||
|
let (metadata_index, _) = source_files.insert_full(source_file_index);
|
||||||
|
let metadata_index: u32 =
|
||||||
|
metadata_index.try_into().expect("cannot export more than U32_MAX files");
|
||||||
|
|
||||||
|
(TAG_VALID_SPAN_LOCAL, metadata_index)
|
||||||
};
|
};
|
||||||
|
|
||||||
tag.encode(s);
|
// Encode the start position relative to the file start, so we profit more from the
|
||||||
lo.encode(s);
|
// variable-length integer encoding.
|
||||||
|
let lo = span.lo - source_file.start_pos;
|
||||||
|
|
||||||
// Encode length which is usually less than span.hi and profits more
|
// Encode length which is usually less than span.hi and profits more
|
||||||
// from the variable-length integer encoding that we use.
|
// from the variable-length integer encoding that we use.
|
||||||
let len = hi - lo;
|
let len = span.hi - span.lo;
|
||||||
|
|
||||||
|
tag.encode(s);
|
||||||
|
lo.encode(s);
|
||||||
len.encode(s);
|
len.encode(s);
|
||||||
|
|
||||||
|
// Encode the index of the `SourceFile` for the span, in order to make decoding faster.
|
||||||
|
metadata_index.encode(s);
|
||||||
|
|
||||||
if tag == TAG_VALID_SPAN_FOREIGN {
|
if tag == TAG_VALID_SPAN_FOREIGN {
|
||||||
// This needs to be two lines to avoid holding the `s.source_file_cache`
|
// This needs to be two lines to avoid holding the `s.source_file_cache`
|
||||||
// while calling `cnum.encode(s)`
|
// while calling `cnum.encode(s)`
|
||||||
|
@ -469,7 +475,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
|
||||||
self.lazy(DefPathHashMapRef::BorrowedFromTcx(self.tcx.def_path_hash_to_def_index_map()))
|
self.lazy(DefPathHashMapRef::BorrowedFromTcx(self.tcx.def_path_hash_to_def_index_map()))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn encode_source_map(&mut self) -> LazyArray<rustc_span::SourceFile> {
|
fn encode_source_map(&mut self) -> LazyTable<u32, LazyValue<rustc_span::SourceFile>> {
|
||||||
let source_map = self.tcx.sess.source_map();
|
let source_map = self.tcx.sess.source_map();
|
||||||
let all_source_files = source_map.files();
|
let all_source_files = source_map.files();
|
||||||
|
|
||||||
|
@ -480,66 +486,64 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
|
||||||
|
|
||||||
let working_directory = &self.tcx.sess.opts.working_dir;
|
let working_directory = &self.tcx.sess.opts.working_dir;
|
||||||
|
|
||||||
let adapted = all_source_files
|
let mut adapted = TableBuilder::default();
|
||||||
.iter()
|
|
||||||
.enumerate()
|
|
||||||
.filter(|(idx, source_file)| {
|
|
||||||
// Only serialize `SourceFile`s that were used
|
|
||||||
// during the encoding of a `Span`
|
|
||||||
required_source_files.contains(*idx) &&
|
|
||||||
// Don't serialize imported `SourceFile`s, unless
|
|
||||||
// we're in a proc-macro crate.
|
|
||||||
(!source_file.is_imported() || self.is_proc_macro)
|
|
||||||
})
|
|
||||||
.map(|(_, source_file)| {
|
|
||||||
// At export time we expand all source file paths to absolute paths because
|
|
||||||
// downstream compilation sessions can have a different compiler working
|
|
||||||
// directory, so relative paths from this or any other upstream crate
|
|
||||||
// won't be valid anymore.
|
|
||||||
//
|
|
||||||
// At this point we also erase the actual on-disk path and only keep
|
|
||||||
// the remapped version -- as is necessary for reproducible builds.
|
|
||||||
match source_file.name {
|
|
||||||
FileName::Real(ref original_file_name) => {
|
|
||||||
let adapted_file_name =
|
|
||||||
source_map.path_mapping().to_embeddable_absolute_path(
|
|
||||||
original_file_name.clone(),
|
|
||||||
working_directory,
|
|
||||||
);
|
|
||||||
|
|
||||||
if adapted_file_name != *original_file_name {
|
// Only serialize `SourceFile`s that were used during the encoding of a `Span`.
|
||||||
let mut adapted: SourceFile = (**source_file).clone();
|
//
|
||||||
adapted.name = FileName::Real(adapted_file_name);
|
// The order in which we encode source files is important here: the on-disk format for
|
||||||
adapted.name_hash = {
|
// `Span` contains the index of the corresponding `SourceFile`.
|
||||||
let mut hasher: StableHasher = StableHasher::new();
|
for (on_disk_index, &source_file_index) in required_source_files.iter().enumerate() {
|
||||||
adapted.name.hash(&mut hasher);
|
let source_file = &all_source_files[source_file_index];
|
||||||
hasher.finish::<u128>()
|
// Don't serialize imported `SourceFile`s, unless we're in a proc-macro crate.
|
||||||
};
|
assert!(!source_file.is_imported() || self.is_proc_macro);
|
||||||
Lrc::new(adapted)
|
|
||||||
} else {
|
// At export time we expand all source file paths to absolute paths because
|
||||||
// Nothing to adapt
|
// downstream compilation sessions can have a different compiler working
|
||||||
source_file.clone()
|
// directory, so relative paths from this or any other upstream crate
|
||||||
}
|
// won't be valid anymore.
|
||||||
|
//
|
||||||
|
// At this point we also erase the actual on-disk path and only keep
|
||||||
|
// the remapped version -- as is necessary for reproducible builds.
|
||||||
|
let mut source_file = match source_file.name {
|
||||||
|
FileName::Real(ref original_file_name) => {
|
||||||
|
let adapted_file_name = source_map
|
||||||
|
.path_mapping()
|
||||||
|
.to_embeddable_absolute_path(original_file_name.clone(), working_directory);
|
||||||
|
|
||||||
|
if adapted_file_name != *original_file_name {
|
||||||
|
let mut adapted: SourceFile = (**source_file).clone();
|
||||||
|
adapted.name = FileName::Real(adapted_file_name);
|
||||||
|
adapted.name_hash = {
|
||||||
|
let mut hasher: StableHasher = StableHasher::new();
|
||||||
|
adapted.name.hash(&mut hasher);
|
||||||
|
hasher.finish::<u128>()
|
||||||
|
};
|
||||||
|
Lrc::new(adapted)
|
||||||
|
} else {
|
||||||
|
// Nothing to adapt
|
||||||
|
source_file.clone()
|
||||||
}
|
}
|
||||||
// expanded code, not from a file
|
|
||||||
_ => source_file.clone(),
|
|
||||||
}
|
}
|
||||||
})
|
// expanded code, not from a file
|
||||||
.map(|mut source_file| {
|
_ => source_file.clone(),
|
||||||
// We're serializing this `SourceFile` into our crate metadata,
|
};
|
||||||
// so mark it as coming from this crate.
|
|
||||||
// This also ensures that we don't try to deserialize the
|
|
||||||
// `CrateNum` for a proc-macro dependency - since proc macro
|
|
||||||
// dependencies aren't loaded when we deserialize a proc-macro,
|
|
||||||
// trying to remap the `CrateNum` would fail.
|
|
||||||
if self.is_proc_macro {
|
|
||||||
Lrc::make_mut(&mut source_file).cnum = LOCAL_CRATE;
|
|
||||||
}
|
|
||||||
source_file
|
|
||||||
})
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
self.lazy_array(adapted.iter().map(|rc| &**rc))
|
// We're serializing this `SourceFile` into our crate metadata,
|
||||||
|
// so mark it as coming from this crate.
|
||||||
|
// This also ensures that we don't try to deserialize the
|
||||||
|
// `CrateNum` for a proc-macro dependency - since proc macro
|
||||||
|
// dependencies aren't loaded when we deserialize a proc-macro,
|
||||||
|
// trying to remap the `CrateNum` would fail.
|
||||||
|
if self.is_proc_macro {
|
||||||
|
Lrc::make_mut(&mut source_file).cnum = LOCAL_CRATE;
|
||||||
|
}
|
||||||
|
|
||||||
|
let on_disk_index: u32 =
|
||||||
|
on_disk_index.try_into().expect("cannot export more than U32_MAX files");
|
||||||
|
adapted.set(on_disk_index, self.lazy(source_file));
|
||||||
|
}
|
||||||
|
|
||||||
|
adapted.encode(&mut self.opaque)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn encode_crate_root(&mut self) -> LazyValue<CrateRoot> {
|
fn encode_crate_root(&mut self) -> LazyValue<CrateRoot> {
|
||||||
|
@ -2261,7 +2265,7 @@ fn encode_metadata_impl(tcx: TyCtxt<'_>, path: &Path) {
|
||||||
|
|
||||||
let source_map_files = tcx.sess.source_map().files();
|
let source_map_files = tcx.sess.source_map().files();
|
||||||
let source_file_cache = (source_map_files[0].clone(), 0);
|
let source_file_cache = (source_map_files[0].clone(), 0);
|
||||||
let required_source_files = Some(GrowableBitSet::with_capacity(source_map_files.len()));
|
let required_source_files = Some(FxIndexSet::default());
|
||||||
drop(source_map_files);
|
drop(source_map_files);
|
||||||
|
|
||||||
let hygiene_ctxt = HygieneEncodeContext::default();
|
let hygiene_ctxt = HygieneEncodeContext::default();
|
||||||
|
|
|
@ -249,7 +249,7 @@ pub(crate) struct CrateRoot {
|
||||||
|
|
||||||
def_path_hash_map: LazyValue<DefPathHashMapRef<'static>>,
|
def_path_hash_map: LazyValue<DefPathHashMapRef<'static>>,
|
||||||
|
|
||||||
source_map: LazyArray<rustc_span::SourceFile>,
|
source_map: LazyTable<u32, LazyValue<rustc_span::SourceFile>>,
|
||||||
|
|
||||||
compiler_builtins: bool,
|
compiler_builtins: bool,
|
||||||
needs_allocator: bool,
|
needs_allocator: bool,
|
||||||
|
|
|
@ -1094,10 +1094,8 @@ pub enum ExternalSource {
|
||||||
Unneeded,
|
Unneeded,
|
||||||
Foreign {
|
Foreign {
|
||||||
kind: ExternalSourceKind,
|
kind: ExternalSourceKind,
|
||||||
/// This SourceFile's byte-offset within the source_map of its original crate.
|
/// Index of the file inside metadata.
|
||||||
original_start_pos: BytePos,
|
metadata_index: u32,
|
||||||
/// The end of this SourceFile within the source_map of its original crate.
|
|
||||||
original_end_pos: BytePos,
|
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -336,7 +336,7 @@ impl SourceMap {
|
||||||
mut file_local_non_narrow_chars: Vec<NonNarrowChar>,
|
mut file_local_non_narrow_chars: Vec<NonNarrowChar>,
|
||||||
mut file_local_normalized_pos: Vec<NormalizedPos>,
|
mut file_local_normalized_pos: Vec<NormalizedPos>,
|
||||||
original_start_pos: BytePos,
|
original_start_pos: BytePos,
|
||||||
original_end_pos: BytePos,
|
metadata_index: u32,
|
||||||
) -> Lrc<SourceFile> {
|
) -> Lrc<SourceFile> {
|
||||||
let start_pos = self
|
let start_pos = self
|
||||||
.allocate_address_space(source_len)
|
.allocate_address_space(source_len)
|
||||||
|
@ -381,8 +381,7 @@ impl SourceMap {
|
||||||
src_hash,
|
src_hash,
|
||||||
external_src: Lock::new(ExternalSource::Foreign {
|
external_src: Lock::new(ExternalSource::Foreign {
|
||||||
kind: ExternalSourceKind::AbsentOk,
|
kind: ExternalSourceKind::AbsentOk,
|
||||||
original_start_pos,
|
metadata_index,
|
||||||
original_end_pos,
|
|
||||||
}),
|
}),
|
||||||
start_pos,
|
start_pos,
|
||||||
end_pos,
|
end_pos,
|
||||||
|
|
|
@ -251,7 +251,7 @@ fn t10() {
|
||||||
non_narrow_chars,
|
non_narrow_chars,
|
||||||
normalized_pos,
|
normalized_pos,
|
||||||
start_pos,
|
start_pos,
|
||||||
end_pos,
|
0,
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(
|
assert!(
|
||||||
|
|
|
@ -18,16 +18,16 @@ error[E0599]: the method `collect` exists for struct `Cloned<TakeWhile<&mut std:
|
||||||
LL | .collect();
|
LL | .collect();
|
||||||
| ^^^^^^^ method cannot be called on `Cloned<TakeWhile<&mut std::vec::IntoIter<u8>, [closure@$DIR/issue-31173.rs:7:21: 7:25]>>` due to unsatisfied trait bounds
|
| ^^^^^^^ method cannot be called on `Cloned<TakeWhile<&mut std::vec::IntoIter<u8>, [closure@$DIR/issue-31173.rs:7:21: 7:25]>>` due to unsatisfied trait bounds
|
||||||
|
|
|
|
||||||
::: $SRC_DIR/core/src/iter/adapters/cloned.rs:LL:COL
|
|
||||||
|
|
|
||||||
LL | pub struct Cloned<I> {
|
|
||||||
| -------------------- doesn't satisfy `_: Iterator`
|
|
||||||
|
|
|
||||||
::: $SRC_DIR/core/src/iter/adapters/take_while.rs:LL:COL
|
::: $SRC_DIR/core/src/iter/adapters/take_while.rs:LL:COL
|
||||||
|
|
|
|
||||||
LL | pub struct TakeWhile<I, P> {
|
LL | pub struct TakeWhile<I, P> {
|
||||||
| -------------------------- doesn't satisfy `<_ as Iterator>::Item = &_`
|
| -------------------------- doesn't satisfy `<_ as Iterator>::Item = &_`
|
||||||
|
|
|
|
||||||
|
::: $SRC_DIR/core/src/iter/adapters/cloned.rs:LL:COL
|
||||||
|
|
|
||||||
|
LL | pub struct Cloned<I> {
|
||||||
|
| -------------------- doesn't satisfy `_: Iterator`
|
||||||
|
|
|
||||||
= note: the following trait bounds were not satisfied:
|
= note: the following trait bounds were not satisfied:
|
||||||
`<TakeWhile<&mut std::vec::IntoIter<u8>, [closure@$DIR/issue-31173.rs:7:21: 7:25]> as Iterator>::Item = &_`
|
`<TakeWhile<&mut std::vec::IntoIter<u8>, [closure@$DIR/issue-31173.rs:7:21: 7:25]> as Iterator>::Item = &_`
|
||||||
which is required by `Cloned<TakeWhile<&mut std::vec::IntoIter<u8>, [closure@$DIR/issue-31173.rs:7:21: 7:25]>>: Iterator`
|
which is required by `Cloned<TakeWhile<&mut std::vec::IntoIter<u8>, [closure@$DIR/issue-31173.rs:7:21: 7:25]>>: Iterator`
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue