Encode index of SourceFile along with span.
commit f20ceb1c6f
parent 55f46419af
5 changed files with 65 additions and 69 deletions
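In rough terms, each valid span written into crate metadata now also records which SourceFile it belongs to, so the decoder can jump straight to that file instead of searching for it. The sketch below is illustrative only (the names and types are not the compiler's; the field order follows the encoder hunks further down):

// Hypothetical layout of one encoded span record after this change.
struct EncodedSpan {
    tag: u8,             // TAG_VALID_SPAN_LOCAL, TAG_VALID_SPAN_FOREIGN, or TAG_PARTIAL_SPAN
    lo: u32,             // start position; rebased for foreign spans
    len: u32,            // hi - lo
    metadata_index: u32, // new: index of the span's SourceFile in the encoded file list
    cnum: Option<u32>,   // crate number, written only for foreign spans
}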
@@ -527,6 +527,9 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for Span {
             bug!("Cannot decode Span without Session.")
         };
 
+        // Index of the file in the corresponding crate's list of encoded files.
+        let metadata_index = usize::decode(decoder);
+
         // There are two possibilities here:
         // 1. This is a 'local span', which is located inside a `SourceFile`
         // that came from this crate. In this case, we use the source map data
@@ -587,27 +590,9 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for Span {
                 foreign_data.imported_source_files(sess)
             };
 
-        let source_file = {
-            // Optimize for the case that most spans within a translated item
-            // originate from the same source_file.
-            let last_source_file = &imported_source_files[decoder.last_source_file_index];
-
-            if lo >= last_source_file.original_start_pos && lo <= last_source_file.original_end_pos
-            {
-                last_source_file
-            } else {
-                let index = imported_source_files
-                    .binary_search_by_key(&lo, |source_file| source_file.original_start_pos)
-                    .unwrap_or_else(|index| index - 1);
-
-                // Don't try to cache the index for foreign spans,
-                // as this would require a map from CrateNums to indices
-                if tag == TAG_VALID_SPAN_LOCAL {
-                    decoder.last_source_file_index = index;
-                }
-                &imported_source_files[index]
-            }
-        };
+        // Optimize for the case that most spans within a translated item
+        // originate from the same source_file.
+        let source_file = &imported_source_files[metadata_index];
 
         // Make sure our binary search above is correct.
         debug_assert!(
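To make the effect of the hunk above concrete, here is a standalone sketch of the old lookup versus the new one, using a plain Vec and invented names and values rather than the compiler's types:

struct ImportedFile {
    original_start_pos: u32,
}

// Old approach: binary search the imported files by the span's start position.
fn find_by_binary_search(files: &[ImportedFile], lo: u32) -> &ImportedFile {
    let index = files
        .binary_search_by_key(&lo, |f| f.original_start_pos)
        .unwrap_or_else(|index| index - 1);
    &files[index]
}

// New approach: the encoder already recorded which file the span lives in.
fn find_by_index(files: &[ImportedFile], metadata_index: usize) -> &ImportedFile {
    &files[metadata_index]
}

fn main() {
    let files = vec![
        ImportedFile { original_start_pos: 0 },
        ImportedFile { original_start_pos: 100 },
    ];
    assert_eq!(find_by_binary_search(&files, 150).original_start_pos, 100);
    assert_eq!(find_by_index(&files, 1).original_start_pos, 100);
}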
@@ -1545,7 +1530,8 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
         let external_source_map = self.root.source_map.decode(self);
 
         external_source_map
-            .map(|source_file_to_import| {
+            .enumerate()
+            .map(|(source_file_index, source_file_to_import)| {
                 // We can't reuse an existing SourceFile, so allocate a new one
                 // containing the information we need.
                 let rustc_span::SourceFile {
@@ -1605,6 +1591,9 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
                     normalized_pos,
                     start_pos,
                     end_pos,
+                    source_file_index
+                        .try_into()
+                        .expect("cannot import more than U32_MAX files"),
                 );
                 debug!(
                     "CrateMetaData::imported_source_files alloc \
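The hunk above is the import side of the bookkeeping: each foreign SourceFile is paired with its position in the owning crate's encoded file list, narrowed to u32 before being stored. A minimal sketch of that pattern, with invented names (not compiler code):

use std::convert::TryInto;

// Pair each imported file with its index in the owning crate's metadata.
fn import_files(names: Vec<String>) -> Vec<(String, u32)> {
    names
        .into_iter()
        .enumerate()
        .map(|(source_file_index, name)| {
            let metadata_index: u32 =
                source_file_index.try_into().expect("cannot import more than U32_MAX files");
            (name, metadata_index)
        })
        .collect()
}

fn main() {
    let imported = import_files(vec!["lib.rs".into(), "helper.rs".into()]);
    assert_eq!(imported[1], ("helper.rs".to_string(), 1));
}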
@@ -17,7 +17,6 @@ use rustc_hir::definitions::DefPathData;
 use rustc_hir::intravisit::{self, Visitor};
 use rustc_hir::lang_items;
 use rustc_hir::{AnonConst, GenericParamKind};
-use rustc_index::bit_set::GrowableBitSet;
 use rustc_middle::hir::nested_filter;
 use rustc_middle::middle::dependency_format::Linkage;
 use rustc_middle::middle::exported_symbols::{
@@ -66,13 +65,10 @@ pub(super) struct EncodeContext<'a, 'tcx> {
     // The indices (into the `SourceMap`'s `MonotonicVec`)
     // of all of the `SourceFiles` that we need to serialize.
     // When we serialize a `Span`, we insert the index of its
-    // `SourceFile` into the `GrowableBitSet`.
-    //
-    // This needs to be a `GrowableBitSet` and not a
-    // regular `BitSet` because we may actually import new `SourceFiles`
-    // during metadata encoding, due to executing a query
-    // with a result containing a foreign `Span`.
-    required_source_files: Option<GrowableBitSet<usize>>,
+    // `SourceFile` into the `FxIndexSet`.
+    // The order inside the `FxIndexSet` is used as on-disk
+    // order of `SourceFiles`, and encoded inside `Span`s.
+    required_source_files: Option<FxIndexSet<usize>>,
     is_proc_macro: bool,
     hygiene_ctxt: &'a HygieneEncodeContext,
 }
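The switch from a GrowableBitSet to an FxIndexSet matters because the encoder now needs a stable, dense index per SourceFile, not just membership. Assuming FxIndexSet is essentially indexmap's IndexSet with a faster hasher (as in rustc_data_structures), a small sketch of the property being relied on, using the indexmap crate directly:

use indexmap::IndexSet;

fn main() {
    let mut required_source_files: IndexSet<usize> = IndexSet::new();
    // insert_full returns the element's stable position in insertion order,
    // so the same SourceFile always maps to the same on-disk index.
    let (a, _) = required_source_files.insert_full(42); // first file seen
    let (b, _) = required_source_files.insert_full(7);  // second file seen
    let (a_again, newly_inserted) = required_source_files.insert_full(42);
    assert_eq!((a, b, a_again, newly_inserted), (0, 1, 0, false));
}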
@@ -245,10 +241,6 @@ impl<'a, 'tcx> Encodable<EncodeContext<'a, 'tcx>> for Span {
             return TAG_PARTIAL_SPAN.encode(s);
         }
 
-        let source_files = s.required_source_files.as_mut().expect("Already encoded SourceMap!");
-        // Record the fact that we need to encode the data for this `SourceFile`
-        source_files.insert(s.source_file_cache.1);
-
         // There are two possible cases here:
         // 1. This span comes from a 'foreign' crate - e.g. some crate upstream of the
         // crate we are writing metadata for. When the metadata for *this* crate gets
@@ -265,30 +257,38 @@ impl<'a, 'tcx> Encodable<EncodeContext<'a, 'tcx>> for Span {
         // if we're a proc-macro crate.
         // This allows us to avoid loading the dependencies of proc-macro crates: all of
         // the information we need to decode `Span`s is stored in the proc-macro crate.
-        let (tag, lo, hi) = if s.source_file_cache.0.is_imported() && !s.is_proc_macro {
-            // To simplify deserialization, we 'rebase' this span onto the crate it originally came from
-            // (the crate that 'owns' the file it references. These rebased 'lo' and 'hi' values
-            // are relative to the source map information for the 'foreign' crate whose CrateNum
-            // we write into the metadata. This allows `imported_source_files` to binary
-            // search through the 'foreign' crate's source map information, using the
-            // deserialized 'lo' and 'hi' values directly.
-            //
-            // All of this logic ensures that the final result of deserialization is a 'normal'
-            // Span that can be used without any additional trouble.
-            let external_start_pos = {
-                // Introduce a new scope so that we drop the 'lock()' temporary
-                match &*s.source_file_cache.0.external_src.lock() {
-                    ExternalSource::Foreign { original_start_pos, .. } => *original_start_pos,
-                    src => panic!("Unexpected external source {:?}", src),
-                }
-            };
-            let lo = (span.lo - s.source_file_cache.0.start_pos) + external_start_pos;
-            let hi = (span.hi - s.source_file_cache.0.start_pos) + external_start_pos;
-
-            (TAG_VALID_SPAN_FOREIGN, lo, hi)
-        } else {
-            (TAG_VALID_SPAN_LOCAL, span.lo, span.hi)
-        };
+        let (tag, lo, hi, metadata_index) =
+            if s.source_file_cache.0.is_imported() && !s.is_proc_macro {
+                // To simplify deserialization, we 'rebase' this span onto the crate it originally came from
+                // (the crate that 'owns' the file it references. These rebased 'lo' and 'hi' values
+                // are relative to the source map information for the 'foreign' crate whose CrateNum
+                // we write into the metadata. This allows `imported_source_files` to binary
+                // search through the 'foreign' crate's source map information, using the
+                // deserialized 'lo' and 'hi' values directly.
+                //
+                // All of this logic ensures that the final result of deserialization is a 'normal'
+                // Span that can be used without any additional trouble.
+                let (external_start_pos, metadata_index) = {
+                    // Introduce a new scope so that we drop the 'lock()' temporary
+                    match &*s.source_file_cache.0.external_src.lock() {
+                        ExternalSource::Foreign { original_start_pos, metadata_index, .. } => {
+                            (*original_start_pos, *metadata_index as usize)
+                        }
+                        src => panic!("Unexpected external source {:?}", src),
+                    }
+                };
+                let lo = (span.lo - s.source_file_cache.0.start_pos) + external_start_pos;
+                let hi = (span.hi - s.source_file_cache.0.start_pos) + external_start_pos;
+
+                (TAG_VALID_SPAN_FOREIGN, lo, hi, metadata_index)
+            } else {
+                // Record the fact that we need to encode the data for this `SourceFile`
+                let source_files =
+                    s.required_source_files.as_mut().expect("Already encoded SourceMap!");
+                let (source_file_index, _) = source_files.insert_full(s.source_file_cache.1);
+
+                (TAG_VALID_SPAN_LOCAL, span.lo, span.hi, source_file_index)
+            };
 
         tag.encode(s);
         lo.encode(s);
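A tiny worked example of the 'rebase' arithmetic in the foreign branch above, with invented byte positions: positions local to this crate's SourceMap are shifted into the coordinate space of the crate that owns the file, so the decoder can use them against that crate's metadata.

fn main() {
    let start_pos = 1_000u32;        // where the imported file begins in *this* SourceMap
    let external_start_pos = 250u32; // where the same file began in the owning crate
    let (span_lo, span_hi) = (1_010u32, 1_020u32);

    // Same arithmetic as the encoder hunk: subtract the local offset, add the foreign one.
    let lo = (span_lo - start_pos) + external_start_pos;
    let hi = (span_hi - start_pos) + external_start_pos;
    assert_eq!((lo, hi), (260, 270));
}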
@@ -298,6 +298,9 @@ impl<'a, 'tcx> Encodable<EncodeContext<'a, 'tcx>> for Span {
         let len = hi - lo;
         len.encode(s);
 
+        // Encode the index of the `SourceFile` for the span, in order to make decoding faster.
+        metadata_index.encode(s);
+
         if tag == TAG_VALID_SPAN_FOREIGN {
             // This needs to be two lines to avoid holding the `s.source_file_cache`
             // while calling `cnum.encode(s)`
@@ -460,18 +463,17 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
 
         let working_directory = &self.tcx.sess.opts.working_dir;
 
-        let adapted = all_source_files
+        // Only serialize `SourceFile`s that were used during the encoding of a `Span`.
+        //
+        // The order in which we encode source files is important here: the on-disk format for
+        // `Span` contains the index of the corresponding `SourceFile`.
+        let adapted = required_source_files
             .iter()
-            .enumerate()
-            .filter(|(idx, source_file)| {
-                // Only serialize `SourceFile`s that were used
-                // during the encoding of a `Span`
-                required_source_files.contains(*idx) &&
-                // Don't serialize imported `SourceFile`s, unless
-                // we're in a proc-macro crate.
-                (!source_file.is_imported() || self.is_proc_macro)
-            })
-            .map(|(_, source_file)| {
+            .map(|&source_file_index| &all_source_files[source_file_index])
+            .map(|source_file| {
+                // Don't serialize imported `SourceFile`s, unless we're in a proc-macro crate.
+                assert!(!source_file.is_imported() || self.is_proc_macro);
+
                 // At export time we expand all source file paths to absolute paths because
                 // downstream compilation sessions can have a different compiler working
                 // directory, so relative paths from this or any other upstream crate
@@ -2228,7 +2230,7 @@ fn encode_metadata_impl(tcx: TyCtxt<'_>, path: &Path) {
 
     let source_map_files = tcx.sess.source_map().files();
     let source_file_cache = (source_map_files[0].clone(), 0);
-    let required_source_files = Some(GrowableBitSet::with_capacity(source_map_files.len()));
+    let required_source_files = Some(FxIndexSet::default());
     drop(source_map_files);
 
     let hygiene_ctxt = HygieneEncodeContext::default();
@@ -1098,6 +1098,8 @@ pub enum ExternalSource {
         original_start_pos: BytePos,
         /// The end of this SourceFile within the source_map of its original crate.
         original_end_pos: BytePos,
+        /// Index of the file inside metadata.
+        metadata_index: u32,
     },
 }
 
@@ -337,6 +337,7 @@ impl SourceMap {
         mut file_local_normalized_pos: Vec<NormalizedPos>,
         original_start_pos: BytePos,
         original_end_pos: BytePos,
+        metadata_index: u32,
     ) -> Lrc<SourceFile> {
         let start_pos = self
             .allocate_address_space(source_len)
@@ -383,6 +384,7 @@ impl SourceMap {
                 kind: ExternalSourceKind::AbsentOk,
                 original_start_pos,
                 original_end_pos,
+                metadata_index,
             }),
             start_pos,
             end_pos,
@@ -252,6 +252,7 @@ fn t10() {
         normalized_pos,
         start_pos,
         end_pos,
+        0,
     );
 
     assert!(