tree-wide: parallel: Fully removed all Lrc, replaced with Arc
parent 613bdd4997
commit 0a21f1d0a2
77 changed files with 405 additions and 395 deletions
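By the time of this commit, `Lrc` in `rustc_data_structures::sync` was effectively a re-export of `std::sync::Arc` (historically it switched to `Rc` when the compiler was built without parallelism), so the diff below is a mechanical rename plus import cleanup. A minimal sketch of the before/after pattern; the `Lrc` alias shown here is illustrative, not the real definition:

```rust
use std::sync::Arc;

// Illustration only: what `Lrc` amounted to once the parallel compiler
// became the default.
#[allow(dead_code)]
type Lrc<T> = Arc<T>;

fn share(src: String) -> Arc<String> {
    // Before this commit: `Lrc::new(src)` / `Lrc::clone(&x)`.
    // After: the standard-library type is named directly.
    Arc::new(src)
}

fn main() {
    let a = share(String::from("fn main() {}"));
    let b = Arc::clone(&a); // cheap refcount bump, no copy of the string
    assert!(Arc::ptr_eq(&a, &b));
}
```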
@@ -1,6 +1,5 @@
 use std::ops::Range;
-
-use rustc_data_structures::sync::Lrc;
+use std::sync::Arc;

 use crate::source_map::SourceMap;
 use crate::{BytePos, Pos, RelativeBytePos, SourceFile, SpanData};
@@ -22,7 +21,7 @@ struct CacheEntry {
     // misses for these rare positions. A line lookup for the position via `SourceMap::lookup_line`
     // after a cache miss will produce the last line number, as desired.
     line: Range<BytePos>,
-    file: Lrc<SourceFile>,
+    file: Arc<SourceFile>,
     file_index: usize,
 }

@@ -30,7 +29,7 @@ impl CacheEntry {
     #[inline]
     fn update(
         &mut self,
-        new_file_and_idx: Option<(Lrc<SourceFile>, usize)>,
+        new_file_and_idx: Option<(Arc<SourceFile>, usize)>,
         pos: BytePos,
         time_stamp: usize,
     ) {
@@ -63,7 +62,7 @@ pub struct CachingSourceMapView<'sm> {
 impl<'sm> CachingSourceMapView<'sm> {
     pub fn new(source_map: &'sm SourceMap) -> CachingSourceMapView<'sm> {
         let files = source_map.files();
-        let first_file = Lrc::clone(&files[0]);
+        let first_file = Arc::clone(&files[0]);
         let entry = CacheEntry {
             time_stamp: 0,
             line_number: 0,
@@ -82,7 +81,7 @@ impl<'sm> CachingSourceMapView<'sm> {
     pub fn byte_pos_to_line_and_col(
         &mut self,
         pos: BytePos,
-    ) -> Option<(Lrc<SourceFile>, usize, RelativeBytePos)> {
+    ) -> Option<(Arc<SourceFile>, usize, RelativeBytePos)> {
         self.time_stamp += 1;

         // Check if the position is in one of the cached lines
@@ -92,7 +91,7 @@ impl<'sm> CachingSourceMapView<'sm> {
             cache_entry.touch(self.time_stamp);

             let col = RelativeBytePos(pos.to_u32() - cache_entry.line.start.to_u32());
-            return Some((Lrc::clone(&cache_entry.file), cache_entry.line_number, col));
+            return Some((Arc::clone(&cache_entry.file), cache_entry.line_number, col));
         }

         // No cache hit ...
@@ -109,13 +108,13 @@ impl<'sm> CachingSourceMapView<'sm> {
         cache_entry.update(new_file_and_idx, pos, self.time_stamp);

         let col = RelativeBytePos(pos.to_u32() - cache_entry.line.start.to_u32());
-        Some((Lrc::clone(&cache_entry.file), cache_entry.line_number, col))
+        Some((Arc::clone(&cache_entry.file), cache_entry.line_number, col))
     }

     pub fn span_data_to_lines_and_cols(
         &mut self,
         span_data: &SpanData,
-    ) -> Option<(Lrc<SourceFile>, usize, BytePos, usize, BytePos)> {
+    ) -> Option<(Arc<SourceFile>, usize, BytePos, usize, BytePos)> {
         self.time_stamp += 1;

         // Check if lo and hi are in the cached lines.
@@ -133,7 +132,7 @@ impl<'sm> CachingSourceMapView<'sm> {
             }

             return Some((
-                Lrc::clone(&lo.file),
+                Arc::clone(&lo.file),
                 lo.line_number,
                 span_data.lo - lo.line.start,
                 hi.line_number,
@@ -181,7 +180,7 @@ impl<'sm> CachingSourceMapView<'sm> {
         lo.update(new_file_and_idx, span_data.lo, self.time_stamp);

         if !lo.line.contains(&span_data.hi) {
-            let new_file_and_idx = Some((Lrc::clone(&lo.file), lo.file_index));
+            let new_file_and_idx = Some((Arc::clone(&lo.file), lo.file_index));
             let next_oldest = self.oldest_cache_entry_index_avoid(oldest);
             let hi = &mut self.line_cache[next_oldest];
             hi.update(new_file_and_idx, span_data.hi, self.time_stamp);
@@ -227,7 +226,7 @@ impl<'sm> CachingSourceMapView<'sm> {
         assert_eq!(lo.file_index, hi.file_index);

         Some((
-            Lrc::clone(&lo.file),
+            Arc::clone(&lo.file),
             lo.line_number,
             span_data.lo - lo.line.start,
             hi.line_number,
@@ -271,13 +270,13 @@ impl<'sm> CachingSourceMapView<'sm> {
         oldest
     }

-    fn file_for_position(&self, pos: BytePos) -> Option<(Lrc<SourceFile>, usize)> {
+    fn file_for_position(&self, pos: BytePos) -> Option<(Arc<SourceFile>, usize)> {
         if !self.source_map.files().is_empty() {
             let file_idx = self.source_map.lookup_source_file_idx(pos);
             let file = &self.source_map.files()[file_idx];

             if file_contains(file, pos) {
-                return Some((Lrc::clone(file), file_idx));
+                return Some((Arc::clone(file), file_idx));
             }
         }

@@ -29,11 +29,12 @@ use std::collections::hash_map::Entry;
 use std::collections::hash_set::Entry as SetEntry;
 use std::fmt;
 use std::hash::Hash;
+use std::sync::Arc;

 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::stable_hasher::{Hash64, HashStable, HashingControls, StableHasher};
-use rustc_data_structures::sync::{Lock, Lrc, WorkerLocal};
+use rustc_data_structures::sync::{Lock, WorkerLocal};
 use rustc_data_structures::unhash::UnhashMap;
 use rustc_index::IndexVec;
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
@@ -904,7 +905,7 @@ impl Span {
     /// allowed inside this span.
     pub fn mark_with_reason(
         self,
-        allow_internal_unstable: Option<Lrc<[Symbol]>>,
+        allow_internal_unstable: Option<Arc<[Symbol]>>,
         reason: DesugaringKind,
         edition: Edition,
         ctx: impl HashStableContext,
@@ -959,7 +960,7 @@ pub struct ExpnData {
     /// List of `#[unstable]`/feature-gated features that the macro is allowed to use
     /// internally without forcing the whole crate to opt-in
     /// to them.
-    pub allow_internal_unstable: Option<Lrc<[Symbol]>>,
+    pub allow_internal_unstable: Option<Arc<[Symbol]>>,
     /// Edition of the crate in which the macro is defined.
     pub edition: Edition,
     /// The `DefId` of the macro being invoked,
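The `allow_internal_unstable` list keeps its shared-slice shape; only the name of the pointer type changes. A small, hypothetical sketch of why `Arc<[T]>` suits this field: the list is built once and then shared between expansion records by cheap reference-count clones (plain `String`s stand in for rustc's interned `Symbol`, and the feature names are purely illustrative):

```rust
use std::sync::Arc;

fn main() {
    // Build the feature list once...
    let allow: Option<Arc<[String]>> =
        Some(Arc::from(vec!["rustc_attrs".to_string(), "fmt_internals".to_string()]));

    // ...then hand out cheap clones; the list itself is never copied per use.
    let for_one_expansion = allow.clone();
    let for_another = allow.clone();

    if let (Some(a), Some(b)) = (&for_one_expansion, &for_another) {
        assert!(Arc::ptr_eq(a, b)); // both point at the same allocation
        assert_eq!(a.len(), 2);
    }
}
```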
@@ -985,7 +986,7 @@ impl ExpnData {
         parent: ExpnId,
         call_site: Span,
         def_site: Span,
-        allow_internal_unstable: Option<Lrc<[Symbol]>>,
+        allow_internal_unstable: Option<Arc<[Symbol]>>,
         edition: Edition,
         macro_def_id: Option<DefId>,
         parent_module: Option<DefId>,
@@ -1037,7 +1038,7 @@ impl ExpnData {
         kind: ExpnKind,
         call_site: Span,
         edition: Edition,
-        allow_internal_unstable: Lrc<[Symbol]>,
+        allow_internal_unstable: Arc<[Symbol]>,
         macro_def_id: Option<DefId>,
         parent_module: Option<DefId>,
     ) -> ExpnData {

@@ -83,11 +83,12 @@ use std::io::{self, Read};
 use std::ops::{Add, Range, Sub};
 use std::path::{Path, PathBuf};
 use std::str::FromStr;
+use std::sync::Arc;
 use std::{fmt, iter};

 use md5::{Digest, Md5};
 use rustc_data_structures::stable_hasher::{Hash64, Hash128, HashStable, StableHasher};
-use rustc_data_structures::sync::{FreezeLock, FreezeWriteGuard, Lock, Lrc};
+use rustc_data_structures::sync::{FreezeLock, FreezeWriteGuard, Lock};
 use rustc_data_structures::unord::UnordMap;
 use sha1::Sha1;
 use sha2::Sha256;
@@ -110,7 +111,7 @@ pub struct SessionGlobals {
     /// The session's source map, if there is one. This field should only be
     /// used in places where the `Session` is truly not available, such as
     /// `<Span as Debug>::fmt`.
-    source_map: Option<Lrc<SourceMap>>,
+    source_map: Option<Arc<SourceMap>>,
 }

 impl SessionGlobals {
@@ -120,7 +121,7 @@ impl SessionGlobals {
             span_interner: Lock::new(span_encoding::SpanInterner::default()),
             metavar_spans: Default::default(),
             hygiene_data: Lock::new(hygiene::HygieneData::new(edition)),
-            source_map: sm_inputs.map(|inputs| Lrc::new(SourceMap::with_inputs(inputs))),
+            source_map: sm_inputs.map(|inputs| Arc::new(SourceMap::with_inputs(inputs))),
         }
     }
 }
@@ -1430,7 +1431,7 @@ pub enum ExternalSource {
 #[derive(PartialEq, Eq, Clone, Debug)]
 pub enum ExternalSourceKind {
     /// The external source has been loaded already.
-    Present(Lrc<String>),
+    Present(Arc<String>),
     /// No attempt has been made to load the external source.
     AbsentOk,
     /// A failed attempt has been made to load the external source.
@@ -1670,7 +1671,7 @@ pub struct SourceFile {
     /// (e.g., `<anon>`).
     pub name: FileName,
     /// The complete source code.
-    pub src: Option<Lrc<String>>,
+    pub src: Option<Arc<String>>,
     /// The source code's hash.
     pub src_hash: SourceFileHash,
     /// Used to enable cargo to use checksums to check if a crate is fresh rather
@@ -1931,7 +1932,7 @@ impl SourceFile {

         Ok(SourceFile {
             name,
-            src: Some(Lrc::new(src)),
+            src: Some(Arc::new(src)),
             src_hash,
             checksum_hash,
             external_src: FreezeLock::frozen(ExternalSource::Unneeded),
@@ -2050,7 +2051,7 @@ impl SourceFile {
         } = &mut *external_src
         {
             *src_kind = if let Some(src) = src {
-                ExternalSourceKind::Present(Lrc::new(src))
+                ExternalSourceKind::Present(Arc::new(src))
             } else {
                 ExternalSourceKind::AbsentErr
             };
@@ -2490,7 +2491,7 @@ impl<D: Decoder> Decodable<D> for RelativeBytePos {
 #[derive(Debug, Clone)]
 pub struct Loc {
     /// Information about the original source.
-    pub file: Lrc<SourceFile>,
+    pub file: Arc<SourceFile>,
     /// The (1-based) line number.
     pub line: usize,
     /// The (0-based) column offset.
@@ -2502,13 +2503,13 @@ pub struct Loc {
 // Used to be structural records.
 #[derive(Debug)]
 pub struct SourceFileAndLine {
-    pub sf: Lrc<SourceFile>,
+    pub sf: Arc<SourceFile>,
     /// Index of line, starting from 0.
     pub line: usize,
 }
 #[derive(Debug)]
 pub struct SourceFileAndBytePos {
-    pub sf: Lrc<SourceFile>,
+    pub sf: Arc<SourceFile>,
     pub pos: BytePos,
 }

@@ -2525,7 +2526,7 @@ pub struct LineInfo {
 }

 pub struct FileLines {
-    pub file: Lrc<SourceFile>,
+    pub file: Arc<SourceFile>,
     pub lines: Vec<LineInfo>,
 }

@@ -2591,7 +2592,7 @@ pub trait HashStableContext {
     fn span_data_to_lines_and_cols(
         &mut self,
         span: &SpanData,
-    ) -> Option<(Lrc<SourceFile>, usize, BytePos, usize, BytePos)>;
+    ) -> Option<(Arc<SourceFile>, usize, BytePos, usize, BytePos)>;
     fn hashing_controls(&self) -> HashingControls;
 }

@@ -102,8 +102,8 @@ pub trait FileLoader {
     fn read_file(&self, path: &Path) -> io::Result<String>;

     /// Read the contents of a potentially non-UTF-8 file into memory.
-    /// We don't normalize binary files, so we can start in an Lrc.
-    fn read_binary_file(&self, path: &Path) -> io::Result<Lrc<[u8]>>;
+    /// We don't normalize binary files, so we can start in an Arc.
+    fn read_binary_file(&self, path: &Path) -> io::Result<Arc<[u8]>>;
 }

 /// A FileLoader that uses std::fs to load real files.
@@ -124,12 +124,12 @@ impl FileLoader for RealFileLoader {
         fs::read_to_string(path)
     }

-    fn read_binary_file(&self, path: &Path) -> io::Result<Lrc<[u8]>> {
+    fn read_binary_file(&self, path: &Path) -> io::Result<Arc<[u8]>> {
         let mut file = fs::File::open(path)?;
         let len = file.metadata()?.len();

-        let mut bytes = Lrc::new_uninit_slice(len as usize);
-        let mut buf = BorrowedBuf::from(Lrc::get_mut(&mut bytes).unwrap());
+        let mut bytes = Arc::new_uninit_slice(len as usize);
+        let mut buf = BorrowedBuf::from(Arc::get_mut(&mut bytes).unwrap());
         match file.read_buf_exact(buf.unfilled()) {
             Ok(()) => {}
             Err(e) if e.kind() == io::ErrorKind::UnexpectedEof => {
@@ -146,9 +146,9 @@ impl FileLoader for RealFileLoader {
         // But we are not guaranteed to be at the end of the file, because we did not attempt to do
         // a read with a non-zero-sized buffer and get Ok(0).
         // So we do small read to a fixed-size buffer. If the read returns no bytes then we're
-        // already done, and we just return the Lrc we built above.
+        // already done, and we just return the Arc we built above.
         // If the read returns bytes however, we just fall back to reading into a Vec then turning
-        // that into an Lrc, losing our nice peak memory behavior. This fallback code path should
+        // that into an Arc, losing our nice peak memory behavior. This fallback code path should
         // be rarely exercised.

         let mut probe = [0u8; 32];
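For context, `read_binary_file` reads straight into the final `Arc<[u8]>` allocation so the file contents are never held in memory twice. A simplified, standalone sketch of that pattern follows; it is nightly-only (it assumes the `read_buf` and `core_io_borrowed_buf` features) and omits the probe/fallback handling above for files whose size changes after `stat`:

```rust
#![feature(read_buf, core_io_borrowed_buf)]

use std::fs::File;
use std::io::{self, BorrowedBuf, Read};
use std::path::Path;
use std::sync::Arc;

fn read_binary(path: &Path) -> io::Result<Arc<[u8]>> {
    let mut file = File::open(path)?;
    let len = file.metadata()?.len() as usize;

    // Allocate the Arc up front and read directly into it, so the bytes
    // never exist in a second buffer.
    let mut bytes = Arc::new_uninit_slice(len);
    let mut buf = BorrowedBuf::from(Arc::get_mut(&mut bytes).unwrap());
    file.read_buf_exact(buf.unfilled())?;

    // SAFETY: `read_buf_exact` filled every byte of the buffer.
    Ok(unsafe { bytes.assume_init() })
}

fn main() -> io::Result<()> {
    // Hypothetical usage: write a small file, then read it back into an Arc.
    let path = std::env::temp_dir().join("arc_read_demo.bin");
    std::fs::write(&path, b"hello arc")?;
    let bytes = read_binary(&path)?;
    assert_eq!(&bytes[..], b"hello arc");
    Ok(())
}
```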
@@ -172,8 +172,8 @@ impl FileLoader for RealFileLoader {

 #[derive(Default)]
 struct SourceMapFiles {
-    source_files: monotonic::MonotonicVec<Lrc<SourceFile>>,
-    stable_id_to_source_file: UnhashMap<StableSourceFileId, Lrc<SourceFile>>,
+    source_files: monotonic::MonotonicVec<Arc<SourceFile>>,
+    stable_id_to_source_file: UnhashMap<StableSourceFileId, Arc<SourceFile>>,
 }

 /// Used to construct a `SourceMap` with `SourceMap::with_inputs`.
@@ -232,7 +232,7 @@ impl SourceMap {
         self.file_loader.file_exists(path)
     }

-    pub fn load_file(&self, path: &Path) -> io::Result<Lrc<SourceFile>> {
+    pub fn load_file(&self, path: &Path) -> io::Result<Arc<SourceFile>> {
         let src = self.file_loader.read_file(path)?;
         let filename = path.to_owned().into();
         Ok(self.new_source_file(filename, src))
@@ -242,7 +242,7 @@ impl SourceMap {
     ///
     /// Unlike `load_file`, guarantees that no normalization like BOM-removal
     /// takes place.
-    pub fn load_binary_file(&self, path: &Path) -> io::Result<(Lrc<[u8]>, Span)> {
+    pub fn load_binary_file(&self, path: &Path) -> io::Result<(Arc<[u8]>, Span)> {
         let bytes = self.file_loader.read_binary_file(path)?;

         // We need to add file to the `SourceMap`, so that it is present
@@ -265,14 +265,14 @@ impl SourceMap {

     // By returning a `MonotonicVec`, we ensure that consumers cannot invalidate
     // any existing indices pointing into `files`.
-    pub fn files(&self) -> MappedReadGuard<'_, monotonic::MonotonicVec<Lrc<SourceFile>>> {
+    pub fn files(&self) -> MappedReadGuard<'_, monotonic::MonotonicVec<Arc<SourceFile>>> {
         ReadGuard::map(self.files.borrow(), |files| &files.source_files)
     }

     pub fn source_file_by_stable_id(
         &self,
         stable_id: StableSourceFileId,
-    ) -> Option<Lrc<SourceFile>> {
+    ) -> Option<Arc<SourceFile>> {
         self.files.borrow().stable_id_to_source_file.get(&stable_id).cloned()
     }

@@ -280,7 +280,7 @@ impl SourceMap {
         &self,
         file_id: StableSourceFileId,
         mut file: SourceFile,
-    ) -> Result<Lrc<SourceFile>, OffsetOverflowError> {
+    ) -> Result<Arc<SourceFile>, OffsetOverflowError> {
         let mut files = self.files.borrow_mut();

         file.start_pos = BytePos(if let Some(last_file) = files.source_files.last() {
@@ -291,9 +291,9 @@ impl SourceMap {
             0
         });

-        let file = Lrc::new(file);
-        files.source_files.push(Lrc::clone(&file));
-        files.stable_id_to_source_file.insert(file_id, Lrc::clone(&file));
+        let file = Arc::new(file);
+        files.source_files.push(Arc::clone(&file));
+        files.stable_id_to_source_file.insert(file_id, Arc::clone(&file));

         Ok(file)
     }
@@ -301,7 +301,7 @@ impl SourceMap {
     /// Creates a new `SourceFile`.
     /// If a file already exists in the `SourceMap` with the same ID, that file is returned
     /// unmodified.
-    pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc<SourceFile> {
+    pub fn new_source_file(&self, filename: FileName, src: String) -> Arc<SourceFile> {
         self.try_new_source_file(filename, src).unwrap_or_else(|OffsetOverflowError| {
             eprintln!(
                 "fatal error: rustc does not support text files larger than {} bytes",
@@ -315,7 +315,7 @@ impl SourceMap {
         &self,
         filename: FileName,
         src: String,
-    ) -> Result<Lrc<SourceFile>, OffsetOverflowError> {
+    ) -> Result<Arc<SourceFile>, OffsetOverflowError> {
         // Note that filename may not be a valid path, eg it may be `<anon>` etc,
         // but this is okay because the directory determined by `path.pop()` will
         // be empty, so the working directory will be used.
@@ -353,7 +353,7 @@ impl SourceMap {
         multibyte_chars: Vec<MultiByteChar>,
         normalized_pos: Vec<NormalizedPos>,
         metadata_index: u32,
-    ) -> Lrc<SourceFile> {
+    ) -> Arc<SourceFile> {
         let source_len = RelativeBytePos::from_u32(source_len);

         let source_file = SourceFile {
@@ -393,9 +393,9 @@ impl SourceMap {
     }

     /// Return the SourceFile that contains the given `BytePos`
-    pub fn lookup_source_file(&self, pos: BytePos) -> Lrc<SourceFile> {
+    pub fn lookup_source_file(&self, pos: BytePos) -> Arc<SourceFile> {
         let idx = self.lookup_source_file_idx(pos);
-        Lrc::clone(&(*self.files.borrow().source_files)[idx])
+        Arc::clone(&(*self.files.borrow().source_files)[idx])
     }

     /// Looks up source information about a `BytePos`.
@@ -406,7 +406,7 @@ impl SourceMap {
     }

     /// If the corresponding `SourceFile` is empty, does not return a line number.
-    pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Lrc<SourceFile>> {
+    pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Arc<SourceFile>> {
         let f = self.lookup_source_file(pos);

         let pos = f.relative_position(pos);
@@ -441,7 +441,7 @@ impl SourceMap {
     pub fn span_to_location_info(
         &self,
         sp: Span,
-    ) -> (Option<Lrc<SourceFile>>, usize, usize, usize, usize) {
+    ) -> (Option<Arc<SourceFile>>, usize, usize, usize, usize) {
         if self.files.borrow().source_files.is_empty() || sp.is_dummy() {
             return (None, 0, 0, 0, 0);
         }
@@ -477,7 +477,7 @@ impl SourceMap {
         if lo != hi {
             return true;
         }
-        let f = Lrc::clone(&(*self.files.borrow().source_files)[lo]);
+        let f = Arc::clone(&(*self.files.borrow().source_files)[lo]);
         let lo = f.relative_position(sp.lo());
         let hi = f.relative_position(sp.hi());
         f.lookup_line(lo) != f.lookup_line(hi)
@@ -998,12 +998,12 @@ impl SourceMap {
         }
     }

-    pub fn get_source_file(&self, filename: &FileName) -> Option<Lrc<SourceFile>> {
+    pub fn get_source_file(&self, filename: &FileName) -> Option<Arc<SourceFile>> {
         // Remap filename before lookup
         let filename = self.path_mapping().map_filename_prefix(filename).0;
         for sf in self.files.borrow().source_files.iter() {
             if filename == sf.name {
-                return Some(Lrc::clone(&sf));
+                return Some(Arc::clone(&sf));
             }
         }
         None
@@ -1012,7 +1012,7 @@ impl SourceMap {
     /// For a global `BytePos`, computes the local offset within the containing `SourceFile`.
     pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos {
         let idx = self.lookup_source_file_idx(bpos);
-        let sf = Lrc::clone(&(*self.files.borrow().source_files)[idx]);
+        let sf = Arc::clone(&(*self.files.borrow().source_files)[idx]);
         let offset = bpos - sf.start_pos;
         SourceFileAndBytePos { sf, pos: offset }
     }
@@ -1082,7 +1082,7 @@ impl SourceMap {
     }
 }

-pub fn get_source_map() -> Option<Lrc<SourceMap>> {
+pub fn get_source_map() -> Option<Arc<SourceMap>> {
     with_session_globals(|session_globals| session_globals.source_map.clone())
 }

@@ -538,7 +538,7 @@ fn test_next_point() {
 #[cfg(target_os = "linux")]
 #[test]
 fn read_binary_file_handles_lying_stat() {
-    // read_binary_file tries to read the contents of a file into an Lrc<[u8]> while
+    // read_binary_file tries to read the contents of a file into an Arc<[u8]> while
     // never having two copies of the data in memory at once. This is an optimization
     // to support include_bytes! with large files. But since Rust allocators are
     // sensitive to alignment, our implementation can't be bootstrapped off calling