
Use Freeze for SourceFile.lines

John Kåre Alsaker 2023-08-31 22:12:47 +02:00
parent c5996b80be
commit f49382c050
11 changed files with 199 additions and 179 deletions

View file

@@ -81,7 +81,7 @@ impl DebugContext {
         match tcx.sess.source_map().lookup_line(span.lo()) {
             Ok(SourceFileAndLine { sf: file, line }) => {
-                let line_pos = file.lines(|lines| lines[line]);
+                let line_pos = file.lines()[line];
                 let col = file.relative_position(span.lo()) - line_pos;

                 (file, u64::try_from(line).unwrap() + 1, u64::from(col.to_u32()) + 1)

View file

@@ -263,7 +263,7 @@ impl CodegenCx<'_, '_> {
     pub fn lookup_debug_loc(&self, pos: BytePos) -> DebugLoc {
         let (file, line, col) = match self.sess().source_map().lookup_line(pos) {
             Ok(SourceFileAndLine { sf: file, line }) => {
-                let line_pos = file.lines(|lines| lines[line]);
+                let line_pos = file.lines()[line];

                 // Use 1-based indexing.
                 let line = (line + 1) as u32;

View file

@@ -3,6 +3,7 @@ use crate::sync::{AtomicBool, ReadGuard, RwLock, WriteGuard};
 use crate::sync::{DynSend, DynSync};
 use std::{
     cell::UnsafeCell,
+    intrinsics::likely,
     marker::PhantomData,
     ops::{Deref, DerefMut},
     sync::atomic::Ordering,
@@ -49,6 +50,17 @@ impl<T> FreezeLock<T> {
         self.frozen.load(Ordering::Acquire)
     }

+    /// Get the inner value if frozen.
+    #[inline]
+    pub fn get(&self) -> Option<&T> {
+        if likely(self.frozen.load(Ordering::Acquire)) {
+            // SAFETY: This is frozen so the data cannot be modified.
+            unsafe { Some(&*self.data.get()) }
+        } else {
+            None
+        }
+    }
+
     #[inline]
     pub fn read(&self) -> FreezeReadGuard<'_, T> {
         FreezeReadGuard {
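The new `get` accessor above is the heart of this change: once a `FreezeLock` is frozen its value is never written again, so an `Acquire` load of the flag is enough to hand out a plain shared reference with no guard (`likely` is only an optimizer hint). A minimal standalone sketch of that pattern, using just the standard library and not rustc's actual `FreezeLock` (which also supports guarded reads and writes before freezing):

```rust
use std::cell::UnsafeCell;
use std::sync::atomic::{AtomicBool, Ordering};

/// Sketch of the freeze pattern: data sits behind a flag and, once `freeze`
/// is called, is never written again.
struct Frozen<T> {
    frozen: AtomicBool,
    data: UnsafeCell<T>,
}

// SAFETY (sketch): this type never hands out `&mut T` after construction and
// `get` only reads once the freeze flag is observed, so sharing is sound here.
unsafe impl<T: Send + Sync> Sync for Frozen<T> {}

impl<T> Frozen<T> {
    fn new(value: T) -> Self {
        Frozen { frozen: AtomicBool::new(false), data: UnsafeCell::new(value) }
    }

    /// Publish the value; `Release` pairs with the `Acquire` load in `get`.
    fn freeze(&self) {
        self.frozen.store(true, Ordering::Release);
    }

    /// Get the inner value if frozen, mirroring `FreezeLock::get` above.
    fn get(&self) -> Option<&T> {
        if self.frozen.load(Ordering::Acquire) {
            // SAFETY: frozen, so nothing will modify the data anymore.
            unsafe { Some(&*self.data.get()) }
        } else {
            None
        }
    }
}

fn main() {
    let cell = Frozen::new(vec![0u32, 14, 25]);
    assert!(cell.get().is_none());
    cell.freeze();
    assert_eq!(cell.get().unwrap().len(), 3);
}
```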

View file

@@ -692,7 +692,7 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Span {
         let len = BytePos::decode(decoder);

         let file_lo = decoder.file_index_to_file(file_lo_index);
-        let lo = file_lo.lines(|lines| lines[line_lo - 1] + col_lo);
+        let lo = file_lo.lines()[line_lo - 1] + col_lo;
         let lo = file_lo.absolute_position(lo);
         let hi = lo + len;

View file

@@ -79,15 +79,16 @@ impl<'a> HashStable<StableHashingContext<'a>> for SourceFile {
         src_hash.hash_stable(hcx, hasher);

-        // We are always in `Lines` form by the time we reach here.
-        assert!(self.lines.borrow().is_lines());
-        self.lines(|lines| {
+        {
+            // We are always in `Lines` form by the time we reach here.
+            assert!(self.lines.read().is_lines());
+            let lines = self.lines();
             // We only hash the relative position within this source_file
             lines.len().hash_stable(hcx, hasher);
             for &line in lines.iter() {
                 line.hash_stable(hcx, hasher);
             }
-        });
+        }

         // We only hash the relative position within this source_file
         multibyte_chars.len().hash_stable(hcx, hasher);

View file

@@ -33,7 +33,7 @@ extern crate rustc_macros;
 #[macro_use]
 extern crate tracing;

-use rustc_data_structures::AtomicRef;
+use rustc_data_structures::{cold_path, AtomicRef};
 use rustc_macros::HashStable_Generic;
 use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
@@ -1348,7 +1348,7 @@ pub struct SourceFile {
     /// The byte length of this source.
     pub source_len: RelativeBytePos,
     /// Locations of lines beginnings in the source code.
-    pub lines: Lock<SourceFileLines>,
+    pub lines: FreezeLock<SourceFileLines>,
     /// Locations of multi-byte characters in the source code.
     pub multibyte_chars: Vec<MultiByteChar>,
     /// Width of characters that are not narrow in the source code.
@@ -1373,7 +1373,10 @@ impl Clone for SourceFile {
             },
             start_pos: self.start_pos,
             source_len: self.source_len,
-            lines: Lock::new(self.lines.borrow().clone()),
+            lines: {
+                let lock = self.lines.read();
+                FreezeLock::with(lock.clone(), self.lines.is_frozen())
+            },
             multibyte_chars: self.multibyte_chars.clone(),
             non_narrow_chars: self.non_narrow_chars.clone(),
             normalized_pos: self.normalized_pos.clone(),
@@ -1391,64 +1394,63 @@ impl<S: Encoder> Encodable<S> for SourceFile {
         self.source_len.encode(s);

         // We are always in `Lines` form by the time we reach here.
-        assert!(self.lines.borrow().is_lines());
-        self.lines(|lines| {
+        assert!(self.lines.read().is_lines());
+        let lines = self.lines();
         // Store the length.
         s.emit_u32(lines.len() as u32);

         // Compute and store the difference list.
         if lines.len() != 0 {
             let max_line_length = if lines.len() == 1 {
                 0
             } else {
                 lines
                     .array_windows()
                     .map(|&[fst, snd]| snd - fst)
                     .map(|bp| bp.to_usize())
                     .max()
                     .unwrap()
             };

             let bytes_per_diff: usize = match max_line_length {
                 0..=0xFF => 1,
                 0x100..=0xFFFF => 2,
                 _ => 4,
             };

             // Encode the number of bytes used per diff.
             s.emit_u8(bytes_per_diff as u8);

             // Encode the first element.
             assert_eq!(lines[0], RelativeBytePos(0));

             // Encode the difference list.
             let diff_iter = lines.array_windows().map(|&[fst, snd]| snd - fst);
             let num_diffs = lines.len() - 1;
             let mut raw_diffs;
             match bytes_per_diff {
                 1 => {
                     raw_diffs = Vec::with_capacity(num_diffs);
                     for diff in diff_iter {
                         raw_diffs.push(diff.0 as u8);
                     }
                 }
                 2 => {
                     raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
                     for diff in diff_iter {
                         raw_diffs.extend_from_slice(&(diff.0 as u16).to_le_bytes());
                     }
                 }
                 4 => {
                     raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
                     for diff in diff_iter {
                         raw_diffs.extend_from_slice(&(diff.0).to_le_bytes());
                     }
                 }
                 _ => unreachable!(),
             }
             s.emit_raw_bytes(&raw_diffs);
         }
-        });

         self.multibyte_chars.encode(s);
         self.non_narrow_chars.encode(s);
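The encoder in this hunk stores the line table as a leading 0 followed by the gaps between consecutive line starts, packed with 1, 2, or 4 bytes per gap depending on the longest line. A hedged standalone sketch of the same packing idea, using plain `u32` offsets and a `Vec<u8>` in place of `RelativeBytePos` and the real serializer:

```rust
/// Sketch of the "difference list" packing used above: line starts are
/// strictly increasing and begin at 0, so only the gaps between them are kept.
fn encode_line_starts(lines: &[u32]) -> (u8, Vec<u8>) {
    assert_eq!(lines.first().copied(), Some(0));
    let diffs: Vec<u32> = lines.windows(2).map(|w| w[1] - w[0]).collect();
    let max = diffs.iter().copied().max().unwrap_or(0);
    // Pick the smallest width that can hold the largest gap.
    let bytes_per_diff: u8 = match max {
        0..=0xFF => 1,
        0x100..=0xFFFF => 2,
        _ => 4,
    };
    let mut raw = Vec::with_capacity(bytes_per_diff as usize * diffs.len());
    for d in diffs {
        match bytes_per_diff {
            1 => raw.push(d as u8),
            2 => raw.extend_from_slice(&(d as u16).to_le_bytes()),
            _ => raw.extend_from_slice(&d.to_le_bytes()),
        }
    }
    (bytes_per_diff, raw)
}

fn main() {
    // Three lines starting at byte offsets 0, 14 and 25: gaps of 14 and 11.
    let (width, raw) = encode_line_starts(&[0, 14, 25]);
    assert_eq!((width, raw), (1, vec![14, 11]));
}
```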
@@ -1491,7 +1493,7 @@ impl<D: Decoder> Decodable<D> for SourceFile {
             // Unused - the metadata decoder will construct
             // a new SourceFile, filling in `external_src` properly
             external_src: FreezeLock::frozen(ExternalSource::Unneeded),
-            lines: Lock::new(lines),
+            lines: FreezeLock::new(lines),
             multibyte_chars,
             non_narrow_chars,
             normalized_pos,
@@ -1535,7 +1537,7 @@ impl SourceFile {
             external_src: FreezeLock::frozen(ExternalSource::Unneeded),
             start_pos: BytePos::from_u32(0),
             source_len: RelativeBytePos::from_u32(source_len),
-            lines: Lock::new(SourceFileLines::Lines(lines)),
+            lines: FreezeLock::frozen(SourceFileLines::Lines(lines)),
             multibyte_chars,
             non_narrow_chars,
             normalized_pos,
@@ -1544,65 +1546,82 @@ impl SourceFile {
         })
     }

-    pub fn lines<F, R>(&self, f: F) -> R
-    where
-        F: FnOnce(&[RelativeBytePos]) -> R,
-    {
-        let mut guard = self.lines.borrow_mut();
-        match &*guard {
-            SourceFileLines::Lines(lines) => f(lines),
-            SourceFileLines::Diffs(SourceFileDiffs { bytes_per_diff, num_diffs, raw_diffs }) => {
-                // Convert from "diffs" form to "lines" form.
-                let num_lines = num_diffs + 1;
-                let mut lines = Vec::with_capacity(num_lines);
-                let mut line_start = RelativeBytePos(0);
-                lines.push(line_start);
-                assert_eq!(*num_diffs, raw_diffs.len() / bytes_per_diff);
-                match bytes_per_diff {
-                    1 => {
-                        lines.extend(raw_diffs.into_iter().map(|&diff| {
-                            line_start = line_start + RelativeBytePos(diff as u32);
-                            line_start
-                        }));
-                    }
-                    2 => {
-                        lines.extend((0..*num_diffs).map(|i| {
-                            let pos = bytes_per_diff * i;
-                            let bytes = [raw_diffs[pos], raw_diffs[pos + 1]];
-                            let diff = u16::from_le_bytes(bytes);
-                            line_start = line_start + RelativeBytePos(diff as u32);
-                            line_start
-                        }));
-                    }
-                    4 => {
-                        lines.extend((0..*num_diffs).map(|i| {
-                            let pos = bytes_per_diff * i;
-                            let bytes = [
-                                raw_diffs[pos],
-                                raw_diffs[pos + 1],
-                                raw_diffs[pos + 2],
-                                raw_diffs[pos + 3],
-                            ];
-                            let diff = u32::from_le_bytes(bytes);
-                            line_start = line_start + RelativeBytePos(diff);
-                            line_start
-                        }));
-                    }
-                    _ => unreachable!(),
-                }
-                let res = f(&lines);
-                *guard = SourceFileLines::Lines(lines);
-                res
-            }
-        }
-    }
+    /// This converts the `lines` field to contain `SourceFileLines::Lines` if needed and freezes it.
+    fn convert_diffs_to_lines_frozen(&self) {
+        let mut guard = if let Some(guard) = self.lines.try_write() { guard } else { return };
+
+        let SourceFileDiffs { bytes_per_diff, num_diffs, raw_diffs } = match &*guard {
+            SourceFileLines::Diffs(diffs) => diffs,
+            SourceFileLines::Lines(..) => {
+                FreezeWriteGuard::freeze(guard);
+                return;
+            }
+        };
+
+        // Convert from "diffs" form to "lines" form.
+        let num_lines = num_diffs + 1;
+        let mut lines = Vec::with_capacity(num_lines);
+        let mut line_start = RelativeBytePos(0);
+        lines.push(line_start);
+
+        assert_eq!(*num_diffs, raw_diffs.len() / bytes_per_diff);
+        match bytes_per_diff {
+            1 => {
+                lines.extend(raw_diffs.into_iter().map(|&diff| {
+                    line_start = line_start + RelativeBytePos(diff as u32);
+                    line_start
+                }));
+            }
+            2 => {
+                lines.extend((0..*num_diffs).map(|i| {
+                    let pos = bytes_per_diff * i;
+                    let bytes = [raw_diffs[pos], raw_diffs[pos + 1]];
+                    let diff = u16::from_le_bytes(bytes);
+                    line_start = line_start + RelativeBytePos(diff as u32);
+                    line_start
+                }));
+            }
+            4 => {
+                lines.extend((0..*num_diffs).map(|i| {
+                    let pos = bytes_per_diff * i;
+                    let bytes = [
+                        raw_diffs[pos],
+                        raw_diffs[pos + 1],
+                        raw_diffs[pos + 2],
+                        raw_diffs[pos + 3],
+                    ];
+                    let diff = u32::from_le_bytes(bytes);
+                    line_start = line_start + RelativeBytePos(diff);
+                    line_start
+                }));
+            }
+            _ => unreachable!(),
+        }
+
+        *guard = SourceFileLines::Lines(lines);
+        FreezeWriteGuard::freeze(guard);
+    }
+
+    pub fn lines(&self) -> &[RelativeBytePos] {
+        if let Some(SourceFileLines::Lines(lines)) = self.lines.get() {
+            return &lines[..];
+        }
+        cold_path(|| {
+            self.convert_diffs_to_lines_frozen();
+            if let Some(SourceFileLines::Lines(lines)) = self.lines.get() {
+                return &lines[..];
+            }
+            unreachable!()
+        })
+    }

     /// Returns the `BytePos` of the beginning of the current line.
     pub fn line_begin_pos(&self, pos: BytePos) -> BytePos {
         let pos = self.relative_position(pos);
         let line_index = self.lookup_line(pos).unwrap();
-        let line_start_pos = self.lines(|lines| lines[line_index]);
+        let line_start_pos = self.lines()[line_index];
         self.absolute_position(line_start_pos)
     }
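`convert_diffs_to_lines_frozen` above is the inverse of the encoder: it runs at most once per file, rebuilds the absolute line starts from the packed deltas, and then freezes the lock so every later `lines()` call takes the cheap `get()` fast path. A hedged sketch of just the decoding step, again on plain `u32` offsets and without the locking:

```rust
/// Sketch of the "diffs" -> "lines" conversion above: start from 0, then keep
/// a running sum of the fixed-width little-endian deltas.
fn decode_line_starts(bytes_per_diff: usize, raw_diffs: &[u8]) -> Vec<u32> {
    assert_eq!(raw_diffs.len() % bytes_per_diff, 0);
    let num_diffs = raw_diffs.len() / bytes_per_diff;
    let mut lines = Vec::with_capacity(num_diffs + 1);
    let mut line_start = 0u32;
    lines.push(line_start);
    for chunk in raw_diffs.chunks_exact(bytes_per_diff) {
        let diff = match bytes_per_diff {
            1 => chunk[0] as u32,
            2 => u16::from_le_bytes([chunk[0], chunk[1]]) as u32,
            4 => u32::from_le_bytes([chunk[0], chunk[1], chunk[2], chunk[3]]),
            _ => unreachable!(),
        };
        line_start += diff;
        lines.push(line_start);
    }
    lines
}

fn main() {
    // Round-trips the example from the encoding sketch: gaps 14 and 11.
    assert_eq!(decode_line_starts(1, &[14, 11]), vec![0, 14, 25]);
}
```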
@@ -1662,7 +1681,7 @@ impl SourceFile {
         }

         let begin = {
-            let line = self.lines(|lines| lines.get(line_number).copied())?;
+            let line = self.lines().get(line_number).copied()?;
             line.to_usize()
         };
@@ -1686,7 +1705,7 @@ impl SourceFile {
     }

     pub fn count_lines(&self) -> usize {
-        self.lines(|lines| lines.len())
+        self.lines().len()
     }

     #[inline]
@@ -1709,7 +1728,7 @@ impl SourceFile {
     /// number. If the source_file is empty or the position is located before the
     /// first line, `None` is returned.
     pub fn lookup_line(&self, pos: RelativeBytePos) -> Option<usize> {
-        self.lines(|lines| lines.partition_point(|x| x <= &pos).checked_sub(1))
+        self.lines().partition_point(|x| x <= &pos).checked_sub(1)
     }

     pub fn line_bounds(&self, line_index: usize) -> Range<BytePos> {
@@ -1717,15 +1736,13 @@ impl SourceFile {
             return self.start_pos..self.start_pos;
         }

-        self.lines(|lines| {
-            assert!(line_index < lines.len());
-            if line_index == (lines.len() - 1) {
-                self.absolute_position(lines[line_index])..self.end_position()
-            } else {
-                self.absolute_position(lines[line_index])
-                    ..self.absolute_position(lines[line_index + 1])
-            }
-        })
+        let lines = self.lines();
+        assert!(line_index < lines.len());
+        if line_index == (lines.len() - 1) {
+            self.absolute_position(lines[line_index])..self.end_position()
+        } else {
+            self.absolute_position(lines[line_index])..self.absolute_position(lines[line_index + 1])
+        }
     }

     /// Returns whether or not the file contains the given `SourceMap` byte
@@ -1811,7 +1828,7 @@ impl SourceFile {
         match self.lookup_line(pos) {
             Some(a) => {
                 let line = a + 1; // Line numbers start at 1
-                let linebpos = self.lines(|lines| lines[a]);
+                let linebpos = self.lines()[a];
                 let linechpos = self.bytepos_to_file_charpos(linebpos);
                 let col = chpos - linechpos;
                 debug!("byte pos {:?} is on the line at byte pos {:?}", pos, linebpos);
@@ -1831,7 +1848,7 @@ impl SourceFile {
         let (line, col_or_chpos) = self.lookup_file_pos(pos);
         if line > 0 {
             let col = col_or_chpos;
-            let linebpos = self.lines(|lines| lines[line - 1]);
+            let linebpos = self.lines()[line - 1];
             let col_display = {
                 let start_width_idx = self
                     .non_narrow_chars

View file

@@ -328,7 +328,7 @@ impl SourceMap {
         name_hash: Hash128,
         source_len: u32,
         cnum: CrateNum,
-        file_local_lines: Lock<SourceFileLines>,
+        file_local_lines: FreezeLock<SourceFileLines>,
         multibyte_chars: Vec<MultiByteChar>,
         non_narrow_chars: Vec<NonNarrowChar>,
         normalized_pos: Vec<NormalizedPos>,

View file

@@ -1,6 +1,6 @@
 use super::*;

-use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::sync::{FreezeLock, Lrc};

 fn init_source_map() -> SourceMap {
     let sm = SourceMap::new(FilePathMapping::empty());
@@ -246,7 +246,7 @@ fn t10() {
         name_hash,
         source_len.to_u32(),
         CrateNum::new(0),
-        lines,
+        FreezeLock::new(lines.read().clone()),
         multibyte_chars,
         non_narrow_chars,
         normalized_pos,

View file

@@ -7,9 +7,7 @@ fn test_lookup_line() {
         SourceFile::new(FileName::Anon(Hash64::ZERO), source, SourceFileHashAlgorithm::Sha256)
             .unwrap();
     sf.start_pos = BytePos(3);
-    sf.lines(|lines| {
-        assert_eq!(lines, &[RelativeBytePos(0), RelativeBytePos(14), RelativeBytePos(25)])
-    });
+    assert_eq!(sf.lines(), &[RelativeBytePos(0), RelativeBytePos(14), RelativeBytePos(25)]);

     assert_eq!(sf.lookup_line(RelativeBytePos(0)), Some(0));
     assert_eq!(sf.lookup_line(RelativeBytePos(1)), Some(0));

View file

@@ -507,20 +507,18 @@ fn item_has_safety_comment(cx: &LateContext<'_>, item: &hir::Item<'_>) -> HasSafetyComment {
             && Lrc::ptr_eq(&unsafe_line.sf, &comment_start_line.sf)
             && let Some(src) = unsafe_line.sf.src.as_deref()
         {
-            return unsafe_line.sf.lines(|lines| {
-                if comment_start_line.line >= unsafe_line.line {
-                    HasSafetyComment::No
-                } else {
-                    match text_has_safety_comment(
-                        src,
-                        &lines[comment_start_line.line + 1..=unsafe_line.line],
-                        unsafe_line.sf.start_pos,
-                    ) {
-                        Some(b) => HasSafetyComment::Yes(b),
-                        None => HasSafetyComment::No,
-                    }
-                }
-            });
+            return if comment_start_line.line >= unsafe_line.line {
+                HasSafetyComment::No
+            } else {
+                match text_has_safety_comment(
+                    src,
+                    &unsafe_line.sf.lines()[comment_start_line.line + 1..=unsafe_line.line],
+                    unsafe_line.sf.start_pos,
+                ) {
+                    Some(b) => HasSafetyComment::Yes(b),
+                    None => HasSafetyComment::No,
+                }
+            };
         }
     }
     HasSafetyComment::Maybe
@@ -551,20 +549,18 @@ fn stmt_has_safety_comment(cx: &LateContext<'_>, span: Span, hir_id: HirId) -> HasSafetyComment {
             && Lrc::ptr_eq(&unsafe_line.sf, &comment_start_line.sf)
             && let Some(src) = unsafe_line.sf.src.as_deref()
         {
-            return unsafe_line.sf.lines(|lines| {
-                if comment_start_line.line >= unsafe_line.line {
-                    HasSafetyComment::No
-                } else {
-                    match text_has_safety_comment(
-                        src,
-                        &lines[comment_start_line.line + 1..=unsafe_line.line],
-                        unsafe_line.sf.start_pos,
-                    ) {
-                        Some(b) => HasSafetyComment::Yes(b),
-                        None => HasSafetyComment::No,
-                    }
-                }
-            });
+            return if comment_start_line.line >= unsafe_line.line {
+                HasSafetyComment::No
+            } else {
+                match text_has_safety_comment(
+                    src,
+                    &unsafe_line.sf.lines()[comment_start_line.line + 1..=unsafe_line.line],
+                    unsafe_line.sf.start_pos,
+                ) {
+                    Some(b) => HasSafetyComment::Yes(b),
+                    None => HasSafetyComment::No,
+                }
+            };
         }
     }
     HasSafetyComment::Maybe
@@ -614,20 +610,18 @@ fn span_from_macro_expansion_has_safety_comment(cx: &LateContext<'_>, span: Span) -> HasSafetyComment {
             && Lrc::ptr_eq(&unsafe_line.sf, &macro_line.sf)
             && let Some(src) = unsafe_line.sf.src.as_deref()
         {
-            unsafe_line.sf.lines(|lines| {
-                if macro_line.line < unsafe_line.line {
-                    match text_has_safety_comment(
-                        src,
-                        &lines[macro_line.line + 1..=unsafe_line.line],
-                        unsafe_line.sf.start_pos,
-                    ) {
-                        Some(b) => HasSafetyComment::Yes(b),
-                        None => HasSafetyComment::No,
-                    }
-                } else {
-                    HasSafetyComment::No
-                }
-            })
+            if macro_line.line < unsafe_line.line {
+                match text_has_safety_comment(
+                    src,
+                    &unsafe_line.sf.lines()[macro_line.line + 1..=unsafe_line.line],
+                    unsafe_line.sf.start_pos,
+                ) {
+                    Some(b) => HasSafetyComment::Yes(b),
+                    None => HasSafetyComment::No,
+                }
+            } else {
+                HasSafetyComment::No
+            }
         } else {
             // Problem getting source text. Pretend a comment was found.
             HasSafetyComment::Maybe
@@ -671,13 +665,11 @@ fn span_in_body_has_safety_comment(cx: &LateContext<'_>, span: Span) -> bool {
             // Get the text from the start of function body to the unsafe block.
             //     fn foo() { some_stuff; unsafe { stuff }; other_stuff; }
             //              ^-------------^
-            unsafe_line.sf.lines(|lines| {
-                body_line.line < unsafe_line.line && text_has_safety_comment(
-                    src,
-                    &lines[body_line.line + 1..=unsafe_line.line],
-                    unsafe_line.sf.start_pos,
-                ).is_some()
-            })
+            body_line.line < unsafe_line.line && text_has_safety_comment(
+                src,
+                &unsafe_line.sf.lines()[body_line.line + 1..=unsafe_line.line],
+                unsafe_line.sf.start_pos,
+            ).is_some()
         } else {
             // Problem getting source text. Pretend a comment was found.
             true

View file

@@ -118,7 +118,7 @@ fn first_char_in_first_line<T: LintContext>(cx: &T, span: Span) -> Option<BytePos> {
 fn line_span<T: LintContext>(cx: &T, span: Span) -> Span {
     let span = original_sp(span, DUMMY_SP);
     let SourceFileAndLine { sf, line } = cx.sess().source_map().lookup_line(span.lo()).unwrap();
-    let line_start = sf.lines(|lines| lines[line]);
+    let line_start = sf.lines()[line];
     let line_start = sf.absolute_position(line_start);
     span.with_lo(line_start)
 }